diff --git a/platform/dbops/binaries/weaviate-src/entities/schema/config/vector_index_config.go b/platform/dbops/binaries/weaviate-src/entities/schema/config/vector_index_config.go new file mode 100644 index 0000000000000000000000000000000000000000..f0b29bb89ef4a13cb7a4314ed86314b7096ce0c5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/schema/config/vector_index_config.go @@ -0,0 +1,70 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "fmt" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modelsext" +) + +type VectorIndexConfig interface { + IndexType() string + DistanceName() string + IsMultiVector() bool +} + +func TypeAssertVectorIndex(class *models.Class, targetVectors []string) ([]VectorIndexConfig, error) { + if len(class.VectorConfig) == 0 || (modelsext.ClassHasLegacyVectorIndex(class) && len(targetVectors) == 0) { + vectorIndexConfig, ok := class.VectorIndexConfig.(VectorIndexConfig) + if !ok { + return nil, fmt.Errorf("class '%s' vector index: config is not schema.VectorIndexConfig: %T", + class.Class, class.VectorIndexConfig) + } + return []VectorIndexConfig{vectorIndexConfig}, nil + } + + if len(class.VectorConfig) == 1 { + var vectorConfig models.VectorConfig + for _, v := range class.VectorConfig { + vectorConfig = v + break + } + vectorIndexConfig, ok := vectorConfig.VectorIndexConfig.(VectorIndexConfig) + if !ok { + return nil, fmt.Errorf("class '%s' vector index: config is not schema.VectorIndexConfig: %T", + class.Class, class.VectorIndexConfig) + } + return []VectorIndexConfig{vectorIndexConfig}, nil + } + + if len(targetVectors) == 0 { + return nil, errors.Errorf("multiple vector configs found 
for class '%s', but no target vector specified", class.Class) + } + + configs := make([]VectorIndexConfig, 0, len(targetVectors)) + for _, targetVector := range targetVectors { + vectorConfig, ok := modelsext.ClassGetVectorConfig(class, targetVector) + if !ok { + return nil, errors.Errorf("vector config not found for target vector: %s", targetVector) + } + vectorIndexConfig, ok := vectorConfig.VectorIndexConfig.(VectorIndexConfig) + if !ok { + return nil, fmt.Errorf("targetVector '%s' vector index: config is not schema.VectorIndexConfig: %T", + targetVector, class.VectorIndexConfig) + } + configs = append(configs, vectorIndexConfig) + } + return configs, nil +} diff --git a/platform/dbops/binaries/weaviate-src/entities/schema/configvalidation/config_validation.go b/platform/dbops/binaries/weaviate-src/entities/schema/configvalidation/config_validation.go new file mode 100644 index 0000000000000000000000000000000000000000..5736ed885f74251a60f951d7bc89f03f3673f94e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/schema/configvalidation/config_validation.go @@ -0,0 +1,44 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package configvalidation + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema/config" + "github.com/weaviate/weaviate/entities/vectorindex/common" +) + +func CheckCertaintyCompatibility(class *models.Class, targetVectors []string) error { + if class == nil { + return errors.Errorf("no class provided to check certainty compatibility") + } + if len(targetVectors) > 1 { + return errors.Errorf("multiple target vectors are not supported with certainty") + } + + vectorConfigs, err := config.TypeAssertVectorIndex(class, targetVectors) + if err != nil { + return err + } + if dn := vectorConfigs[0].DistanceName(); dn != common.DistanceCosine { + return certaintyUnsupportedError(dn) + } + + return nil +} + +func certaintyUnsupportedError(distType string) error { + return errors.Errorf( + "can't compute and return certainty when vector index is configured with %s distance", + distType) +} diff --git a/platform/dbops/binaries/weaviate-src/entities/schema/configvalidation/config_validation_test.go b/platform/dbops/binaries/weaviate-src/entities/schema/configvalidation/config_validation_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a52313ebea1692049cfbb0da5eec074126cb340a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/schema/configvalidation/config_validation_test.go @@ -0,0 +1,61 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package configvalidation + +import ( + "testing" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + vectorIndex "github.com/weaviate/weaviate/entities/vectorindex/common" + "github.com/weaviate/weaviate/entities/vectorindex/hnsw" +) + +func TestCertainty(t *testing.T) { + cases := []struct { + name string + targetVectors []string + class *models.Class + fail bool + }{ + {name: "no vectorizer", targetVectors: nil, class: &models.Class{}, fail: true}, + {name: "cosine", targetVectors: nil, class: &models.Class{VectorIndexConfig: hnsw.UserConfig{Distance: vectorIndex.DistanceCosine}}, fail: false}, + {name: "dot", targetVectors: nil, class: &models.Class{VectorIndexConfig: hnsw.UserConfig{Distance: vectorIndex.DistanceDot}}, fail: true}, + {name: "single target vector", targetVectors: []string{"named"}, class: &models.Class{VectorConfig: map[string]models.VectorConfig{ + "named": {VectorIndexConfig: hnsw.UserConfig{Distance: vectorIndex.DistanceCosine}}, + "named2": {VectorIndexConfig: hnsw.UserConfig{Distance: vectorIndex.DistanceCosine}}, + }}, fail: false}, + {name: "multi target vector", targetVectors: []string{"named", "named2"}, class: &models.Class{VectorConfig: map[string]models.VectorConfig{ + "named": {VectorIndexConfig: hnsw.UserConfig{Distance: vectorIndex.DistanceCosine}}, + "named2": {VectorIndexConfig: hnsw.UserConfig{Distance: vectorIndex.DistanceCosine}}, + }}, fail: true}, + {name: "single target vector and dot", targetVectors: []string{"named"}, class: &models.Class{VectorConfig: map[string]models.VectorConfig{ + "named": {VectorIndexConfig: hnsw.UserConfig{Distance: vectorIndex.DistanceDot}}, + "named2": {VectorIndexConfig: hnsw.UserConfig{Distance: vectorIndex.DistanceCosine}}, + }}, fail: true}, + {name: "single target vector and dot for non selected", targetVectors: []string{"named2"}, class: &models.Class{VectorConfig: map[string]models.VectorConfig{ + "named": 
{VectorIndexConfig: hnsw.UserConfig{Distance: vectorIndex.DistanceDot}}, + "named2": {VectorIndexConfig: hnsw.UserConfig{Distance: vectorIndex.DistanceCosine}}, + }}, fail: false}, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + err := CheckCertaintyCompatibility(tt.class, tt.targetVectors) + if tt.fail { + require.NotNil(t, err) + } else { + require.Nil(t, err) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/entities/schema/crossref/bulk_builder.go b/platform/dbops/binaries/weaviate-src/entities/schema/crossref/bulk_builder.go new file mode 100644 index 0000000000000000000000000000000000000000..228e279b441c78d5cafb290824fb0e4cca9005e3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/schema/crossref/bulk_builder.go @@ -0,0 +1,107 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package crossref + +import ( + "fmt" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/usecases/byteops" +) + +// BulkBuilder is a low-alloc tool to build many beacon strings (as []byte). It +// is optimized to allocate just once as opposed to once per ID. This makes it +// considerably faster when generating 100s of thousand of beacons strings. The +// main intended use case for this is building propValuePairs in ref-filters. +// +// The BulkBuilder makes some estimations for how much memory will be necessary +// based on expected input params. If those requirements get exceeded, it will +// still be safe to use, but will fallback to allocating dynamically. 
+type BulkBuilder struct { + byteops.ReadWriter + prefix []byte +} + +func NewBulkBuilderWithEstimates(expectedCount int, exampleClassName string, + overheadRatio float64, +) *BulkBuilder { + prefix := []byte("weaviate://localhost/") + + lenOfTypicalClassName := int(float64(len(exampleClassName)) * overheadRatio) + predictedSize := expectedCount * (len(prefix) + 1 + lenOfTypicalClassName + 36) + + bb := &BulkBuilder{ + prefix: prefix, + ReadWriter: byteops.NewReadWriter(make([]byte, predictedSize)), + } + + return bb +} + +func (bb *BulkBuilder) ClassAndID(className string, + id strfmt.UUID, +) []byte { + requiredSpace := len(bb.prefix) + len(id) + if int(bb.Position)+requiredSpace >= len(bb.Buffer) { + return bb.fallbackWithClassName(className, id) + } + + // copy the start pos, we will need this at the end to know what to return to + // the caller + start := bb.Position + bb.CopyBytesToBuffer(bb.prefix) + + // This is a safe way, in case a class-name ever contains non-ASCII + // characters. If we could be 100% sure that a class is ASCII-only, we could + // remove this allocation and instead use the same copy-by-rune approach that + // we use later on for the ID. + bb.CopyBytesToBuffer([]byte(className)) + bb.WriteByte('/') // The separating slash between class and ID + for _, runeValue := range id { + // We know that the UUID-string never contains non-ASCII characters. This + // means it safe to convert the uint32-rune into a uint8. 
This allows us to + // copy char by char without any additional allocs + bb.WriteByte(uint8(runeValue)) + } + + return bb.Buffer[start:bb.Position] +} + +func (bb *BulkBuilder) LegacyIDOnly(id strfmt.UUID) []byte { + requiredSpace := len(bb.prefix) + len(id) + if int(bb.Position)+requiredSpace >= len(bb.Buffer) { + return bb.fallbackWithoutClassName(id) + } + + // copy the start pos, we will need this at the end to know what to return to + // the caller + start := bb.Position + bb.CopyBytesToBuffer(bb.prefix) + for _, runeValue := range id { + // We know that the UUID-string never contains non-ASCII characters. This + // means it safe to convert the uint32-rune into a uint8. This allows us to + // copy char by char without any additional allocs + bb.WriteByte(uint8(runeValue)) + } + + return bb.Buffer[start:bb.Position] +} + +func (bb *BulkBuilder) fallbackWithClassName( + className string, id strfmt.UUID, +) []byte { + return []byte(fmt.Sprintf("%s%s/%s", bb.prefix, className, id)) +} + +func (bb *BulkBuilder) fallbackWithoutClassName(id strfmt.UUID) []byte { + return []byte(fmt.Sprintf("%s%s", bb.prefix, id)) +} diff --git a/platform/dbops/binaries/weaviate-src/entities/schema/crossref/bulk_builder_test.go b/platform/dbops/binaries/weaviate-src/entities/schema/crossref/bulk_builder_test.go new file mode 100644 index 0000000000000000000000000000000000000000..853bc302a51d631330fd540af7e5449730690206 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/schema/crossref/bulk_builder_test.go @@ -0,0 +1,109 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package crossref + +import ( + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" +) + +func TestBulkBuilder(t *testing.T) { + tests := []struct { + name string + expectedFn func(id string) string + estimatedSize int + iterations int + className string + withClassName bool + }{ + { + name: "with class name - enough-prealloc", + withClassName: true, + className: "MyClass", + expectedFn: func(id string) string { + return fmt.Sprintf("weaviate://localhost/MyClass/%s", id) + }, + estimatedSize: 25, + iterations: 25, + }, + { + name: "with class name with non-ASCII- enough-prealloc", + withClassName: true, + className: "My國Class", + expectedFn: func(id string) string { + return fmt.Sprintf("weaviate://localhost/My國Class/%s", id) + }, + estimatedSize: 25, + iterations: 25, + }, + { + name: "with class name - not enough-prealloc", + withClassName: true, + className: "MyClass", + expectedFn: func(id string) string { + return fmt.Sprintf("weaviate://localhost/MyClass/%s", id) + }, + estimatedSize: 10, + iterations: 25, + }, + { + name: "with class name with non-ASCII - not enough-prealloc", + withClassName: true, + className: "My國Class", + expectedFn: func(id string) string { + return fmt.Sprintf("weaviate://localhost/My國Class/%s", id) + }, + estimatedSize: 10, + iterations: 25, + }, + { + name: "without class name - enough-prealloc", + withClassName: false, + className: "MyClass", + expectedFn: func(id string) string { + return fmt.Sprintf("weaviate://localhost/%s", id) + }, + estimatedSize: 25, + iterations: 25, + }, + { + name: "without class name - not enough-prealloc", + withClassName: false, + className: "MyClass", + expectedFn: func(id string) string { + return fmt.Sprintf("weaviate://localhost/%s", id) + }, + estimatedSize: 10, + iterations: 25, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + bb := 
NewBulkBuilderWithEstimates(tt.estimatedSize, tt.className, 1.00) + for i := 0; i < tt.iterations; i++ { + id := uuid.New().String() + if tt.withClassName { + res := bb.ClassAndID(tt.className, strfmt.UUID(id)) + assert.Equal(t, tt.expectedFn(id), string(res)) + } else { + res := bb.LegacyIDOnly(strfmt.UUID(id)) + assert.Equal(t, tt.expectedFn(id), string(res)) + } + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/entities/schema/crossref/crossref.go b/platform/dbops/binaries/weaviate-src/entities/schema/crossref/crossref.go new file mode 100644 index 0000000000000000000000000000000000000000..bf851d12aebf35bbb116dce5257c0ad08161d39a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/schema/crossref/crossref.go @@ -0,0 +1,117 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package crossref + +import ( + "fmt" + "net/url" + "strings" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/entities/models" +) + +const ( + _LocalHost = "localhost" + _Schema = "weaviate" +) + +// Ref is an abstraction of the cross-refs which are specified in a URI format +// in the API. When this type is used it is safe to assume that a Ref is +// semantically valid. This guarantee would not be possible on the URI format, +// as the URI can be well-formed, but not contain the data we expect in it. +// Do not use directly, such as crossref.Ref{}, as you won't have any +// guarantees in this case. 
Always use one of the parsing options or New() +type Ref struct { + Local bool `json:"local"` + PeerName string `json:"peerName"` + TargetID strfmt.UUID `json:"targetID"` + Class string `json:"className"` +} + +// Parse is a safe way to generate a Ref, as it will error if any of the input +// parameters are not as expected. +func Parse(uriString string) (*Ref, error) { + uri, err := url.Parse(uriString) + if err != nil || uri.Path == "" { + return nil, fmt.Errorf("invalid cref URI: %w", err) + } + + segments := strings.Split(uri.Path, "/") + class, id, idx := "", "", 1 + switch len(segments) { + case 3: + class = segments[1] + id = segments[2] + idx = 2 + case 2: + id = segments[1] + default: + return nil, fmt.Errorf( + "invalid cref URI: path must be of format '/', but got '%s'", uri.Path) + } + if ok := strfmt.IsUUID(id); !ok { + return nil, fmt.Errorf("invalid cref URI: %dnd path segment must be uuid, but got '%s'", + idx, id) + } + + return &Ref{ + Local: uri.Host == _LocalHost, + PeerName: uri.Host, + TargetID: strfmt.UUID(id), + Class: class, + }, nil +} + +// ParseSingleRef is a safe way to generate a Ref from a models.SingleRef, a +// helper construct that represents the API structure. It will error if any of +// the input parameters are not as expected. 
+func ParseSingleRef(singleRef *models.SingleRef) (*Ref, error) { + return Parse(string(singleRef.Beacon)) +} + +// New is a safe way to generate a Reference, as all required arguments must be +// set in the constructor fn +func New(peerName string, class string, target strfmt.UUID) *Ref { + return &Ref{ + Local: peerName == _LocalHost, + PeerName: peerName, + TargetID: target, + Class: class, + } +} + +func NewLocalhost(class string, target strfmt.UUID) *Ref { + return New(_LocalHost, class, target) +} + +func (r *Ref) String() string { + path := fmt.Sprintf("%s/%s", r.Class, r.TargetID) + if r.Class == "" { + path = fmt.Sprintf("/%s", r.TargetID) + } + uri := url.URL{ + Host: r.PeerName, + Scheme: _Schema, + Path: path, + } + + return uri.String() +} + +// SingleRef converts the parsed Ref back into the API helper construct +// containing a stringified representation (URI format) of the Ref +func (r *Ref) SingleRef() *models.SingleRef { + return &models.SingleRef{ + Beacon: strfmt.URI(r.String()), + } +} diff --git a/platform/dbops/binaries/weaviate-src/entities/schema/crossref/crossref_source.go b/platform/dbops/binaries/weaviate-src/entities/schema/crossref/crossref_source.go new file mode 100644 index 0000000000000000000000000000000000000000..82a7ecd051e947efba1d791f243658db7cc7ebfb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/schema/crossref/crossref_source.go @@ -0,0 +1,102 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package crossref + +import ( + "fmt" + "net/url" + "strings" + "unicode" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/entities/schema" +) + +// RefSource is an abstraction of the source of a cross-ref. 
The opposite would +// be Ref which represents the target instead. A RefSource is specified in a URI +// format in the API. When this type is used it is safe to assume that a Ref is +// semantically valid. This guarantee would not be possible on the URI format, +// as the URI can be well-formed, but not contain the data we expect in it. Do +// not use directly, such as crossref.RefSource{}, as you won't have any +// guarantees in this case. Always use one of the parsing options or New() +type RefSource struct { + Local bool `json:"local"` + PeerName string `json:"peerName"` + Property schema.PropertyName `json:"property"` + Class schema.ClassName `json:"class"` + TargetID strfmt.UUID `json:"targetID"` +} + +func NewSource(className schema.ClassName, + property schema.PropertyName, id strfmt.UUID, +) *RefSource { + return &RefSource{ + Local: true, + PeerName: "localhost", + Class: className, + TargetID: id, + Property: property, + } +} + +// ParseSource is a safe way to generate a RefSource, as it will error if any +// of the input parameters are not as expected. 
+func ParseSource(uriString string) (*RefSource, error) { + uri, err := url.Parse(uriString) + if err != nil { + return nil, fmt.Errorf("invalid cref URI: %w", err) + } + + pathSegments := strings.Split(uri.Path, "/") + if len(pathSegments) != 4 { + return nil, fmt.Errorf( + "invalid cref URI: must use long-form: path must be of format '///', but got '%s'", + uri.Path) + } + + if ok := strfmt.IsUUID(pathSegments[2]); !ok { + return nil, fmt.Errorf("invalid cref URI: 2nd path segment must be uuid, but got '%s'", + pathSegments[3]) + } + + class := pathSegments[1] + if class == "" { + return nil, fmt.Errorf("className cannot be empty") + } + + if unicode.IsLower(rune(class[0])) { + return nil, fmt.Errorf("className must start with an uppercase letter, but got %s", class) + } + + property := pathSegments[3] + if property == "" { + return nil, fmt.Errorf("property cannot be empty") + } + + return &RefSource{ + Local: (uri.Host == "localhost"), + PeerName: uri.Host, + TargetID: strfmt.UUID(pathSegments[2]), + Class: schema.ClassName(class), + Property: schema.PropertyName(property), + }, nil +} + +func (r *RefSource) String() string { + uri := url.URL{ + Host: r.PeerName, + Scheme: "weaviate", + Path: fmt.Sprintf("/%s/%s/%s", r.Class, r.TargetID, r.Property), + } + + return uri.String() +} diff --git a/platform/dbops/binaries/weaviate-src/entities/schema/crossref/crossref_source_test.go b/platform/dbops/binaries/weaviate-src/entities/schema/crossref/crossref_source_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e699e75b22afee99283bcd861e12f5810a332523 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/schema/crossref/crossref_source_test.go @@ -0,0 +1,169 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package crossref + +import ( + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/schema" +) + +func Test_Source_ParsingFromString(t *testing.T) { + t.Run("from a local object ref that is well-formed", func(t *testing.T) { + uri := "weaviate://localhost/MyClassName/c2cd3f91-0160-477e-869a-8da8829e0a4d/myRefProp" + ref, err := ParseSource(uri) + + require.Nil(t, err, "should not error") + + t.Run("is a local ref", func(t *testing.T) { + assert.Equal(t, ref.Local, true) + }) + + t.Run("peerName points to localhost", func(t *testing.T) { + assert.Equal(t, ref.PeerName, "localhost") + }) + + t.Run("id points correctly", func(t *testing.T) { + assert.Equal(t, ref.TargetID, strfmt.UUID("c2cd3f91-0160-477e-869a-8da8829e0a4d")) + }) + + t.Run("the class name is correct", func(t *testing.T) { + assert.Equal(t, ref.Class, schema.ClassName("MyClassName")) + }) + + t.Run("the property name is correct", func(t *testing.T) { + assert.Equal(t, ref.Property, schema.PropertyName("myRefProp")) + }) + + t.Run("assembling a new source and comparing if the match", func(t *testing.T) { + alt := NewSource("MyClassName", "myRefProp", + "c2cd3f91-0160-477e-869a-8da8829e0a4d") + assert.Equal(t, ref, alt) + }) + }) + + t.Run("from a local action ref that is well-formed", func(t *testing.T) { + uri := "weaviate://localhost/MyActionClass/c2cd3f91-0160-477e-869a-8da8829e0a4d/myRefProp" + ref, err := ParseSource(uri) + + require.Nil(t, err, "should not error") + + t.Run("is a local ref", func(t *testing.T) { + assert.Equal(t, ref.Local, true) + }) + + t.Run("peerName points to localhost", func(t *testing.T) { + assert.Equal(t, ref.PeerName, "localhost") + }) + + t.Run("id points correctly", func(t *testing.T) { + assert.Equal(t, ref.TargetID, strfmt.UUID("c2cd3f91-0160-477e-869a-8da8829e0a4d")) + }) + + t.Run("the class name is correct", 
func(t *testing.T) { + assert.Equal(t, ref.Class, schema.ClassName("MyActionClass")) + }) + + t.Run("the property name is correct", func(t *testing.T) { + assert.Equal(t, ref.Property, schema.PropertyName("myRefProp")) + }) + + t.Run("assembling a new source and comparing if the match", func(t *testing.T) { + alt := NewSource("MyActionClass", "myRefProp", + "c2cd3f91-0160-477e-869a-8da8829e0a4d") + assert.Equal(t, ref, alt) + }) + }) + + t.Run("from a network action ref that is well-formed", func(t *testing.T) { + uri := "weaviate://another-weaviate/SomeActionClass/c2cd3f91-0160-477e-869a-8da8829e0a4d/myRefProp" + ref, err := ParseSource(uri) + + require.Nil(t, err, "should not error") + + t.Run("is a local ref", func(t *testing.T) { + assert.Equal(t, ref.Local, false) + }) + + t.Run("peerName points to localhost", func(t *testing.T) { + assert.Equal(t, ref.PeerName, "another-weaviate") + }) + + t.Run("id points correctly", func(t *testing.T) { + assert.Equal(t, ref.TargetID, strfmt.UUID("c2cd3f91-0160-477e-869a-8da8829e0a4d")) + }) + + t.Run("the class name is correct", func(t *testing.T) { + assert.Equal(t, ref.Class, schema.ClassName("SomeActionClass")) + }) + + t.Run("the property name is correct", func(t *testing.T) { + assert.Equal(t, ref.Property, schema.PropertyName("myRefProp")) + }) + }) + + t.Run("with formatting errors", func(t *testing.T) { + type testCaseError struct { + name string + uri string + } + + tests := []testCaseError{ + { + name: "with an invalid URL", + uri: "i:am:not:a:url", + }, + { + name: "with too few path segments", + uri: "weaviate://localhost/SomeClass", + }, + { + name: "with too many path segments", + uri: "weaviate://localhost/SomeClass/c2cd3f91-0160-477e-869a-8da8829e0a4d/myRefProp/somethingElse", + }, + { + name: "without a property", + uri: "weaviate://localhost/SomeClass/c2cd3f91-0160-477e-869a-8da8829e0a4d/", + }, + { + name: "with an invalid uuid", + uri: 
"weaviate://localhost/SomeClass/c2cd3f91-iSneakedInHere-477e-869a-8da8829e0a4d", + }, + { + name: "with an invalid kind", // was /humans/SomeClass + uri: "weaviate://localhost/SomeClass/c2cd3f91-0160-477e-869a-8da8829e0a4d", + }, + { + name: "with a lowercased class name", + uri: "weaviate://localhost/someClass/c2cd3f91-0160-477e-869a-8da8829e0a4d/myRefProp", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + _, err := ParseSource(test.uri) + assert.NotNil(t, err, test.name) + }) + } + }) +} + +func Test_Source_GenerateString(t *testing.T) { + uri := "weaviate://localhost/MyClass/c2cd3f91-0160-477e-869a-8da8829e0a4d/myRefProp" + ref, err := ParseSource(uri) + + require.Nil(t, err, "should not error") + assert.Equal(t, uri, ref.String(), "should be the same as the input string") +} diff --git a/platform/dbops/binaries/weaviate-src/entities/schema/crossref/crossref_test.go b/platform/dbops/binaries/weaviate-src/entities/schema/crossref/crossref_test.go new file mode 100644 index 0000000000000000000000000000000000000000..25b0f6f538ece9ce30cf10953d935d640e36a67c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/schema/crossref/crossref_test.go @@ -0,0 +1,233 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package crossref + +import ( + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +func TestParseCrossReference(t *testing.T) { + ref := Ref{ + Local: true, + PeerName: _LocalHost, + TargetID: "c2cd3f91-0160-477e-869a-8da8829e0a4d", + Class: "class", + } + tests := []struct { + beacon string + ref Ref + ok bool + }{ + { + beacon: "weaviate://localhost/class/c2cd3f91-0160-477e-869a-8da8829e0a4d", + ref: ref, + ok: true, + }, + { + beacon: "weaviate://remote/class/c2cd3f91-0160-477e-869a-8da8829e0a4d", + ref: Ref{false, "remote", ref.TargetID, "class"}, + ok: true, + }, + { + beacon: "weaviate://localhost/c2cd3f91-0160-477e-869a-8da8829e0a4d", + ref: Ref{true, _LocalHost, ref.TargetID, ""}, + ok: true, + }, + { + beacon: "weaviate://remote/c2cd3f91-0160-477e-869a-8da8829e0a4d", + ref: Ref{false, "remote", ref.TargetID, ""}, + ok: true, + }, + { + beacon: "weaviate://localhost/class/c2cd3f91-0160-477e-869a-8da8829e0a4d/i-shouldnt-be-here", + }, + { + beacon: "weaviate://localhost/class/invalid-id", + }, + { + beacon: "weaviate://localhost/class", + }, + { + beacon: "weaviate://localhost", + }, + { + beacon: "i:am:not:a:url", + }, + } + for i, tc := range tests { + got, err := Parse(tc.beacon) + if (err == nil) != tc.ok { + t.Errorf("%d - Parse(%s) error %v error expected: %t", i, tc.beacon, err, tc.ok) + continue + } + if err != nil { + continue + } + if *got != tc.ref { + t.Errorf("%d - Parse(%s) got %v want %v", i, tc.beacon, *got, tc.ref) + } + if beacon := got.String(); beacon != tc.beacon { + t.Errorf("beacon expected: %v want %v", tc.beacon, beacon) + } + } +} + +func TestSingleRef(t *testing.T) { + ref := NewLocalhost("class", "c2cd3f91-0160-477e-869a-8da8829e0a4d") + expected := &models.SingleRef{ + Beacon: strfmt.URI("weaviate://localhost/class/c2cd3f91-0160-477e-869a-8da8829e0a4d"), + } + 
sref := ref.SingleRef() + assert.Equal(t, expected, sref, "should create a singleRef") + xref, _ := ParseSingleRef(sref) + assert.Equal(t, ref, xref) +} + +func Test_ParsingFromStringDeprecated(t *testing.T) { + t.Run("from a local object ref that is well-formed", func(t *testing.T) { + uri := "weaviate://localhost/c2cd3f91-0160-477e-869a-8da8829e0a4d" + ref, err := Parse(uri) + + require.Nil(t, err, "should not error") + + t.Run("is a local ref", func(t *testing.T) { + assert.Equal(t, ref.Local, true) + }) + + t.Run("peerName points to localhost", func(t *testing.T) { + assert.Equal(t, ref.PeerName, "localhost") + }) + + t.Run("id points correctly", func(t *testing.T) { + assert.Equal(t, ref.TargetID, strfmt.UUID("c2cd3f91-0160-477e-869a-8da8829e0a4d")) + }) + }) + + t.Run("from a local action ref that is well-formed", func(t *testing.T) { + uri := "weaviate://localhost/c2cd3f91-0160-477e-869a-8da8829e0a4d" + ref, err := Parse(uri) + + require.Nil(t, err, "should not error") + + t.Run("is a local ref", func(t *testing.T) { + assert.Equal(t, ref.Local, true) + }) + + t.Run("peerName points to localhost", func(t *testing.T) { + assert.Equal(t, ref.PeerName, "localhost") + }) + + t.Run("id points correctly", func(t *testing.T) { + assert.Equal(t, ref.TargetID, strfmt.UUID("c2cd3f91-0160-477e-869a-8da8829e0a4d")) + }) + }) + + t.Run("from a network action ref that is well-formed", func(t *testing.T) { + uri := "weaviate://another-weaviate/c2cd3f91-0160-477e-869a-8da8829e0a4d" + ref, err := Parse(uri) + + require.Nil(t, err, "should not error") + + t.Run("is a local ref", func(t *testing.T) { + assert.Equal(t, ref.Local, false) + }) + + t.Run("peerName points to localhost", func(t *testing.T) { + assert.Equal(t, ref.PeerName, "another-weaviate") + }) + + t.Run("id points correctly", func(t *testing.T) { + assert.Equal(t, ref.TargetID, strfmt.UUID("c2cd3f91-0160-477e-869a-8da8829e0a4d")) + }) + }) + + t.Run("with formatting errors", func(t *testing.T) { + type 
testCaseError struct { + name string + uri string + } + + tests := []testCaseError{ + { + name: "with an invalid URL", + uri: "i:am:not:a:url", + }, + { + name: "with too few path segments", + uri: "weaviate://localhost", + }, + { + name: "with too many path segments", + uri: "weaviate://localhost/c2cd3f91-0160-477e-869a-8da8829e0a4d/i-shouldnt-be-here", + }, + { + name: "with an invalid uuid", + uri: "weaviate://localhost/c2cd3f91-iSneakedInHere-477e-869a-8da8829e0a4d", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + _, err := Parse(test.uri) + assert.NotNil(t, err, test.name) + }) + } + }) +} + +func Test_ParsingFromSingleRefDeprecated(t *testing.T) { + t.Run("from a local object ref that is well-formed", func(t *testing.T) { + uri := strfmt.URI("weaviate://localhost/c2cd3f91-0160-477e-869a-8da8829e0a4d") + singleRef := &models.SingleRef{ + Beacon: uri, + } + ref, err := ParseSingleRef(singleRef) + + require.Nil(t, err, "should not error") + + t.Run("is a local ref", func(t *testing.T) { + assert.Equal(t, ref.Local, true) + }) + + t.Run("peerName points to localhost", func(t *testing.T) { + assert.Equal(t, ref.PeerName, "localhost") + }) + + t.Run("id points correctly", func(t *testing.T) { + assert.Equal(t, ref.TargetID, strfmt.UUID("c2cd3f91-0160-477e-869a-8da8829e0a4d")) + }) + }) +} + +func Test_GenerateStringDeprecated(t *testing.T) { + uri := "weaviate://localhost/c2cd3f91-0160-477e-869a-8da8829e0a4d" + ref, err := Parse(uri) + + require.Nil(t, err, "should not error") + assert.Equal(t, uri, ref.String(), "should be the same as the input string") +} + +func Test_DeprecatedSingleRef(t *testing.T) { + uri := "weaviate://localhost/c2cd3f91-0160-477e-869a-8da8829e0a4d" + ref, err := Parse(uri) + expectedResult := &models.SingleRef{ + Beacon: strfmt.URI(uri), + } + + require.Nil(t, err, "should not error") + assert.Equal(t, expectedResult, ref.SingleRef(), "should create a singleRef (api construct)") +} diff --git 
a/platform/dbops/binaries/weaviate-src/entities/schema/test_utils/nested_properties.go b/platform/dbops/binaries/weaviate-src/entities/schema/test_utils/nested_properties.go new file mode 100644 index 0000000000000000000000000000000000000000..a9cf6f7f8340783d747edb97d3b313951a32ceaf --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/schema/test_utils/nested_properties.go @@ -0,0 +1,45 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test_utils + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +// AssertNestedPropsMatch fails t unless nestedPropsB contains exactly the same +// nested properties as nestedPropsA, matched by name independent of order. +// For each pair it compares data type, index flags and tokenization; properties +// whose data type is a nested object are compared recursively. +func AssertNestedPropsMatch(t *testing.T, nestedPropsA, nestedPropsB []*models.NestedProperty) { + require.Len(t, nestedPropsB, len(nestedPropsA), "nestedProps: different length") + + // index nestedPropsA by property name so matching is order-independent + npMap := map[string]int{} + for index, np := range nestedPropsA { + npMap[np.Name] = index + } + + for _, npB := range nestedPropsB { + require.Contains(t, npMap, npB.Name) + npA := nestedPropsA[npMap[npB.Name]] + + assert.Equal(t, npA.DataType, npB.DataType) + assert.Equal(t, npA.IndexFilterable, npB.IndexFilterable) + assert.Equal(t, npA.IndexSearchable, npB.IndexSearchable) + assert.Equal(t, npA.IndexRangeFilters, npB.IndexRangeFilters) + assert.Equal(t, npA.Tokenization, npB.Tokenization) + + // recurse only for object-typed properties + if _, isNested := schema.AsNested(npA.DataType); isNested { + AssertNestedPropsMatch(t, npA.NestedProperties, npB.NestedProperties) + } + } +} diff --git a/platform/dbops/binaries/weaviate-src/entities/schema/test_utils/properties.go b/platform/dbops/binaries/weaviate-src/entities/schema/test_utils/properties.go new file mode 100644 index
0000000000000000000000000000000000000000..35fcf47728c23920c7ca23d7249f37996d356634 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/schema/test_utils/properties.go @@ -0,0 +1,45 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test_utils + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +// AssertPropsMatch fails t unless propsB contains exactly the same properties +// as propsA, matched by name independent of order. For each pair it compares +// data type, index flags and tokenization; object-typed properties have their +// nested properties compared recursively via AssertNestedPropsMatch. +func AssertPropsMatch(t *testing.T, propsA, propsB []*models.Property) { + require.Len(t, propsB, len(propsA), "props: different length") + + // index propsA by property name so matching is order-independent + pMap := map[string]int{} + for idx, p := range propsA { + pMap[p.Name] = idx + } + + for _, pB := range propsB { + require.Contains(t, pMap, pB.Name) + pA := propsA[pMap[pB.Name]] + + assert.Equal(t, pA.DataType, pB.DataType) + assert.Equal(t, pA.IndexFilterable, pB.IndexFilterable) + assert.Equal(t, pA.IndexSearchable, pB.IndexSearchable) + assert.Equal(t, pA.IndexRangeFilters, pB.IndexRangeFilters) + assert.Equal(t, pA.Tokenization, pB.Tokenization) + + // recurse only for object-typed properties + if _, isNested := schema.AsNested(pA.DataType); isNested { + AssertNestedPropsMatch(t, pA.NestedProperties, pB.NestedProperties) + } + } +} diff --git a/platform/dbops/binaries/weaviate-src/entities/storobj/enrich_schema_datatypes.go b/platform/dbops/binaries/weaviate-src/entities/storobj/enrich_schema_datatypes.go new file mode 100644 index 0000000000000000000000000000000000000000..8f2baedd7fd68e0105e52d72c60a8412d7856ad7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/storobj/enrich_schema_datatypes.go @@ -0,0 +1,409 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/
+// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package storobj + +import ( + "fmt" + + "github.com/go-openapi/strfmt" + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" +) + +// enrichSchemaTypes walks a JSON-decoded property map in place and replaces +// generic values with their typed equivalents: homogeneous []interface{} +// become []string/[]float64/[]bool, beacon maps become models.MultipleRef, and +// geo/phone maps become their model types. ofNestedProp marks recursion into a +// nested object, where refs, geo and phone types are not supported. +func enrichSchemaTypes(schema map[string]interface{}, ofNestedProp bool) error { + if schema == nil { + return nil + } + + for propName, value := range schema { + switch typed := value.(type) { + case []interface{}: + if isArrayValue(typed) { + // the first element's type decides how the whole array is parsed + switch typed[0].(type) { + case float64: + parsed, err := parseNumberArrayValue(typed) + if err != nil { + return errors.Wrapf(err, "property %q of type number array", propName) + } + + schema[propName] = parsed + case bool: + parsed, err := parseBoolArrayValue(typed) + if err != nil { + return errors.Wrapf(err, "property %q of type boolean array", propName) + } + + schema[propName] = parsed + default: + parsed, err := parseStringArrayValue(typed) + if err != nil { + return errors.Wrapf(err, "property %q of type string array", propName) + } + + schema[propName] = parsed + } + } else if len(typed) == 0 { + // empty arrays. Here we use []interface{} as a placeholder + // type for an empty array, since we cannot determine its + // actual type. in the future, we should persist the schema + // property type information alongside the value to avoid + // this situation + schema[propName] = typed + } else { + // nested properties do not support refs + if !ofNestedProp { + parsed, err := parseCrossRef(typed) + if err == nil { + schema[propName] = parsed + continue + } + } + // apparently object[] + for i := range typed { + t2, ok := typed[i].(map[string]interface{}) + if !ok { + return fmt.Errorf("expected element [%d] of '%s' to be map, %T found", i, propName, typed[i]) + } + // propagate the error; it was previously silently dropped + if err := enrichSchemaTypes(t2, true); err != nil { + return errors.Wrapf(err, "property %q: nested object at index %d", propName, i) + } + } + schema[propName] = typed + } + case map[string]interface{}: + parsed, err := parseMapProp(typed, ofNestedProp) + if err != nil { + return errors.Wrapf(err, "property %q of type map", propName) + } + + schema[propName] = parsed + default: + continue + } + } + + return nil +} + +// parseMapProp converts a map value into its typed form: geo coordinates or +// phone number at the top level, otherwise a recursively enriched nested +// object. nested properties do not support phone or geo data types +func parseMapProp(input map[string]interface{}, ofNestedProp bool) (interface{}, error) { + if !ofNestedProp && isGeoProp(input) { + return parseGeoProp(input) + } + if !ofNestedProp && isPhoneProp(input) { + return parsePhoneNumber(input) + } + // apparently object + err := enrichSchemaTypes(input, true) + return input, err +} + +// isGeoProp reports whether input has exactly the latitude/longitude keys of a +// geo coordinates value. +func isGeoProp(input map[string]interface{}) bool { + expectedProps := []string{"latitude", "longitude"} + + if len(input) != len(expectedProps) { + return false + } + for _, prop := range expectedProps { + if _, ok := input[prop]; !ok { + return false + } + } + return true +} + +// isPhoneProp reports whether input's key set matches one of the accepted +// phone-number field combinations (valid and defaultCountry are optional). +func isPhoneProp(input map[string]interface{}) bool { + validExpectedProps := [][]string{ + {"input", "internationalFormatted", "nationalFormatted", "national", "countryCode", "valid"}, + {"input", "internationalFormatted", "nationalFormatted", "national", "countryCode", "valid", "defaultCountry"}, + {"input", "internationalFormatted", "nationalFormatted", "national", "countryCode"}, + {"input", "internationalFormatted", "nationalFormatted", "national",
"countryCode", "defaultCountry"}, + } + + for _, expectedProps := range validExpectedProps { + match := true + if len(expectedProps) != len(input) { + match = false + } else { + for _, prop := range expectedProps { + if _, ok := input[prop]; !ok { + match = false + break + } + } + } + if match { + return true + } + } + return false +} + +// parseGeoProp converts a latitude/longitude map into a models.GeoCoordinates. +// Both values must be float64 (the type encoding/json produces for numbers). +func parseGeoProp(input map[string]interface{}) (*models.GeoCoordinates, error) { + latFloat, ok := input["latitude"].(float64) + if !ok { + return nil, fmt.Errorf("expected lat to be float64, but is %T", input["latitude"]) + } + + lonFloat, ok := input["longitude"].(float64) + if !ok { + return nil, fmt.Errorf("expected lon to be float64, but is %T", input["longitude"]) + } + + return &models.GeoCoordinates{ + Latitude: ptFloat32(float32(latFloat)), + Longitude: ptFloat32(float32(lonFloat)), + }, nil +} + +// ptFloat32 returns a pointer to a copy of in. +func ptFloat32(in float32) *float32 { + return &in +} + +// parsePhoneNumber converts a phone-number map into a models.PhoneNumber. +// Missing fields default to their zero value via the extract helpers. +func parsePhoneNumber(input map[string]interface{}) (*models.PhoneNumber, error) { + out := &models.PhoneNumber{} + + phoneInput, err := extractStringFromMap(input, "input") + if err != nil { + return nil, err + } + out.Input = phoneInput + + international, err := extractStringFromMap(input, "internationalFormatted") + if err != nil { + return nil, err + } + out.InternationalFormatted = international + + nationalFormatted, err := extractStringFromMap(input, "nationalFormatted") + if err != nil { + return nil, err + } + out.NationalFormatted = nationalFormatted + + national, err := extractNumberFromMap(input, "national") + if err != nil { + return nil, err + } + out.National = uint64(national) + + countryCode, err := extractNumberFromMap(input, "countryCode") + if err != nil { + return nil, err + } + out.CountryCode = uint64(countryCode) + + defaultCountry, err := extractStringFromMap(input, "defaultCountry") + if err != nil { + return nil, err + } + out.DefaultCountry = defaultCountry + + valid, err := extractBoolFromMap(input, "valid") + if err != nil { + return nil,
err + } + out.Valid = valid + + return out, nil +} + +func extractNumberFromMap(input map[string]interface{}, key string) (float64, error) { + field, ok := input[key] + if ok { + asFloat, ok := field.(float64) + if !ok { + return 0, fmt.Errorf("expected '%s' to be float64, but is %T", key, field) + } + + return asFloat, nil + } + return 0, nil +} + +func extractStringFromMap(input map[string]interface{}, key string) (string, error) { + field, ok := input[key] + if ok { + asString, ok := field.(string) + if !ok { + return "", fmt.Errorf("expected '%s' to be string, but is %T", key, field) + } + + return asString, nil + } + return "", nil +} + +func extractBoolFromMap(input map[string]interface{}, key string) (bool, error) { + field, ok := input[key] + if ok { + asBool, ok := field.(bool) + if !ok { + return false, fmt.Errorf("expected '%s' to be bool, but is %T", key, field) + } + + return asBool, nil + } + return false, nil +} + +func isArrayValue(value []interface{}) bool { + if len(value) > 0 { + _, ok := value[0].(map[string]interface{}) + return !ok + } + return false +} + +func parseStringArrayValue(value []interface{}) ([]string, error) { + parsed := make([]string, len(value)) + for i := range value { + asString, ok := value[i].(string) + if !ok { + return nil, fmt.Errorf("string array: expected element %d to be string - got %T", i, value[i]) + } + parsed[i] = asString + } + return parsed, nil +} + +func parseNumberArrayValue(value []interface{}) ([]float64, error) { + parsed := make([]float64, len(value)) + for i := range value { + asFloat, ok := value[i].(float64) + if !ok { + return nil, fmt.Errorf("number array: expected element %d to be float - got %T", i, value[i]) + } + parsed[i] = asFloat + } + return parsed, nil +} + +func parseBoolArrayValue(value []interface{}) ([]bool, error) { + parsed := make([]bool, len(value)) + for i := range value { + asBool, ok := value[i].(bool) + if !ok { + return nil, fmt.Errorf("boolean array: expected element %d to be 
bool - got %T", i, value[i]) + } + parsed[i] = asBool + } + return parsed, nil +} + +// parseCrossRef converts a slice of beacon maps into a models.MultipleRef, +// carrying over optional per-ref classification metadata. +func parseCrossRef(value []interface{}) (models.MultipleRef, error) { + parsed := make(models.MultipleRef, len(value)) + for i, elem := range value { + asMap, ok := elem.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("crossref: expected element %d to be map - got %T", i, elem) + } + + beacon, ok := asMap["beacon"] + if !ok { + return nil, fmt.Errorf("crossref: expected element %d to have key %q - got %v", i, "beacon", elem) + } + + beaconStr, ok := beacon.(string) + if !ok { + return nil, fmt.Errorf("crossref: expected element %d.beacon to be string - got %T", i, beacon) + } + + parsed[i] = &models.SingleRef{ + Beacon: strfmt.URI(beaconStr), + } + + c, ok := asMap["classification"] + if ok { + classification, err := parseRefClassificationMeta(c) + if err != nil { + return nil, errors.Wrap(err, "crossref: parse classification meta") + } + + parsed[i].Classification = classification + } + } + + return parsed, nil +} + +// parseRefClassificationMeta decodes a stored classification map into a +// models.ReferenceMetaClassification; keys absent from the map leave their +// fields at zero (see extractFloat64). +func parseRefClassificationMeta(in interface{}) (*models.ReferenceMetaClassification, error) { + out := &models.ReferenceMetaClassification{} + asMap, ok := in.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("expected classification to be map - got %T", in) + } + + if cod, err := extractFloat64(asMap, "closestOverallDistance"); err != nil { + return nil, err + } else { + out.ClosestOverallDistance = cod + } + + if mwd, err := extractFloat64(asMap, "meanWinningDistance"); err != nil { + return nil, err + } else { + out.WinningDistance = mwd // deprecated remove in 0.23.0 + out.MeanWinningDistance = mwd + } + + if cwd, err := extractFloat64(asMap, "closestWinningDistance"); err != nil { + return nil, err + } else { + out.ClosestWinningDistance = cwd + } + + if mcd, err := extractFloat64(asMap, "meanLosingDistance"); err != nil { + return nil, err + } else { + out.LosingDistance = &mcd // deprecated remove in 0.23.0 + out.MeanLosingDistance =
&mcd + } + + if ccd, err := extractFloat64(asMap, "closestLosingDistance"); err != nil { + return nil, err + } else { + out.ClosestLosingDistance = &ccd + } + + if oc, err := extractFloat64(asMap, "overallCount"); err != nil { + return nil, err + } else { + out.OverallCount = int64(oc) + } + + if wc, err := extractFloat64(asMap, "winningCount"); err != nil { + return nil, err + } else { + out.WinningCount = int64(wc) + } + + if lc, err := extractFloat64(asMap, "losingCount"); err != nil { + return nil, err + } else { + out.LosingCount = int64(lc) + } + + return out, nil +} + +func extractFloat64(source map[string]interface{}, key string) (float64, error) { + value, ok := source[key] + if !ok { + return 0, nil + } + + asFloat, ok := value.(float64) + if !ok { + return 0, fmt.Errorf("expected %s to be float64 - got %T", key, value) + } + + return asFloat, nil +} diff --git a/platform/dbops/binaries/weaviate-src/entities/storobj/errors.go b/platform/dbops/binaries/weaviate-src/entities/storobj/errors.go new file mode 100644 index 0000000000000000000000000000000000000000..e90ee00628e517807035e69881352304a39ba4e3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/storobj/errors.go @@ -0,0 +1,30 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package storobj + +import "fmt" + +// ErrNotFound reports that no object exists for a given doc id. It carries +// the original caller-supplied message for context. +type ErrNotFound struct { + DocID uint64 + OriginalMsg string +} + +// NewErrNotFoundf builds an ErrNotFound for docID with a printf-style message. +func NewErrNotFoundf(docID uint64, msg string, args ...interface{}) error { + return ErrNotFound{ + DocID: docID, + OriginalMsg: fmt.Sprintf(msg, args...), + } +} + +// Error implements the error interface, combining the doc id and the original message. +func (err ErrNotFound) Error() string { + return fmt.Sprintf("no object found for doc id %d: %s", err.DocID, err.OriginalMsg) +} diff --git a/platform/dbops/binaries/weaviate-src/entities/storobj/parse_single_object.go b/platform/dbops/binaries/weaviate-src/entities/storobj/parse_single_object.go new file mode 100644 index 0000000000000000000000000000000000000000..5fa7346df7080bf031cff35f460dba75e5f4e84d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/storobj/parse_single_object.go @@ -0,0 +1,173 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+// +// CONTACT: hello@weaviate.io +// + +package storobj + +import ( + "bytes" + "encoding/binary" + "strconv" + + "github.com/buger/jsonparser" + "github.com/google/uuid" + "github.com/pkg/errors" +) + +func ParseAndExtractProperty(data []byte, propName string) ([]string, bool, error) { + if propName == "id" || propName == "_id" { + return extractID(data) + } + if propName == "_creationTimeUnix" { + return extractCreationTimeUnix(data) + } + if propName == "_lastUpdateTimeUnix" { + return extractLastUpdateTimeUnix(data) + } + return ParseAndExtractTextProp(data, propName) +} + +func ParseAndExtractTextProp(data []byte, propName string) ([]string, bool, error) { + vals := []string{} + err := parseAndExtractValueProp(data, propName, func(value []byte) { + vals = append(vals, string(value)) + }) + if err != nil { + return nil, false, err + } + return vals, true, nil +} + +func ParseAndExtractNumberArrayProp(data []byte, propName string) ([]float64, bool, error) { + vals := []float64{} + err := parseAndExtractValueProp(data, propName, func(value []byte) { + vals = append(vals, mustExtractNumber(value)) + }) + if err != nil { + return nil, false, err + } + return vals, true, nil +} + +func ParseAndExtractBoolArrayProp(data []byte, propName string) ([]bool, bool, error) { + vals := []bool{} + err := parseAndExtractValueProp(data, propName, func(value []byte) { + vals = append(vals, mustExtractBool(value)) + }) + if err != nil { + return nil, false, err + } + return vals, true, nil +} + +func parseAndExtractValueProp(data []byte, propName string, valueFn func(value []byte)) error { + propsBytes, err := extractPropsBytes(data) + if err != nil { + return err + } + + val, t, _, err := jsonparser.Get(propsBytes, propName) + // Some objects can have nil as value for the property, in this case skip the object + if err != nil { + if err.Error() == "Key path not found" { + return nil + } + return err + } + + if t == jsonparser.Array { + jsonparser.ArrayEach(val, func(value 
[]byte, dataType jsonparser.ValueType, offset int, err error) { + valueFn(value) + }) + } else { + valueFn(val) + } + + return nil +} + +func mustExtractNumber(value []byte) float64 { + number, err := strconv.ParseFloat(string(value), 64) + if err != nil { + panic("not a float64") + } + return number +} + +func mustExtractBool(value []byte) bool { + boolVal, err := strconv.ParseBool(string(value)) + if err != nil { + panic("not a bool") + } + return boolVal +} + +func extractID(data []byte) ([]string, bool, error) { + start := 1 + 8 + 1 + end := start + 16 + if len(data) > end { + uuidParsed, err := uuid.FromBytes(data[start:end]) + if err != nil { + return nil, false, errors.New("cannot parse id property") + } + return []string{uuidParsed.String()}, true, nil + } + return nil, false, errors.New("id property not found") +} + +func extractCreationTimeUnix(data []byte) ([]string, bool, error) { + start := 1 + 8 + 1 + 16 + end := start + 8 + if len(data) > end { + return extractTimeUnix(data[start:end], "_creationTimeUnix") + } + return nil, false, errors.New("_creationTimeUnix property not found") +} + +func extractLastUpdateTimeUnix(data []byte) ([]string, bool, error) { + start := 1 + 8 + 1 + 16 + 8 + end := start + 8 + if len(data) > end { + return extractTimeUnix(data[start:end], "_lastUpdateTimeUnix") + } + return nil, false, errors.New("_lastUpdateTimeUnix property not found") +} + +func extractTimeUnix(data []byte, propertyName string) ([]string, bool, error) { + var timeUnix int64 + r := bytes.NewReader(data) + if err := binary.Read(r, binary.LittleEndian, &timeUnix); err != nil { + return nil, false, errors.Errorf("cannot parse %s property", propertyName) + } + return []string{strconv.FormatInt(timeUnix, 10)}, true, nil +} + +func extractPropsBytes(data []byte) ([]byte, error) { + version := uint8(data[0]) + if version != 1 { + return nil, errors.Errorf("unsupported binary marshaller version %d", version) + } + + vecLen := 
binary.LittleEndian.Uint16(data[discardBytesPreVector : discardBytesPreVector+2]) + + classNameStart := int64(discardBytesPreVector) + 2 + int64(vecLen)*4 + + classNameLen := binary.LittleEndian.Uint16(data[classNameStart : classNameStart+2]) + + propsLenStart := classNameStart + 2 + int64(classNameLen) + propsLen := binary.LittleEndian.Uint32(data[propsLenStart : propsLenStart+4]) + + start := int64(propsLenStart + 4) + end := start + int64(propsLen) + + return data[start:end], nil +} + +const discardBytesPreVector = 1 + 8 + 1 + 16 + 8 + 8 diff --git a/platform/dbops/binaries/weaviate-src/entities/storobj/storage_object.go b/platform/dbops/binaries/weaviate-src/entities/storobj/storage_object.go new file mode 100644 index 0000000000000000000000000000000000000000..4fcd292b7c0fecd840e10f4ec8963e4fbe8d97ba --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/storobj/storage_object.go @@ -0,0 +1,1643 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package storobj + +import ( + "bytes" + "encoding/binary" + "encoding/json" + "fmt" + "io" + "math" + "runtime" + + "github.com/buger/jsonparser" + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/vmihailenco/msgpack/v5" + "github.com/weaviate/weaviate/entities/additional" + errwrap "github.com/weaviate/weaviate/entities/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/usecases/byteops" +) + +var bufPool *bufferPool + +type Vectors map[string][]float32 + +func init() { + // a 10kB buffer should be large enough for typical cases, it can fit a + // 1536d uncompressed vector and about 3kB of object payload. If the + // initial size is not large enoug, the caller can always allocate a larger + // buffer and return that to the pool instead. 
+ bufPool = newBufferPool(10 * 1024) +} + +type Object struct { + MarshallerVersion uint8 + Object models.Object `json:"object"` + Vector []float32 `json:"vector"` + VectorLen int `json:"-"` + BelongsToNode string `json:"-"` + BelongsToShard string `json:"-"` + IsConsistent bool `json:"-"` + DocID uint64 + Vectors map[string][]float32 `json:"vectors"` + MultiVectors map[string][][]float32 `json:"multivectors"` +} + +func New(docID uint64) *Object { + return &Object{ + MarshallerVersion: 1, + DocID: docID, + } +} + +// TODO: temporary solution +func FromObject(object *models.Object, vector []float32, vectors map[string][]float32, multivectors map[string][][]float32) *Object { + // clear out nil entries of properties to make sure leaving a property out and setting it nil is identical + properties, ok := object.Properties.(map[string]interface{}) + if ok { + for key, prop := range properties { + if prop == nil { + delete(properties, key) + } + } + object.Properties = properties + } + + var vecs map[string][]float32 + if vectors != nil { + vecs = make(map[string][]float32) + for targetVector, vector := range vectors { + vecs[targetVector] = vector + } + } + + var multiVectors map[string][][]float32 + if multivectors != nil { + + multiVectors = make(map[string][][]float32) + for targetVector, vectors := range multivectors { + multiVectors[targetVector] = vectors + } + } + + return &Object{ + Object: *object, + Vector: vector, + MarshallerVersion: 1, + VectorLen: len(vector), + Vectors: vecs, + MultiVectors: multiVectors, + } +} + +func FromBinary(data []byte) (*Object, error) { + ko := &Object{} + if err := ko.UnmarshalBinary(data); err != nil { + return nil, err + } + + return ko, nil +} + +func FromBinaryUUIDOnly(data []byte) (*Object, error) { + ko := &Object{} + + rw := byteops.NewReadWriter(data) + version := rw.ReadUint8() + if version != 1 { + return nil, errors.Errorf("unsupported binary marshaller version %d", version) + } + + ko.MarshallerVersion = version + + 
ko.DocID = rw.ReadUint64() + rw.MoveBufferPositionForward(1) // ignore kind-byte + uuidObj, err := uuid.FromBytes(rw.ReadBytesFromBuffer(16)) + if err != nil { + return nil, fmt.Errorf("parse uuid: %w", err) + } + ko.Object.ID = strfmt.UUID(uuidObj.String()) + + ko.Object.CreationTimeUnix = int64(rw.ReadUint64()) + ko.Object.LastUpdateTimeUnix = int64(rw.ReadUint64()) + + vecLen := rw.ReadUint16() + rw.MoveBufferPositionForward(uint64(vecLen * 4)) + classNameLen := rw.ReadUint16() + + ko.Object.Class = string(rw.ReadBytesFromBuffer(uint64(classNameLen))) + + return ko, nil +} + +func FromBinaryOptional(data []byte, + addProp additional.Properties, properties *PropertyExtraction, +) (*Object, error) { + ko := &Object{} + + rw := byteops.NewReadWriter(data) + ko.MarshallerVersion = rw.ReadUint8() + if ko.MarshallerVersion != 1 { + return nil, errors.Errorf("unsupported binary marshaller version %d", ko.MarshallerVersion) + } + ko.DocID = rw.ReadUint64() + rw.MoveBufferPositionForward(1) // ignore kind-byte + uuidObj, err := uuid.FromBytes(rw.ReadBytesFromBuffer(16)) + if err != nil { + return nil, fmt.Errorf("parse uuid: %w", err) + } + uuidParsed := strfmt.UUID(uuidObj.String()) + + createTime := int64(rw.ReadUint64()) + updateTime := int64(rw.ReadUint64()) + vectorLength := rw.ReadUint16() + // The vector length should always be returned (for usage metrics purposes) even if the vector itself is skipped + ko.VectorLen = int(vectorLength) + if addProp.Vector { + ko.Object.Vector = make([]float32, vectorLength) + vectorBytes := rw.ReadBytesFromBuffer(uint64(vectorLength) * 4) + for i := 0; i < int(vectorLength); i++ { + bits := binary.LittleEndian.Uint32(vectorBytes[i*4 : (i+1)*4]) + ko.Object.Vector[i] = math.Float32frombits(bits) + } + } else { + rw.MoveBufferPositionForward(uint64(vectorLength) * 4) + ko.Object.Vector = nil + } + ko.Vector = ko.Object.Vector + + classNameLen := rw.ReadUint16() + className := string(rw.ReadBytesFromBuffer(uint64(classNameLen))) + + 
propLength := rw.ReadUint32() + var props []byte + if addProp.NoProps { + rw.MoveBufferPositionForward(uint64(propLength)) + } else { + props = rw.ReadBytesFromBuffer(uint64(propLength)) + } + + var meta []byte + metaLength := rw.ReadUint32() + if addProp.Classification || len(addProp.ModuleParams) > 0 { + meta = rw.ReadBytesFromBuffer(uint64(metaLength)) + } else { + rw.MoveBufferPositionForward(uint64(metaLength)) + } + + vectorWeightsLength := rw.ReadUint32() + vectorWeights := rw.ReadBytesFromBuffer(uint64(vectorWeightsLength)) + + if len(addProp.Vectors) > 0 { + vectors, err := unmarshalTargetVectors(&rw) + if err != nil { + return nil, err + } + ko.Vectors = vectors + + if vectors != nil { + // If parseObject is called, ko.Object will be overwritten making this effectively a + // no-op, but I'm leaving it here for now to avoid breaking anything. + ko.Object.Vectors = make(models.Vectors) + for vecName, vec := range vectors { + ko.Object.Vectors[vecName] = vec + } + } + } else { + if rw.Position < uint64(len(rw.Buffer)) { + _ = rw.ReadBytesFromBufferWithUint32LengthIndicator() + targetVectorsSegmentLength := rw.ReadUint32() + pos := rw.Position + rw.MoveBufferToAbsolutePosition(pos + uint64(targetVectorsSegmentLength)) + } + } + + if rw.Position < uint64(len(rw.Buffer)) && len(addProp.Vectors) > 0 { + vectorNamesToUnmarshal := map[string]interface{}{} + for _, name := range addProp.Vectors { + vectorNamesToUnmarshal[name] = nil + } + multiVectors, err := unmarshalMultiVectors(&rw, vectorNamesToUnmarshal) + if err != nil { + return nil, err + } + ko.MultiVectors = multiVectors + + if multiVectors != nil { + // If parseObject is called, ko.Object will be overwritten making this effectively a + // no-op, but I'm leaving it here to match the target vector behavior. 
+ if ko.Object.Vectors == nil { + ko.Object.Vectors = make(models.Vectors) + } + for vecName, vec := range multiVectors { + // assume at this level target vectors and multi vectors won't have the same name + ko.Object.Vectors[vecName] = vec + } + } + } + + // some object members need additional "enrichment". Only do this if necessary, ie if they are actually present + if len(props) > 0 || + len(meta) > 0 || + vectorWeightsLength > 0 && + !( // if the length is 4 and the encoded value is "null" (in ascii), vectorweights are not actually present + vectorWeightsLength == 4 && + vectorWeights[0] == 110 && // n + vectorWeights[1] == 117 && // u + vectorWeights[2] == 108 && // l + vectorWeights[3] == 108) { // l + + if err := ko.parseObject( + uuidParsed, + createTime, + updateTime, + className, + props, + meta, + vectorWeights, + properties, + propLength, + ); err != nil { + return nil, errors.Wrap(err, "parse") + } + } else { + ko.Object.ID = uuidParsed + ko.Object.CreationTimeUnix = createTime + ko.Object.LastUpdateTimeUnix = updateTime + ko.Object.Class = className + } + + return ko, nil +} + +type PropertyExtraction struct { + PropertyPaths [][]string +} + +func NewPropExtraction() *PropertyExtraction { + return &PropertyExtraction{ + PropertyPaths: [][]string{}, + } +} + +func (pe *PropertyExtraction) Add(props ...string) *PropertyExtraction { + for i := range props { + pe.PropertyPaths = append(pe.PropertyPaths, []string{props[i]}) + } + return pe +} + +type bucket interface { + GetBySecondary(int, []byte) ([]byte, error) + GetBySecondaryWithBuffer(int, []byte, []byte) ([]byte, []byte, error) +} + +func ObjectsByDocID(bucket bucket, ids []uint64, + additional additional.Properties, properties []string, logger logrus.FieldLogger, +) ([]*Object, error) { + if len(ids) == 1 { // no need to try to run concurrently if there is just one result anyway + return objectsByDocIDSequential(bucket, ids, additional, properties) + } + + return objectsByDocIDParallel(bucket, ids, 
additional, properties, logger) +} + +func objectsByDocIDParallel(bucket bucket, ids []uint64, + addProp additional.Properties, properties []string, logger logrus.FieldLogger, +) ([]*Object, error) { + parallel := 2 * runtime.GOMAXPROCS(0) + + out := make([]*Object, len(ids)) + + chunkSize := max(int(math.Ceil(float64(len(ids))/float64(parallel))), 1) + + eg := errwrap.NewErrorGroupWrapper(logger) + + // prevent unbounded concurrency on massive chunks + // it's fine to use a multiple of GOMAXPROCS here, as the goroutines are + // mostly IO-bound + eg.SetLimit(parallel) + for chunk := 0; chunk < parallel; chunk++ { + start := chunk * chunkSize + end := start + chunkSize + if end > len(ids) { + end = len(ids) + } + + if start >= len(ids) { + break + } + + eg.Go(func() error { + objs, err := objectsByDocIDSequential(bucket, ids[start:end], addProp, properties) + if err != nil { + return err + } + copy(out[start:start+len(objs)], objs) + return nil + }) + } + + if err := eg.Wait(); err != nil { + return nil, err + } + + // fix gaps in the output array + j := 0 + for i := range out { + if out[i] != nil { + out[j] = out[i] + j++ + } + } + + return out[:j], nil +} + +func objectsByDocIDSequential(bucket bucket, ids []uint64, + additional additional.Properties, properties []string, +) ([]*Object, error) { + if bucket == nil { + return nil, fmt.Errorf("objects bucket not found") + } + + var ( + docIDBuf = make([]byte, 8) + out = make([]*Object, len(ids)) + i = 0 + lsmBuf = bufPool.Get() + ) + + defer func() { + bufPool.Put(lsmBuf) + }() + + var props *PropertyExtraction = nil + // not all code paths forward the list of properties that should be extracted - if nil is passed fall back + if properties != nil { + propertyPaths := make([][]string, len(properties)) + for j := range properties { + propertyPaths[j] = []string{properties[j]} + } + + props = &PropertyExtraction{ + PropertyPaths: propertyPaths, + } + } + + for _, id := range ids { + 
binary.LittleEndian.PutUint64(docIDBuf, id) + res, newBuf, err := bucket.GetBySecondaryWithBuffer(0, docIDBuf, lsmBuf) + if err != nil { + return nil, err + } + + lsmBuf = newBuf // may have changed, e.g. because it was grown + + // If there is a crash and WAL recovery, the inverted index may have objects that are not in the objects bucket. + // This is an issue that needs to be fixed, but for now we need to reduce the huge amount of log messages that + // are generated by this issue. Logging the first time we encounter a missing object in a query still resulted + // in a huge amount of log messages and it will happen on all queries, so we not log at all for now. + // The user has already been alerted about ppossible data loss when the WAL recovery happened. + // TODO: consider deleting these entries from the inverted index and alerting the user + if res == nil { + continue + } + + unmarshalled, err := FromBinaryOptional(res, additional, props) + if err != nil { + return nil, errors.Wrapf(err, "unmarshal data object at position %d", i) + } + + out[i] = unmarshalled + i++ + } + + return out[:i], nil +} + +func (ko *Object) Class() schema.ClassName { + return schema.ClassName(ko.Object.Class) +} + +func (ko *Object) SetDocID(id uint64) { + ko.DocID = id +} + +func (ko *Object) GetDocID() uint64 { + return ko.DocID +} + +func (ko *Object) CreationTimeUnix() int64 { + return ko.Object.CreationTimeUnix +} + +func (ko *Object) ExplainScore() string { + props := ko.AdditionalProperties() + if props != nil { + iface := props["explainScore"] + if iface != nil { + return iface.(string) + } + } + return "" +} + +func (ko *Object) ID() strfmt.UUID { + return ko.Object.ID +} + +func (ko *Object) SetID(id strfmt.UUID) { + ko.Object.ID = id +} + +func (ko *Object) SetClass(class string) { + ko.Object.Class = class +} + +func (ko *Object) LastUpdateTimeUnix() int64 { + return ko.Object.LastUpdateTimeUnix +} + +// AdditionalProperties groups all properties which are stored with the 
// object and not generated at runtime
func (ko *Object) AdditionalProperties() models.AdditionalProperties {
	return ko.Object.Additional
}

// Properties returns the user-defined property values of the underlying
// models.Object.
func (ko *Object) Properties() models.PropertySchema {
	return ko.Object.Properties
}

// PropertiesWithAdditional returns the object's properties. Unless
// additional.RefMeta is requested, the Classification meta info is stripped
// in place from every reference property (models.MultipleRef).
func (ko *Object) PropertiesWithAdditional(
	additional additional.Properties,
) models.PropertySchema {
	properties := ko.Properties()

	if additional.RefMeta {
		// nothing to remove
		return properties
	}

	asMap, ok := properties.(map[string]interface{})
	if !ok || asMap == nil {
		// not in map form (or nil) - nothing we can strip
		return properties
	}

	for propName, value := range asMap {
		asRefs, ok := value.(models.MultipleRef)
		if !ok {
			// not a ref, we can skip
			continue
		}

		for i := range asRefs {
			asRefs[i].Classification = nil
		}

		asMap[propName] = asRefs
	}

	return asMap
}

// SetProperties replaces the object's property schema.
func (ko *Object) SetProperties(schema models.PropertySchema) {
	ko.Object.Properties = schema
}

// VectorWeights returns the vector weights stored on the underlying object.
func (ko *Object) VectorWeights() models.VectorWeights {
	return ko.Object.VectorWeights
}

// SearchResult converts the object into a search.Result for the given tenant.
// NOTE: this mutates the receiver - the "id" key is injected into the
// property map and the (possibly ref-stripped) map is written back via
// SetProperties.
func (ko *Object) SearchResult(additional additional.Properties, tenant string) *search.Result {
	propertiesMap, ok := ko.PropertiesWithAdditional(additional).(map[string]interface{})
	if !ok || propertiesMap == nil {
		propertiesMap = map[string]interface{}{}
	}
	propertiesMap["id"] = ko.ID()
	ko.SetProperties(propertiesMap)

	// only forward the additional properties which were explicitly requested
	// via the additional.Properties argument
	additionalProperties := models.AdditionalProperties{}
	if ko.AdditionalProperties() != nil {
		if interpretation, ok := additional.ModuleParams["interpretation"]; ok {
			if interpretationValue, ok := interpretation.(bool); ok && interpretationValue {
				additionalProperties["interpretation"] = ko.AdditionalProperties()["interpretation"]
			}
		}
		if additional.Classification {
			additionalProperties["classification"] = ko.AdditionalProperties()["classification"]
		}
		if additional.Group {
			additionalProperties["group"] = ko.AdditionalProperties()["group"]
		}
	}
	if ko.ExplainScore() != "" {
		additionalProperties["explainScore"] = ko.ExplainScore()
	}

	return &search.Result{
		ID:        ko.ID(),
		DocID:     &ko.DocID,
		ClassName: ko.Class().String(),
		Schema:    ko.Properties(),
		Vector:    ko.Vector,
		Vectors:   ko.asVectors(ko.Vectors, ko.MultiVectors),
		Dims:      ko.VectorLen,
		// VectorWeights: ko.VectorWeights(), // TODO: add vector weights
		Created:              ko.CreationTimeUnix(),
		Updated:              ko.LastUpdateTimeUnix(),
		AdditionalProperties: additionalProperties,
		// Score is filled in later
		ExplainScore: ko.ExplainScore(),
		IsConsistent: ko.IsConsistent,
		Tenant:       tenant, // not part of the binary
		// TODO: Beacon?
	}
}

// asVectors merges named vectors and named multi-vectors into a single
// models.Vectors map, or returns nil if there are none of either kind.
func (ko *Object) asVectors(vectors map[string][]float32, multiVectors map[string][][]float32) models.Vectors {
	if (len(vectors) + len(multiVectors)) > 0 {
		out := make(models.Vectors)
		for targetVector, vector := range vectors {
			out[targetVector] = vector
		}
		for targetVector, vector := range multiVectors {
			out[targetVector] = vector
		}
		return out
	}
	return nil
}

// GetVectors returns all named vectors (regular and multi) of the object.
func (ko *Object) GetVectors() models.Vectors {
	return ko.asVectors(ko.Vectors, ko.MultiVectors)
}

// SearchResultWithDist builds a search.Result (empty tenant) and fills in the
// given distance plus the certainty derived from it.
func (ko *Object) SearchResultWithDist(addl additional.Properties, dist float32) search.Result {
	res := ko.SearchResult(addl, "")
	res.Dist = dist
	res.Certainty = float32(additional.DistToCertainty(float64(dist)))
	return *res
}

// SearchResultWithScore builds a search.Result (empty tenant) with the given
// score attached.
func (ko *Object) SearchResultWithScore(addl additional.Properties, score float32) search.Result {
	res := ko.SearchResult(addl, "")
	res.Score = score
	return *res
}

// SearchResultWithScoreAndTenant builds a search.Result for the given tenant
// with the given score attached.
func (ko *Object) SearchResultWithScoreAndTenant(addl additional.Properties, score float32, tenant string) search.Result {
	res := ko.SearchResult(addl, tenant)
	res.Score = score
	return *res
}

// Valid reports whether the object has both a non-empty ID and a non-empty
// class name.
func (ko *Object) Valid() bool {
	return ko.ID() != "" &&
		ko.Class().String() != ""
}

// IterateThroughVectorDimensions iterates through all vectors present on the Object and invokes
// the callback with target name and dimensions of the vector.
func (ko *Object) IterateThroughVectorDimensions(f func(targetVector string, dims int) error) error {
	// the unnamed (legacy) vector is reported under the empty target name
	if len(ko.Vector) > 0 {
		if err := f("", len(ko.Vector)); err != nil {
			return err
		}
	}

	for targetVector, vector := range ko.Vectors {
		if err := f(targetVector, len(vector)); err != nil {
			return err
		}
	}

	// for multi-vectors the dimensions of all member vectors are summed up
	for targetVector, vectors := range ko.MultiVectors {
		var dims int
		for _, vector := range vectors {
			dims += len(vector)
		}
		if err := f(targetVector, dims); err != nil {
			return err
		}
	}
	return nil
}

// SearchResults converts a list of objects into search.Results for the given
// tenant, see (*Object).SearchResult for details.
func SearchResults(in []*Object, additional additional.Properties, tenant string) search.Results {
	out := make(search.Results, len(in))

	for i, elem := range in {
		out[i] = *(elem.SearchResult(additional, tenant))
	}

	return out
}

// SearchResultsWithScore converts a list of objects into search.Results,
// attaching scores[i] to the i-th result. Missing scores default to 0.
func SearchResultsWithScore(in []*Object, scores []float32, additional additional.Properties, tenant string) search.Results {
	out := make(search.Results, len(in))

	for i, elem := range in {
		score := float32(0.0)
		if len(scores) > i {
			score = scores[i]
		}
		out[i] = elem.SearchResultWithScoreAndTenant(additional, score, tenant)
	}

	return out
}

// SearchResultsWithDists converts a list of objects into search.Results,
// attaching dists[i] to the i-th result. dists must be at least as long as in.
func SearchResultsWithDists(in []*Object, addl additional.Properties,
	dists []float32,
) search.Results {
	out := make(search.Results, len(in))

	for i, elem := range in {
		out[i] = elem.SearchResultWithDist(addl, dists[i])
	}

	return out
}

// DocIDFromBinary extracts just the doc id from a marshalled object without
// unmarshalling the rest of the payload.
func DocIDFromBinary(in []byte) (uint64, error) {
	if len(in) < 9 {
		return 0, errors.Errorf("binary data too short")
	}
	// first byte is the marshaller version, then 8 bytes for the docID
	// (see the version-1 layout documented on MarshalBinary)
	return binary.LittleEndian.Uint64(in[1:9]), nil
}

// DocIDAndTimeFromBinary extracts the doc id and the last-update timestamp
// from a marshalled object without unmarshalling the full payload.
func DocIDAndTimeFromBinary(in []byte) (docID uint64, updateTime int64, err error) {
	r := bytes.NewReader(in)

	var version uint8

	le := binary.LittleEndian

	if err := binary.Read(r, le, &version); err != nil {
		return 0, 0, err
	}

	if version != 1 {
		return 0, 0, errors.Errorf("unsupported binary marshaller version %d", version)
	}

	err = binary.Read(r, le, &docID)
	if err != nil {
		return 0, 0, err
	}

	var buf [1 + 16 + 8 + 8]byte // kind uuid createtime updatetime

	_, err = io.ReadFull(r, buf[:])
	if err != nil {
		return 0, 0, err
	}

	// the update time is the final 8 bytes of the fixed-size header section
	updateTime = int64(binary.LittleEndian.Uint64(buf[1+16+8:]))

	return docID, updateTime, nil
}

// MarshalBinary creates the binary representation of a kind object. Regardless
// of the marshaller version the first byte is a uint8 indicating the version
// followed by the payload which depends on the specific version
//
// Version 1
// No. of B      | Type                     | Content
// --------------------------------------------------------------
// 1             | uint8                    | MarshallerVersion = 1
// 8             | uint64                   | index id, keep early so id-only lookups are maximum efficient
// 1             | uint8                    | kind, 0=action, 1=thing - deprecated
// 16            | uint128                  | uuid
// 8             | int64                    | create time
// 8             | int64                    | update time
// 2             | uint16                   | VectorLength
// n*4           | []float32                | vector of length n
// 2             | uint16                   | length of class name
// n             | []byte                   | className
// 4             | uint32                   | length of schema json
// n             | []byte                   | schema as json
// 4             | uint32                   | length of meta json
// n             | []byte                   | meta as json
// 4             | uint32                   | length of vectorweights json
// n             | []byte                   | vectorweights as json
// 4             | uint32                   | length of packed target vectors offsets (in bytes)
// n             | []byte                   | packed target vectors offsets map { name : offset_in_bytes }
// 4             | uint32                   | length of target vectors segment (in bytes)
// n             | uint16+[]byte            | target vectors segment: sequence of vec_length + vec (uint16 + []byte), (uint16 + []byte) ...
// 4             | uint32                   | length of packed multivector offsets (in bytes)
// n             | []byte                   | packed multivector offsets map { name : offset_in_bytes }
// 4             | uint32                   | length of multivectors segment (in bytes)
// 4 + (2 + n*4) | uint32 + (uint16+[]byte) | multivectors segment: num vecs + (vec length + vec floats), ...
// TODO vec lengths immediately following num vecs so you can jump straight to specific vec?

// Upper bounds for each variable-length field in the version-1 binary
// format. They are dictated by the width of the length prefix each field
// is written with (uint16 or uint32, see the layout on MarshalBinary).
const (
	maxVectorLength               int = math.MaxUint16
	maxClassNameLength            int = math.MaxUint16
	maxSchemaLength               int = math.MaxUint32
	maxMetaLength                 int = math.MaxUint32
	maxVectorWeightsLength        int = math.MaxUint32
	maxTargetVectorsSegmentLength int = math.MaxUint32
	maxTargetVectorsOffsetsLength int = math.MaxUint32
	maxMultiVectorsSegmentLength  int = math.MaxUint32
	maxMultiVectorsOffsetsLength  int = math.MaxUint32
)

// MarshalBinary serializes the object into the version-1 binary format
// documented above. It returns an error if the marshaller version is not 1
// or if any variable-length field exceeds its length-prefix capacity.
func (ko *Object) MarshalBinary() ([]byte, error) {
	if ko.MarshallerVersion != 1 {
		return nil, errors.Errorf("unsupported marshaller version %d", ko.MarshallerVersion)
	}

	kindByte := uint8(0)
	// Deprecated Kind field
	kindByte = 1

	idParsed, err := uuid.Parse(ko.ID().String())
	if err != nil {
		return nil, err
	}
	idBytes, err := idParsed.MarshalBinary()
	if err != nil {
		return nil, err
	}

	if len(ko.Vector) > maxVectorLength {
		return nil, fmt.Errorf("could not marshal '%s' max length exceeded (%d/%d)", "vector", len(ko.Vector), maxVectorLength)
	}
	vectorLength := uint32(len(ko.Vector))

	className := []byte(ko.Class())
	if len(className) > maxClassNameLength {
		return nil, fmt.Errorf("could not marshal '%s' max length exceeded (%d/%d)", "className", len(className), maxClassNameLength)
	}
	classNameLength := uint32(len(className))

	schema, err := json.Marshal(ko.Properties())
	if err != nil {
		return nil, err
	}
	if len(schema) > maxSchemaLength {
		return nil, fmt.Errorf("could not marshal '%s' max length exceeded (%d/%d)", "schema", len(schema), maxSchemaLength)
	}
	schemaLength := uint32(len(schema))

	meta, err := json.Marshal(ko.AdditionalProperties())
	if err != nil {
		return nil, err
	}
	if len(meta) > maxMetaLength {
		return nil, fmt.Errorf("could not marshal '%s' max length exceeded (%d/%d)", "meta", len(meta), maxMetaLength)
	}
	metaLength := uint32(len(meta))

	vectorWeights, err := json.Marshal(ko.VectorWeights())
	if err != nil {
		return nil, err
	}
	if len(vectorWeights) > maxVectorWeightsLength {
		return nil, fmt.Errorf("could not marshal '%s' max length exceeded (%d/%d)", "vectorWeights", len(vectorWeights), maxVectorWeightsLength)
	}
	vectorWeightsLength := uint32(len(vectorWeights))

	var targetVectorsOffsets []byte
	var targetVectorsOffsetsLength uint32
	var targetVectorsSegmentLength int

	// targetVectorsOffsetOrder records the map iteration order so that the
	// segment written further below matches the offsets packed into offsetsMap
	targetVectorsOffsetOrder := make([]string, 0, len(ko.Vectors))
	if len(ko.Vectors) > 0 {
		offsetsMap := map[string]uint32{}
		for name, vec := range ko.Vectors {
			if len(vec) > maxVectorLength {
				return nil, fmt.Errorf("could not marshal '%s' max length exceeded (%d/%d)", "vector", len(vec), maxVectorLength)
			}

			offsetsMap[name] = uint32(targetVectorsSegmentLength)
			targetVectorsSegmentLength += 2 + 4*len(vec) // 2 for vec length + vec bytes

			if targetVectorsSegmentLength > maxTargetVectorsSegmentLength {
				return nil,
					fmt.Errorf("could not marshal '%s' max length exceeded (%d/%d)",
						"targetVectorsSegmentLength", targetVectorsSegmentLength, maxTargetVectorsSegmentLength)
			}

			targetVectorsOffsetOrder = append(targetVectorsOffsetOrder, name)
		}

		targetVectorsOffsets, err = msgpack.Marshal(offsetsMap)
		if err != nil {
			return nil, fmt.Errorf("could not marshal target vectors offsets: %w", err)
		}
		if len(targetVectorsOffsets) > maxTargetVectorsOffsetsLength {
			return nil, fmt.Errorf("could not marshal '%s' max length exceeded (%d/%d)", "targetVectorsOffsets", len(targetVectorsOffsets), maxTargetVectorsOffsetsLength)
		}
		targetVectorsOffsetsLength = uint32(len(targetVectorsOffsets))
	}

	var multiVectorsOffsets []byte
	var multiVectorsOffsetsLength uint32
	var multiVectorsSegmentLength int

	// same pattern as above: remember iteration order so segment write order
	// agrees with the packed offsets
	multiVectorsOffsetOrder := make([]string, 0, len(ko.MultiVectors))
	if len(ko.MultiVectors) > 0 {
		offsetsMap := map[string]uint32{}
		for name, vecs := range ko.MultiVectors {
			offsetsMap[name] = uint32(multiVectorsSegmentLength)
			// 4 bytes for number of vectors
			multiVectorsSegmentLength += 4
			for _, vec := range vecs {
				if len(vec) > maxVectorLength {
					return nil, fmt.Errorf("could not marshal '%s' max length exceeded (%d/%d)", "vector", len(vec), maxVectorLength)
				}
				// 2 bytes for vec length and 4 bytes per float32
				multiVectorsSegmentLength += 2 + 4*len(vec)

				if multiVectorsSegmentLength > maxMultiVectorsSegmentLength {
					return nil,
						fmt.Errorf("could not marshal '%s' max length exceeded (%d/%d)",
							"multiVectorsSegmentLength", multiVectorsSegmentLength, maxMultiVectorsSegmentLength)
				}
			}
			multiVectorsOffsetOrder = append(multiVectorsOffsetOrder, name)
		}

		multiVectorsOffsets, err = msgpack.Marshal(offsetsMap)
		if err != nil {
			return nil, fmt.Errorf("could not marshal multi vectors offsets: %w", err)
		}
		if len(multiVectorsOffsets) > maxMultiVectorsOffsetsLength {
			return nil, fmt.Errorf("could not marshal '%s' max length exceeded (%d/%d)", "multiVectorsOffsets", len(multiVectorsOffsets), maxMultiVectorsOffsetsLength)
		}
		multiVectorsOffsetsLength = uint32(len(multiVectorsOffsets))
	}

	// 1+8+1+16+8+8 is the fixed header: version, docID, kind, uuid,
	// create time, update time (see layout comment)
	totalBufferLength := 1 + 8 + 1 + 16 + 8 + 8 +
		2 + vectorLength*4 +
		2 + classNameLength +
		4 + schemaLength +
		4 + metaLength +
		4 + vectorWeightsLength +
		4 + targetVectorsOffsetsLength +
		4 + uint32(targetVectorsSegmentLength) +
		4 + multiVectorsOffsetsLength +
		4 + uint32(multiVectorsSegmentLength)

	byteBuffer := make([]byte, totalBufferLength)
	rw := byteops.NewReadWriter(byteBuffer)
	rw.WriteByte(ko.MarshallerVersion)
	rw.WriteUint64(ko.DocID)
	rw.WriteByte(kindByte)

	rw.CopyBytesToBuffer(idBytes)

	rw.WriteUint64(uint64(ko.CreationTimeUnix()))
	rw.WriteUint64(uint64(ko.LastUpdateTimeUnix()))
	rw.WriteUint16(uint16(vectorLength))

	for j := uint32(0); j < vectorLength; j++ {
		rw.WriteUint32(math.Float32bits(ko.Vector[j]))
	}

	rw.WriteUint16(uint16(classNameLength))
	err = rw.CopyBytesToBuffer(className)
	if err != nil {
		return byteBuffer, errors.Wrap(err, "Could not copy className")
	}

	rw.WriteUint32(schemaLength)
	err = rw.CopyBytesToBuffer(schema)
	if err != nil {
		return byteBuffer, errors.Wrap(err, "Could not copy schema")
	}

	rw.WriteUint32(metaLength)
	err = rw.CopyBytesToBuffer(meta)
	if err != nil {
		return byteBuffer, errors.Wrap(err, "Could not copy meta")
	}

	rw.WriteUint32(vectorWeightsLength)
	err = rw.CopyBytesToBuffer(vectorWeights)
	if err != nil {
		return byteBuffer, errors.Wrap(err, "Could not copy vectorWeights")
	}

	rw.WriteUint32(targetVectorsOffsetsLength)
	if targetVectorsOffsetsLength > 0 {
		err = rw.CopyBytesToBuffer(targetVectorsOffsets)
		if err != nil {
			return byteBuffer, errors.Wrap(err, "Could not copy targetVectorsOffsets")
		}
	}

	// write the target vector segment in the exact order recorded while
	// computing the offsets above
	rw.WriteUint32(uint32(targetVectorsSegmentLength))
	for _, name := range targetVectorsOffsetOrder {
		vec := ko.Vectors[name]
		vecLen := len(vec)

		rw.WriteUint16(uint16(vecLen))
		for j := 0; j < vecLen; j++ {
			rw.WriteUint32(math.Float32bits(vec[j]))
		}
	}

	rw.WriteUint32(multiVectorsOffsetsLength)
	if multiVectorsOffsetsLength > 0 {
		err = rw.CopyBytesToBuffer(multiVectorsOffsets)
		if err != nil {
			return byteBuffer, errors.Wrap(err, "Could not copy multiVectorsOffsets")
		}
	}

	// multi-vector segment, again in recorded offset order
	rw.WriteUint32(uint32(multiVectorsSegmentLength))
	for _, name := range multiVectorsOffsetOrder {
		vecs := ko.MultiVectors[name]
		rw.WriteUint32(uint32(len(vecs)))
		for _, vec := range vecs {
			vecLen := len(vec)
			rw.WriteUint16(uint16(vecLen))
			for j := 0; j < vecLen; j++ {
				rw.WriteUint32(math.Float32bits(vec[j]))
			}
		}
	}

	return byteBuffer, nil
}

// UnmarshalPropertiesFromObject accepts marshaled object as data and populates resultProperties map with the properties specified by propertyPaths.
//
// Check MarshalBinary for the order of elements in the input array
func UnmarshalPropertiesFromObject(data []byte, resultProperties map[string]interface{}, propertyPaths [][]string) error {
	if data[0] != uint8(1) {
		return errors.Errorf("unsupported binary marshaller version %d", data[0])
	}

	// clear out old values in case an object misses values. This should NOT shrink the capacity of the map, eg there
	// are no allocations when adding the resultProperties of the next object again
	clear(resultProperties)

	// skip the fixed-size header: version + docID + kind + uuid + create time + update time
	startPos := uint64(1 + 8 + 1 + 16 + 8 + 8) // elements at the start
	rw := byteops.NewReadWriterWithOps(data, byteops.WithPosition(startPos))
	// get the length of the vector, each element is a float32 (4 bytes)
	vectorLength := uint64(rw.ReadUint16())
	rw.MoveBufferPositionForward(vectorLength * 4)
	classnameLength := uint64(rw.ReadUint16())
	rw.MoveBufferPositionForward(classnameLength)
	propertyLength := uint64(rw.ReadUint32())

	// hand only the schema-json slice over to the property unmarshaller
	return UnmarshalProperties(rw.Buffer[rw.Position:rw.Position+propertyLength], resultProperties, propertyPaths)
}

// UnmarshalProperties accepts serialized properties as data and populates the given properties map with the properties specified by propertyPaths.
func UnmarshalProperties(data []byte, properties map[string]interface{}, propertyPaths [][]string) error {
	// errors inside the jsonparser callbacks cannot be returned directly,
	// they are captured in returnError and returned at the end
	var returnError error
	jsonparser.EachKey(data, func(idx int, value []byte, dataType jsonparser.ValueType, err error) {
		// the last element of the path is the property name used as map key
		propertyName := propertyPaths[idx][len(propertyPaths[idx])-1]

		switch dataType {
		case jsonparser.Number, jsonparser.String, jsonparser.Boolean:
			val, err := parseValues(dataType, value)
			if err != nil {
				returnError = err
			}
			properties[propertyName] = val
		case jsonparser.Array: // can be a beacon or an actual array
			arrayEntries := value[1 : len(value)-1] // without leading and trailing []
			// this checks if refs are present - the return points to the underlying memory, dont use without copying
			_, errBeacon := jsonparser.GetUnsafeString(arrayEntries, "beacon")
			if errBeacon == nil {
				// there can be more than one
				var beacons []interface{}
				handler := func(beaconByte []byte, dataType jsonparser.ValueType, offset int, err error) {
					beaconVal, err2 := jsonparser.GetString(beaconByte, "beacon") // this points to the underlying memory
					returnError = err2
					beacons = append(beacons, map[string]interface{}{"beacon": beaconVal})
				}
				_, returnError = jsonparser.ArrayEach(value, handler)
				properties[propertyName] = beacons
			} else {
				// check how many entries there are in the array by counting the ",". This allows us to allocate an
				// array with the right size without extending it with every append.
				// The size can be too large for string arrays, when they contain "," as part of their content.
				entryCount := 0
				for _, b := range arrayEntries {
					if b == uint8(44) { // ',' as byte
						entryCount++
					}
				}

				array := make([]interface{}, 0, entryCount)
				_, err = jsonparser.ArrayEach(value, func(innerValue []byte, innerDataType jsonparser.ValueType, offset int, innerErr error) {
					var val interface{}

					switch innerDataType {
					case jsonparser.Number, jsonparser.String, jsonparser.Boolean:
						val, err = parseValues(innerDataType, innerValue)
						if err != nil {
							returnError = err
							return
						}
					case jsonparser.Object:
						// no schema available for nested objects, fall back
						// to encoding/json
						nestedProps := map[string]interface{}{}
						err := json.Unmarshal(innerValue, &nestedProps)
						if err != nil {
							returnError = err
							return
						}
						val = nestedProps
					default:
						returnError = fmt.Errorf("unknown data type ArrayEach %v", innerDataType)
						return
					}
					array = append(array, val)
				})
				if err != nil {
					returnError = err
				}
				properties[propertyName] = array

			}
		case jsonparser.Object:
			// nested objects and geo-props and phonenumbers.
			//
			// we do not have the schema for nested object and cannot use the efficient jsonparser for them
			// (we could for phonenumbers and geo-props but they are not worth the effort)
			// however this part is only called if
			// - one of the datatypes is present
			// - AND the user requests them
			// => the performance impact is minimal
			nestedProps := map[string]interface{}{}
			err := json.Unmarshal(value, &nestedProps)
			if err != nil {
				returnError = err
			}
			properties[propertyName] = nestedProps
		default:
			returnError = fmt.Errorf("unknown data type %v", dataType)
		}
	}, propertyPaths...)

	return returnError
}

// parseValues converts a raw jsonparser scalar into its Go value
// (float64, string or bool). Other data types are programmer errors
// and panic.
func parseValues(dt jsonparser.ValueType, value []byte) (interface{}, error) {
	switch dt {
	case jsonparser.Number:
		return jsonparser.ParseFloat(value)
	case jsonparser.String:
		return jsonparser.ParseString(value)
	case jsonparser.Boolean:
		return jsonparser.ParseBoolean(value)
	default:
		panic("Unknown data type") // returning an error would be better
	}
}

// UnmarshalBinary is the versioned way to unmarshal a kind object from binary,
// see MarshalBinary for the exact contents of each version
func (ko *Object) UnmarshalBinary(data []byte) error {
	version := data[0]
	if version != 1 {
		return errors.Errorf("unsupported binary marshaller version %d", version)
	}
	ko.MarshallerVersion = version

	rw := byteops.NewReadWriterWithOps(data, byteops.WithPosition(1))
	ko.DocID = rw.ReadUint64()
	rw.MoveBufferPositionForward(1) // kind-byte

	uuidParsed, err := uuid.FromBytes(data[rw.Position : rw.Position+16])
	if err != nil {
		return err
	}
	rw.MoveBufferPositionForward(16)

	createTime := int64(rw.ReadUint64())
	updateTime := int64(rw.ReadUint64())

	vectorLength := rw.ReadUint16()
	ko.VectorLen = int(vectorLength)
	ko.Vector = make([]float32, vectorLength)
	for j := 0; j < int(vectorLength); j++ {
		ko.Vector[j] = math.Float32frombits(rw.ReadUint32())
	}

	classNameLength := uint64(rw.ReadUint16())
	className, err := rw.CopyBytesFromBuffer(classNameLength, nil)
	if err != nil {
		return errors.Wrap(err, "Could not copy class name")
	}

	schemaLength := uint64(rw.ReadUint32())
	schema, err := rw.CopyBytesFromBuffer(schemaLength, nil)
	if err != nil {
		return errors.Wrap(err, "Could not copy schema")
	}

	metaLength := uint64(rw.ReadUint32())
	meta, err := rw.CopyBytesFromBuffer(metaLength, nil)
	if err != nil {
		return errors.Wrap(err, "Could not copy meta")
	}

	vectorWeightsLength := uint64(rw.ReadUint32())
	vectorWeights, err := rw.CopyBytesFromBuffer(vectorWeightsLength, nil)
	if err != nil {
		return errors.Wrap(err, "Could not copy vectorWeights")
	}

	vectors, err := unmarshalTargetVectors(&rw)
	if err != nil {
		return err
	}
	ko.Vectors = vectors

	// nil: unmarshal all multi-vectors, not just a subset of names
	multiVectors, err := unmarshalMultiVectors(&rw, nil)
	if err != nil {
		return err
	}
	ko.MultiVectors = multiVectors

	return ko.parseObject(
		strfmt.UUID(uuidParsed.String()),
		createTime,
		updateTime,
		string(className),
		schema,
		meta,
		vectorWeights, nil, 0,
	)
}

// unmarshalTargetVectors reads the named target vectors section at the
// current read position, or returns nil if the buffer ends before it.
func unmarshalTargetVectors(rw *byteops.ReadWriter) (map[string][]float32, error) {
	// This check prevents from panic when somebody is upgrading from version that
	// didn't have multiple target vector support. This check is needed bc with named vectors
	// feature storage object can have vectors data appended at the end of the file
	if rw.Position < uint64(len(rw.Buffer)) {
		targetVectorsOffsets := rw.ReadBytesFromBufferWithUint32LengthIndicator()
		targetVectorsSegmentLength := rw.ReadUint32()
		pos := rw.Position

		if len(targetVectorsOffsets) > 0 {
			var tvOffsets map[string]uint32
			if err := msgpack.Unmarshal(targetVectorsOffsets, &tvOffsets); err != nil {
				return nil, fmt.Errorf("could not unmarshal target vectors offset: %w", err)
			}

			targetVectors := map[string][]float32{}
			for name, offset := range tvOffsets {
				// jump to this vector's position inside the segment
				rw.MoveBufferToAbsolutePosition(pos + uint64(offset))
				vecLen := rw.ReadUint16()
				vec := make([]float32, vecLen)
				for j := uint16(0); j < vecLen; j++ {
					vec[j] = math.Float32frombits(rw.ReadUint32())
				}
				targetVectors[name] = vec
			}

			// leave the read position right after the segment for the caller
			rw.MoveBufferToAbsolutePosition(pos + uint64(targetVectorsSegmentLength))
			return targetVectors, nil
		}
	}
	return nil, nil
}

// unmarshalMultiVectors unmarshals the multi vectors from the buffer. If onlyUnmarshalNames is set and non-empty,
// then only the multivectors which names specified as the map's keys will be unmarshaled.
func unmarshalMultiVectors(
	rw *byteops.ReadWriter,
	onlyUnmarshalNames map[string]interface{},
) (map[string][][]float32, error) {
	// This check prevents from panic when somebody is upgrading from version that
	// didn't have multi vector support. This check is needed bc with the multi vectors
	// feature the storage object can have vectors data appended at the end of the file
	if rw.Position < uint64(len(rw.Buffer)) {
		multiVectorsOffsets := rw.ReadBytesFromBufferWithUint32LengthIndicator()
		multiVectorsSegmentLength := rw.ReadUint32()
		pos := rw.Position

		if len(multiVectorsOffsets) > 0 {
			var mvOffsets map[string]uint32
			if err := msgpack.Unmarshal(multiVectorsOffsets, &mvOffsets); err != nil {
				return nil, fmt.Errorf("could not unmarshal multi vectors offset: %w", err)
			}

			// NOTE if you sort mvOffsets by offset, you may be able to speed this up via
			// sequential reads, haven't tried this yet
			multiVectors := map[string][][]float32{}
			for name, offset := range mvOffsets {
				// if onlyUnmarshalNames is not nil and non-empty, only unmarshal the vectors
				// for the names in the map
				if len(onlyUnmarshalNames) > 0 {
					if _, ok := onlyUnmarshalNames[name]; !ok {
						continue
					}
				}
				rw.MoveBufferToAbsolutePosition(pos + uint64(offset))
				numVecs := rw.ReadUint32()
				vecs := make([][]float32, 0)
				for i := 0; i < int(numVecs); i++ {
					vecLen := rw.ReadUint16()
					vec := make([]float32, vecLen)
					for j := uint16(0); j < vecLen; j++ {
						vec[j] = math.Float32frombits(rw.ReadUint32())
					}
					vecs = append(vecs, vec)
				}
				multiVectors[name] = vecs
			}

			// leave the read position right after the segment for the caller
			rw.MoveBufferToAbsolutePosition(pos + uint64(multiVectorsSegmentLength))
			return multiVectors, nil
		}
	}
	return nil, nil
}

// VectorFromBinary extracts a single vector from a marshalled object without
// unmarshalling the rest of the payload. With an empty targetVector the legacy
// (unnamed) vector is read directly from its fixed offset, optionally reusing
// the provided buffer; otherwise the named target vectors section is decoded
// and the requested vector returned.
func VectorFromBinary(in []byte, buffer []float32, targetVector string) ([]float32, error) {
	if len(in) == 0 {
		return nil, nil
	}

	version := in[0]
	if version != 1 {
		return nil, errors.Errorf("unsupported marshaller version %d", version)
	}

	if targetVector != "" {
		// skip the fixed header (version, docID, kind, uuid, create/update time),
		// then hop over each length-prefixed field until the target vectors section
		startPos := uint64(1 + 8 + 1 + 16 + 8 + 8) // elements at the start
		rw := byteops.NewReadWriterWithOps(in, byteops.WithPosition(startPos))

		vectorLength := uint64(rw.ReadUint16())
		rw.MoveBufferPositionForward(vectorLength * 4)

		classnameLength := uint64(rw.ReadUint16())
		rw.MoveBufferPositionForward(classnameLength)

		schemaLength := uint64(rw.ReadUint32())
		rw.MoveBufferPositionForward(schemaLength)

		metaLength := uint64(rw.ReadUint32())
		rw.MoveBufferPositionForward(metaLength)

		vectorWeightsLength := uint64(rw.ReadUint32())
		rw.MoveBufferPositionForward(vectorWeightsLength)

		targetVectors, err := unmarshalTargetVectors(&rw)
		if err != nil {
			return nil, errors.Errorf("unable to unmarshal vector for target vector: %s", targetVector)
		}
		vector, ok := targetVectors[targetVector]
		if !ok {
			return nil, errors.Errorf("vector not found for target vector: %s", targetVector)
		}
		return vector, nil
	}

	// since we know the version and know that the blob is not len(0), we can
	// assume that we can directly access the vector length field. The only
	// situation where this is not accessible would be on corrupted data - where
	// it would be acceptable to panic
	vecLen := binary.LittleEndian.Uint16(in[42:44])

	// reuse the caller-provided buffer when it is large enough
	var out []float32
	if cap(buffer) >= int(vecLen) {
		out = buffer[:vecLen]
	} else {
		out = make([]float32, vecLen)
	}
	vecStart := 44
	vecEnd := vecStart + int(vecLen*4)

	i := 0
	for start := vecStart; start < vecEnd; start += 4 {
		asUint := binary.LittleEndian.Uint32(in[start : start+4])
		out[i] = math.Float32frombits(asUint)
		i++
	}

	return out, nil
}

// incrementPos advances pos past one length-prefixed field: it reads a
// little-endian length prefix of `size` bytes (2, 4 or 8) at pos and returns
// pos + size + length. An unrecognized size returns pos unchanged.
func incrementPos(in []byte, pos int, size int) int {
	b := in[pos : pos+size]
	if size == 2 {
		length := binary.LittleEndian.Uint16(b)
		pos += size + int(length)	
	} else if size == 4 {
		length := binary.LittleEndian.Uint32(b)
		pos += size + int(length)
		return pos
	} else if size == 8 {
		length := binary.LittleEndian.Uint64(b)
		pos += size + int(length)
	}
	return pos
}

// MultiVectorFromBinary extracts the multi-vector with the given target name
// from a marshalled object without unmarshalling the rest of the payload. The
// buffer parameter is only used as scratch space while skipping the legacy
// vector section.
func MultiVectorFromBinary(in []byte, buffer []float32, targetVector string) ([][]float32, error) {
	if len(in) == 0 {
		return nil, nil
	}

	version := in[0]
	if version != 1 {
		return nil, errors.Errorf("unsupported marshaller version %d", version)
	}

	// since we know the version and know that the blob is not len(0), we can
	// assume that we can directly access the vector length field. The only
	// situation where this is not accessible would be on corrupted data - where
	// it would be acceptable to panic
	vecLen := binary.LittleEndian.Uint16(in[42:44])

	var out []float32
	if cap(buffer) >= int(vecLen) {
		out = buffer[:vecLen]
	} else {
		out = make([]float32, vecLen)
	}
	vecStart := 44
	vecEnd := vecStart + int(vecLen*4)

	// decoded only to advance past the legacy vector; the result is discarded
	i := 0
	for start := vecStart; start < vecEnd; start += 4 {
		asUint := binary.LittleEndian.Uint32(in[start : start+4])
		out[i] = math.Float32frombits(asUint)
		i++
	}

	pos := vecEnd

	// skip the remaining length-prefixed fields up to the multi-vector section
	pos = incrementPos(in, pos, 2) // classNameLength
	pos = incrementPos(in, pos, 4) // schemaLength
	pos = incrementPos(in, pos, 4) // metaLength
	pos = incrementPos(in, pos, 4) // vectorWeightsLength
	pos = incrementPos(in, pos, 4) // bufLen
	pos = incrementPos(in, pos, 4) // targetVectorsSegmentLength

	// multivector
	var multiVectors map[string][][]float32

	if len(in) > pos {
		rw := byteops.NewReadWriterWithOps(in, byteops.WithPosition(uint64(pos)))
		// restrict unmarshalling to the single requested target name
		mv, err := unmarshalMultiVectors(&rw, map[string]interface{}{targetVector: nil})
		if err != nil {
			return nil, errors.Errorf("unable to unmarshal multivector for target vector: %s", targetVector)
		}
		multiVectors = mv
	}

	// a lookup on a nil map is safe and yields ok == false
	mvout, ok := multiVectors[targetVector]
	if !ok {
		return nil, errors.Errorf("vector not found for target vector: %s", targetVector)
	}
	return mvout, nil
}

// parseObject populates ko.Object from the already-split binary sections:
// json-encoded properties (propsB), additional properties (additionalB) and
// vector weights (vectorWeightsB). If properties/propLength are set, only the
// requested property paths are decoded instead of the full schema json.
func (ko *Object) parseObject(uuid strfmt.UUID, create, update int64, className string,
	propsB []byte, additionalB []byte, vectorWeightsB []byte, properties *PropertyExtraction, propLength uint32,
) error {
	var returnProps map[string]interface{}
	if properties == nil || propLength == 0 {
		if err := json.Unmarshal(propsB, &returnProps); err != nil {
			return err
		}
	} else if len(propsB) >= int(propLength) {
		// the properties are not read in all cases, skip if not needed
		returnProps = make(map[string]interface{}, len(properties.PropertyPaths))
		if err := UnmarshalProperties(propsB[:propLength], returnProps, properties.PropertyPaths); err != nil {
			return err
		}
	}

	if err := enrichSchemaTypes(returnProps, false); err != nil {
		return errors.Wrap(err, "enrich schema datatypes")
	}

	var additionalProperties models.AdditionalProperties
	if len(additionalB) > 0 {
		if err := json.Unmarshal(additionalB, &additionalProperties); err != nil {
			return err
		}

		// re-decode the generic "classification" map into its typed struct
		// via a json round-trip
		if prop, ok := additionalProperties["classification"]; ok {
			if classificationMap, ok := prop.(map[string]interface{}); ok {
				marshalled, err := json.Marshal(classificationMap)
				if err != nil {
					return err
				}
				var classification additional.Classification
				err = json.Unmarshal(marshalled, &classification)
				if err != nil {
					return err
				}
				additionalProperties["classification"] = &classification
			}
		}

		// same json round-trip for "group", including each hit's _additional
		if prop, ok := additionalProperties["group"]; ok {
			if groupMap, ok := prop.(map[string]interface{}); ok {
				marshalled, err := json.Marshal(groupMap)
				if err != nil {
					return err
				}
				var group additional.Group
				err = json.Unmarshal(marshalled, &group)
				if err != nil {
					return err
				}

				for i, hit := range group.Hits {
					if groupHitAdditionalMap, ok := hit["_additional"].(map[string]interface{}); ok {
						marshalled, err := json.Marshal(groupHitAdditionalMap)
						if err != nil {
							return err
						}
						var groupHitsAdditional additional.GroupHitAdditional
						err = json.Unmarshal(marshalled, &groupHitsAdditional)
						if err != nil {
							return err
						}
						group.Hits[i]["_additional"] = &groupHitsAdditional
					}
				}

				additionalProperties["group"] = &group
			}
		}
	}

	var vectorWeights interface{}
	if err := json.Unmarshal(vectorWeightsB, &vectorWeights); err != nil {
		return err
	}

	ko.Object = models.Object{
		Class:              className,
		CreationTimeUnix:   create,
		LastUpdateTimeUnix: update,
		ID:                 uuid,
		Properties:         returnProps,
		VectorWeights:      vectorWeights,
		Additional:         additionalProperties,
	}

	return nil
}

// DeepCopyDangerous
creates a deep copy of the underlying Object +// WARNING: This was purpose built for the batch ref usecase and only covers +// the situations that are required there. This means that cases which aren't +// reflected in that usecase may still contain references. Thus the suffix +// "Dangerous". If needed, make sure everything is copied and remove the +// suffix. +func (ko *Object) DeepCopyDangerous() *Object { + o := &Object{ + MarshallerVersion: ko.MarshallerVersion, + DocID: ko.DocID, + Object: deepCopyObject(ko.Object), + Vector: deepCopyVector(ko.Vector), + Vectors: deepCopyVectorsMap(ko.Vectors), + MultiVectors: deepCopyMultiVectorsMap(ko.MultiVectors), + } + + return o +} + +func AddOwnership(objs []*Object, node, shard string) { + for i := range objs { + objs[i].BelongsToNode = node + objs[i].BelongsToShard = shard + } +} + +func deepCopyVector(orig []float32) []float32 { + out := make([]float32, len(orig)) + copy(out, orig) + return out +} + +func deepCopyMultiVector(orig [][]float32) [][]float32 { + out := make([][]float32, len(orig)) + copy(out, orig) + return out +} + +func deepCopyVectors(orig models.Vectors) models.Vectors { + out := make(models.Vectors, len(orig)) + for key, vec := range orig { + switch v := any(vec).(type) { + case []float32: + out[key] = deepCopyVector(v) + case [][]float32: + out[key] = deepCopyMultiVector(v) + default: + // do nothing + } + } + return out +} + +func deepCopyVectorsMap(orig map[string][]float32) map[string][]float32 { + out := make(map[string][]float32, len(orig)) + for key, vec := range orig { + switch v := any(vec).(type) { + case []float32: + out[key] = deepCopyVector(v) + default: + // do nothing + } + } + return out +} + +func deepCopyMultiVectorsMap(orig map[string][][]float32) map[string][][]float32 { + out := make(map[string][][]float32, len(orig)) + for key, vec := range orig { + switch v := any(vec).(type) { + case [][]float32: + out[key] = deepCopyMultiVector(v) + default: + // do nothing + } + } + return 
out +} + +func deepCopyObject(orig models.Object) models.Object { + return models.Object{ + Class: orig.Class, + ID: orig.ID, + CreationTimeUnix: orig.CreationTimeUnix, + LastUpdateTimeUnix: orig.LastUpdateTimeUnix, + Vector: deepCopyVector(orig.Vector), + VectorWeights: orig.VectorWeights, + Additional: orig.Additional, // WARNING: not a deep copy!! + Properties: deepCopyProperties(orig.Properties), + Vectors: deepCopyVectors(orig.Vectors), + } +} + +func deepCopyProperties(orig models.PropertySchema) models.PropertySchema { + if orig == nil { + return nil + } + + asMap, ok := orig.(map[string]interface{}) + if !ok { + // not a map, don't know what to do with this + return nil + } + + out := map[string]interface{}{} + + for key, value := range asMap { + if mref, ok := value.(models.MultipleRef); ok { + out[key] = deepCopyMRef(mref) + continue + } + + // Note: This is not a true deep copy, value could still be a pointer type, + // such as *models.GeoCoordinates, thus leading to passing a reference + // instead of actually making a copy. 
However, for the purposes we need + // this method for this is acceptable based on our current knowledge + out[key] = value + } + + return out +} + +func deepCopyMRef(orig models.MultipleRef) models.MultipleRef { + if orig == nil { + return nil + } + + out := make(models.MultipleRef, len(orig)) + for i, ref := range orig { + // models.SingleRef contains only pass-by-value props, so a simple deref as + // the struct creates a copy + copiedRef := *ref + out[i] = &copiedRef + } + + return out +} diff --git a/platform/dbops/binaries/weaviate-src/entities/storobj/storage_object_test.go b/platform/dbops/binaries/weaviate-src/entities/storobj/storage_object_test.go new file mode 100644 index 0000000000000000000000000000000000000000..01a375d6d37380b48a4d616fee2e162b43e8341f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/storobj/storage_object_test.go @@ -0,0 +1,1629 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package storobj + +import ( + cryptorand "crypto/rand" + "encoding/binary" + "fmt" + "math/rand" + "strings" + "testing" + "time" + + "github.com/weaviate/weaviate/usecases/byteops" + + "github.com/go-openapi/strfmt" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +func TestStorageObjectMarshalling(t *testing.T) { + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 56789, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + Additional: models.AdditionalProperties{ + "classification": &additional.Classification{ + BasedOn: []string{"some", "fields"}, + }, + "interpretation": map[string]interface{}{ + "Source": []interface{}{ + map[string]interface{}{ + "concept": "foo", + "occurrence": float64(7), + "weight": float64(3), + }, + }, + }, + }, + Properties: map[string]interface{}{ + "name": "MyName", + "foo": float64(17), + }, + }, + []float32{1, 2, 0.7}, + map[string][]float32{ + "vector1": {1, 2, 3}, + "vector2": {4, 5, 6}, + }, + nil, + ) + before.DocID = 7 + + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + + after, err := FromBinary(asBinary) + require.Nil(t, err) + + t.Run("compare", func(t *testing.T) { + assert.Equal(t, before, after) + }) + + t.Run("extract only doc id and compare", func(t *testing.T) { + id, updateTime, err := DocIDAndTimeFromBinary(asBinary) + require.Nil(t, err) + assert.Equal(t, uint64(7), id) + assert.Equal(t, before.LastUpdateTimeUnix(), updateTime) + }) + + t.Run("extract single text prop", func(t *testing.T) { + prop, ok, err := ParseAndExtractTextProp(asBinary, "name") + require.Nil(t, err) + require.True(t, ok) + require.NotEmpty(t, prop) + assert.Equal(t, "MyName", prop[0]) + 
}) + + t.Run("extract non-existing text prop", func(t *testing.T) { + prop, ok, err := ParseAndExtractTextProp(asBinary, "IDoNotExist") + require.Nil(t, err) + require.True(t, ok) + require.Empty(t, prop) + }) +} + +func TestStorageObjectMarshallingMultiVector(t *testing.T) { + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 56789, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + Additional: models.AdditionalProperties{ + "classification": &additional.Classification{ + BasedOn: []string{"some", "fields"}, + }, + "interpretation": map[string]interface{}{ + "Source": []interface{}{ + map[string]interface{}{ + "concept": "foo", + "occurrence": float64(7), + "weight": float64(3), + }, + }, + }, + }, + Properties: map[string]interface{}{ + "name": "MyName", + "foo": float64(17), + }, + }, + []float32{1, 2, 0.7}, + map[string][]float32{ + "vector1": {1, 2, 3}, + "vector2": {4, 5, 6}, + }, + map[string][][]float32{ + "vector3": {{7, 8, 9}, {10, 11, 12}}, + "vector4": {{13, 14, 15}, {16, 17, 18}, {16, 1}, {1}}, + "vector5": {{19, 20, 21}, {22, 23, 24}}, + }, + ) + before.DocID = 7 + + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + + after, err := FromBinary(asBinary) + require.Nil(t, err) + + t.Run("compare", func(t *testing.T) { + assert.Equal(t, before, after) + }) + + t.Run("extract only doc id and compare", func(t *testing.T) { + id, updateTime, err := DocIDAndTimeFromBinary(asBinary) + require.Nil(t, err) + assert.Equal(t, uint64(7), id) + assert.Equal(t, before.LastUpdateTimeUnix(), updateTime) + }) + + t.Run("extract single text prop", func(t *testing.T) { + prop, ok, err := ParseAndExtractTextProp(asBinary, "name") + require.Nil(t, err) + require.True(t, ok) + require.NotEmpty(t, prop) + assert.Equal(t, "MyName", prop[0]) + }) + + t.Run("extract non-existing text prop", func(t *testing.T) { + prop, ok, err := ParseAndExtractTextProp(asBinary, "IDoNotExist") + 
require.Nil(t, err) + require.True(t, ok) + require.Empty(t, prop) + }) +} + +func TestStorageObjectUnMarshallingMultiVector(t *testing.T) { + t.Run("all vectors stored", func(t *testing.T) { + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 56789, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + Additional: models.AdditionalProperties{ + "classification": &additional.Classification{ + BasedOn: []string{"some", "fields"}, + }, + "interpretation": map[string]interface{}{ + "Source": []interface{}{ + map[string]interface{}{ + "concept": "foo", + "occurrence": float64(7), + "weight": float64(3), + }, + }, + }, + }, + Properties: map[string]interface{}{ + "name": "MyName", + "foo": float64(17), + }, + }, + []float32{1, 2, 0.7}, + map[string][]float32{ + "vector1": {1, 2, 3}, + "vector2": {4, 5, 6}, + }, + map[string][][]float32{ + "vector3": {{7, 8, 9}, {10, 11, 12}}, + "vector4": {{13, 14, 15}, {16, 17, 18}, {16, 1}, {1}}, + "vector5": {{19, 20, 21}, {22, 23, 24}, {22, 23, 24}, {22, 23, 24}, {22, 23, 24}}, + }, + ) + before.DocID = 7 + + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + + after := &Object{} + after.UnmarshalBinary(asBinary) + require.Nil(t, err) + + t.Run("compare", func(t *testing.T) { + assert.Equal(t, before, after) + }) + + t.Run("check vector", func(t *testing.T) { + require.NotEmpty(t, after.Vector) + assert.ElementsMatch(t, after.Vector, before.Vector) + }) + + t.Run("check vectors", func(t *testing.T) { + require.NotEmpty(t, after.Vectors) + assert.ElementsMatch(t, after.Vectors["vector1"], before.Vectors["vector1"]) + assert.ElementsMatch(t, after.Vectors["vector2"], before.Vectors["vector2"]) + }) + + t.Run("check multi vectors", func(t *testing.T) { + require.NotEmpty(t, after.MultiVectors) + assert.ElementsMatch(t, after.MultiVectors["vector3"], before.MultiVectors["vector3"]) + assert.ElementsMatch(t, after.MultiVectors["vector4"], 
before.MultiVectors["vector4"]) + assert.ElementsMatch(t, after.MultiVectors["vector5"], before.MultiVectors["vector5"]) + }) + + t.Run("check multi vectors optional", func(t *testing.T) { + t.Run("FromBinaryOptional: empty additional", func(t *testing.T) { + afterMultiVectorsOptional, err := FromBinaryOptional(asBinary, additional.Properties{}, nil) + require.Nil(t, err) + require.Nil(t, afterMultiVectorsOptional.MultiVectors) + }) + + t.Run("FromBinaryOptional: multi vector in additional", func(t *testing.T) { + afterMultiVectorsOptional, err := FromBinaryOptional(asBinary, additional.Properties{ + Vectors: []string{"vector4"}, + }, nil) + require.Nil(t, err) + require.NotEmpty(t, afterMultiVectorsOptional.MultiVectors) + require.Len(t, afterMultiVectorsOptional.MultiVectors, 1) + require.Equal(t, before.MultiVectors["vector4"], afterMultiVectorsOptional.MultiVectors["vector4"]) + }) + + t.Run("FromBinaryOptional: named vector and multi vector in additional", func(t *testing.T) { + afterMultiVectorsOptional, err := FromBinaryOptional(asBinary, additional.Properties{ + Vectors: []string{"vector2", "vector4"}, + }, nil) + require.Nil(t, err) + require.NotEmpty(t, afterMultiVectorsOptional.Vectors) + require.NotEmpty(t, afterMultiVectorsOptional.MultiVectors) + require.Len(t, afterMultiVectorsOptional.Vectors, 2) + require.Len(t, afterMultiVectorsOptional.MultiVectors, 1) + require.Equal(t, before.Vectors["vector1"], afterMultiVectorsOptional.Vectors["vector1"]) + require.Equal(t, before.Vectors["vector2"], afterMultiVectorsOptional.Vectors["vector2"]) + require.Equal(t, before.MultiVectors["vector4"], afterMultiVectorsOptional.MultiVectors["vector4"]) + }) + }) + }) + + t.Run("only vectors and multivectors", func(t *testing.T) { + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 56789, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + Additional: models.AdditionalProperties{ + 
"classification": &additional.Classification{ + BasedOn: []string{"some", "fields"}, + }, + "interpretation": map[string]interface{}{ + "Source": []interface{}{ + map[string]interface{}{ + "concept": "foo", + "occurrence": float64(7), + "weight": float64(3), + }, + }, + }, + }, + Properties: map[string]interface{}{ + "name": "MyName", + "foo": float64(17), + }, + }, + nil, + map[string][]float32{ + "vector1": {1, 2, 3}, + "vector2": {4, 5, 6}, + }, + map[string][][]float32{ + "vector3": {{7, 8, 9}, {10, 11, 12}}, + "vector4": {{13, 14, 15}, {16, 17, 18}, {16, 1}, {1}}, + "vector5": {{19, 20, 21}, {22, 23, 24}, {22, 23, 24}, {22, 23, 24}, {22, 23, 24}}, + }, + ) + before.DocID = 7 + + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + + after := &Object{} + after.UnmarshalBinary(asBinary) + require.Nil(t, err) + + t.Run("check vector", func(t *testing.T) { + require.Empty(t, after.Vector) + }) + + t.Run("check vectors", func(t *testing.T) { + require.NotEmpty(t, after.Vectors) + assert.ElementsMatch(t, after.Vectors["vector1"], before.Vectors["vector1"]) + assert.ElementsMatch(t, after.Vectors["vector2"], before.Vectors["vector2"]) + }) + + t.Run("check multivectors", func(t *testing.T) { + require.NotEmpty(t, after.MultiVectors) + assert.ElementsMatch(t, after.MultiVectors["vector3"], before.MultiVectors["vector3"]) + assert.ElementsMatch(t, after.MultiVectors["vector4"], before.MultiVectors["vector4"]) + assert.ElementsMatch(t, after.MultiVectors["vector5"], before.MultiVectors["vector5"]) + }) + }) + + t.Run("only multi vectors", func(t *testing.T) { + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 56789, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + Additional: models.AdditionalProperties{ + "classification": &additional.Classification{ + BasedOn: []string{"some", "fields"}, + }, + "interpretation": map[string]interface{}{ + "Source": []interface{}{ + 
map[string]interface{}{ + "concept": "foo", + "occurrence": float64(7), + "weight": float64(3), + }, + }, + }, + }, + Properties: map[string]interface{}{ + "name": "MyName", + "foo": float64(17), + }, + }, + nil, + nil, + map[string][][]float32{ + "vector3": {{7, 8, 9}, {10, 11, 12}}, + "vector4": {{13, 14, 15}, {16, 17, 18}, {16, 1}, {1}}, + "vector5": {{19, 20, 21}, {22, 23, 24}, {22, 23, 24}, {22, 23, 24}, {22, 23, 24}}, + }, + ) + before.DocID = 7 + + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + + after := &Object{} + after.UnmarshalBinary(asBinary) + require.Nil(t, err) + + t.Run("check vector", func(t *testing.T) { + require.Empty(t, after.Vector) + }) + + t.Run("check vectors", func(t *testing.T) { + require.Nil(t, after.Vectors) + }) + + t.Run("check multi vectors", func(t *testing.T) { + require.NotEmpty(t, after.MultiVectors) + assert.ElementsMatch(t, after.MultiVectors["vector3"], before.MultiVectors["vector3"]) + assert.ElementsMatch(t, after.MultiVectors["vector4"], before.MultiVectors["vector4"]) + assert.ElementsMatch(t, after.MultiVectors["vector5"], before.MultiVectors["vector5"]) + }) + }) +} + +func TestFilteringNilProperty(t *testing.T) { + object := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + ID: "73f2eb5f-5abf-447a-81ca-74b1dd168247", + Properties: map[string]interface{}{ + "IWillBeRemoved": nil, + "IWillStay": float64(17), + }, + }, + []float32{1, 2, 0.7}, + nil, + nil, + ) + props := object.Properties() + propsTyped, ok := props.(map[string]interface{}) + require.True(t, ok) + assert.Equal(t, propsTyped["IWillStay"], float64(17)) + + elem, ok := propsTyped["IWillBeRemoved"] + require.False(t, ok) + require.Nil(t, elem) +} + +func TestStorageObjectUnmarshallingSpecificProps(t *testing.T) { + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 56789, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + Additional: 
models.AdditionalProperties{ + "classification": &additional.Classification{ + BasedOn: []string{"some", "fields"}, + }, + "interpretation": map[string]interface{}{ + "Source": []interface{}{ + map[string]interface{}{ + "concept": "foo", + "occurrence": float64(7), + "weight": float64(3), + }, + }, + }, + }, + Properties: map[string]interface{}{ + "name": "MyName", + "foo": float64(17), + }, + }, + []float32{1, 2, 0.7}, + map[string][]float32{ + "vector1": {1, 2, 3}, + "vector2": {4, 5, 6}, + "vector3": {7, 8, 9}, + }, + nil, + ) + before.DocID = 7 + + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + + t.Run("without any optional", func(t *testing.T) { + after, err := FromBinaryOptional(asBinary, additional.Properties{}, nil) + require.Nil(t, err) + + t.Run("compare", func(t *testing.T) { + // modify before to match expectations of after + before.Object.Additional = nil + before.Vector = nil + before.VectorLen = 3 + before.Vectors = nil + assert.Equal(t, before, after) + + assert.Equal(t, before.DocID, after.DocID) + + // The vector length should always be returned (for usage metrics + // purposes) even if the vector itself is skipped + assert.Equal(t, after.VectorLen, 3) + }) + }) +} + +func TestNewStorageObject(t *testing.T) { + t.Run("objects", func(t *testing.T) { + so := New(12) + + t.Run("check index id", func(t *testing.T) { + assert.Equal(t, uint64(12), so.DocID) + }) + + t.Run("is invalid without required params", func(t *testing.T) { + assert.False(t, so.Valid()) + }) + + t.Run("reassign index id", func(t *testing.T) { + so.DocID = 13 + assert.Equal(t, uint64(13), so.DocID) + }) + + t.Run("assign class", func(t *testing.T) { + so.SetClass("MyClass") + assert.Equal(t, schema.ClassName("MyClass"), so.Class()) + }) + + t.Run("assign uuid", func(t *testing.T) { + id := strfmt.UUID("bf706904-8618-463f-899c-4a2aafd48d56") + so.SetID(id) + assert.Equal(t, id, so.ID()) + }) + + t.Run("assign uuid", func(t *testing.T) { + schema := 
map[string]interface{}{ + "foo": "bar", + } + so.SetProperties(schema) + assert.Equal(t, schema, so.Properties()) + }) + + t.Run("must now be valid", func(t *testing.T) { + assert.True(t, so.Valid()) + }) + + t.Run("make sure it's identical with an object created from an existing object", + func(t *testing.T) { + alt := FromObject(&models.Object{ + Class: "MyClass", + ID: "bf706904-8618-463f-899c-4a2aafd48d56", + Properties: map[string]interface{}{ + "foo": "bar", + }, + }, nil, nil, nil) + alt.DocID = 13 + + assert.Equal(t, so, alt) + }) + }) + + t.Run("objects", func(t *testing.T) { + so := New(12) + + t.Run("check index id", func(t *testing.T) { + assert.Equal(t, uint64(12), so.DocID) + }) + + t.Run("is invalid without required params", func(t *testing.T) { + assert.False(t, so.Valid()) + }) + + t.Run("reassign index id", func(t *testing.T) { + so.DocID = 13 + assert.Equal(t, uint64(13), so.DocID) + }) + + t.Run("assign class", func(t *testing.T) { + so.SetClass("MyClass") + assert.Equal(t, schema.ClassName("MyClass"), so.Class()) + }) + + t.Run("assign uuid", func(t *testing.T) { + id := strfmt.UUID("bf706904-8618-463f-899c-4a2aafd48d56") + so.SetID(id) + assert.Equal(t, id, so.ID()) + }) + + t.Run("assign uuid", func(t *testing.T) { + schema := map[string]interface{}{ + "foo": "bar", + } + so.SetProperties(schema) + assert.Equal(t, schema, so.Properties()) + }) + + t.Run("must now be valid", func(t *testing.T) { + assert.True(t, so.Valid()) + }) + + t.Run("make sure it's identical with an object created from an existing action", + func(t *testing.T) { + alt := FromObject(&models.Object{ + Class: "MyClass", + ID: "bf706904-8618-463f-899c-4a2aafd48d56", + Properties: map[string]interface{}{ + "foo": "bar", + }, + }, nil, nil, nil) + alt.DocID = 13 + + assert.Equal(t, so, alt) + }) + }) +} + +func TestStorageArrayObjectMarshalling(t *testing.T) { + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + 
LastUpdateTimeUnix: 56789, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + Additional: models.AdditionalProperties{ + "classification": &additional.Classification{ + BasedOn: []string{"some", "fields"}, + }, + "interpretation": map[string]interface{}{ + "Source": []interface{}{ + map[string]interface{}{ + "concept": "foo", + "occurrence": float64(7), + "weight": float64(3), + }, + }, + }, + }, + Properties: map[string]interface{}{ + "textArray": []string{"c", "d"}, + "numberArray": []float64{1.1, 2.1}, + "foo": float64(17), + }, + }, + []float32{1, 2, 0.7}, + map[string][]float32{ + "vector1": {1, 2, 3}, + "vector2": {4, 5, 6}, + "vector3": {7, 8, 9}, + }, + nil, + ) + before.DocID = 7 + + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + + after, err := FromBinary(asBinary) + require.Nil(t, err) + + t.Run("compare", func(t *testing.T) { + assert.Equal(t, before, after) + }) + + t.Run("extract only doc id and compare", func(t *testing.T) { + id, updateTime, err := DocIDAndTimeFromBinary(asBinary) + require.Nil(t, err) + assert.Equal(t, uint64(7), id) + assert.Equal(t, before.LastUpdateTimeUnix(), updateTime) + }) + + t.Run("extract text array prop", func(t *testing.T) { + prop, ok, err := ParseAndExtractTextProp(asBinary, "textArray") + require.Nil(t, err) + require.True(t, ok) + assert.Equal(t, []string{"c", "d"}, prop) + }) + + t.Run("extract number array prop", func(t *testing.T) { + prop, ok, err := ParseAndExtractNumberArrayProp(asBinary, "numberArray") + require.Nil(t, err) + require.True(t, ok) + assert.Equal(t, []float64{1.1, 2.1}, prop) + }) +} + +func TestExtractionOfSingleProperties(t *testing.T) { + expected := map[string]interface{}{ + "numberArray": []interface{}{1.1, 2.1}, + "intArray": []interface{}{1., 2., 5000.}, + "textArrayUTF": []interface{}{"語", "b"}, + "textArray": []interface{}{"hello", ",", "I", "am", "a", "veeery", "long", "Array", "with some text."}, + "foo": float64(17), + "text": "single string", + "bool": 
true, + "time": "2011-11-23T01:52:23.000004234Z", + "boolArray": []interface{}{true, false, true}, + "beacon": []interface{}{map[string]interface{}{"beacon": "weaviate://localhost/SomeClass/3453/73f4eb5f-5abf-447a-81ca-74b1dd168247"}}, + "ref": []interface{}{ + map[string]interface{}{"beacon": "weaviate://localhost/SomeClass/3453/73f4eb5f-5abf-447a-81ca-74b1dd168247"}, + map[string]interface{}{"beacon": "weaviate://localhost/SomeClass/3453/73f4eb5f-5abf-447a-81ca-74b1dd168248"}, + }, + "nested": map[string]interface{}{"test": map[string]interface{}{"innerInt": float64(3), "innerStr": "avc"}}, + "nestedArray": []interface{}{map[string]interface{}{"test": map[string]interface{}{"innerArray": float64(3), "innerStr": "avc"}}}, + } + properties := map[string]interface{}{ + "numberArray": []float64{1.1, 2.1}, + "intArray": []int32{1, 2, 5000}, + "textArrayUTF": []string{"語", "b"}, + "textArray": []string{"hello", ",", "I", "am", "a", "veeery", "long", "Array", "with some text."}, + "foo": float64(17), + "text": "single string", + "bool": true, + "time": time.Date(2011, 11, 23, 1, 52, 23, 4234, time.UTC), + "boolArray": []bool{true, false, true}, + "beacon": []map[string]interface{}{{"beacon": "weaviate://localhost/SomeClass/3453/73f4eb5f-5abf-447a-81ca-74b1dd168247"}}, + "ref": []models.SingleRef{ + {Beacon: "weaviate://localhost/SomeClass/3453/73f4eb5f-5abf-447a-81ca-74b1dd168247", Class: "OtherClass", Href: "/v1/f81bfe5e-16ba-4615-a516-46c2ae2e5a80"}, + {Beacon: "weaviate://localhost/SomeClass/3453/73f4eb5f-5abf-447a-81ca-74b1dd168248", Class: "OtherClass", Href: "/v1/f81bfe5e-16ba-4615-a516-46c2ae2e5a81"}, + }, + "nested": map[string]interface{}{"test": map[string]interface{}{"innerInt": 3, "innerStr": "avc"}}, + "nestedArray": []interface{}{map[string]interface{}{"test": map[string]interface{}{"innerArray": float64(3), "innerStr": "avc"}}}, + } + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 
56789, + ID: "73f2eb5f-5abf-447a-81ca-74b1dd168247", + Properties: properties, + }, + []float32{1, 2, 0.7}, + nil, + nil, + ) + + before.DocID = 7 + byteObject, err := before.MarshalBinary() + require.Nil(t, err) + + var propStrings [][]string + for key := range properties { + propStrings = append(propStrings, []string{key}) + } + + extractedProperties := map[string]interface{}{} + + // test with reused property map + for i := 0; i < 2; i++ { + require.Nil(t, UnmarshalPropertiesFromObject(byteObject, extractedProperties, propStrings)) + for key := range expected { + require.Equal(t, expected[key], extractedProperties[key]) + } + } +} + +func TestStorageObjectMarshallingWithGroup(t *testing.T) { + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 56789, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + Additional: models.AdditionalProperties{ + "classification": &additional.Classification{ + BasedOn: []string{"some", "fields"}, + }, + "interpretation": map[string]interface{}{ + "Source": []interface{}{ + map[string]interface{}{ + "concept": "foo", + "occurrence": float64(7), + "weight": float64(3), + }, + }, + }, + "group": &additional.Group{ + ID: 100, + GroupedBy: &additional.GroupedBy{ + Value: "group-by-some-property", + Path: []string{"property-path"}, + }, + MaxDistance: 0.1, + MinDistance: 0.2, + Count: 200, + Hits: []map[string]interface{}{ + { + "property1": "value1", + "_additional": &additional.GroupHitAdditional{ + ID: "2c76ca18-2073-4c48-aa52-7f444d2f5b80", + Distance: 0.24, + }, + }, + { + "property1": "value2", + }, + }, + }, + }, + Properties: map[string]interface{}{ + "name": "MyName", + "foo": float64(17), + }, + }, + []float32{1, 2, 0.7}, + map[string][]float32{ + "vector1": {1, 2, 3}, + "vector2": {4, 5, 6}, + "vector3": {7, 8, 9}, + }, + nil, + ) + before.DocID = 7 + + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + + after, err := FromBinary(asBinary) 
+ require.Nil(t, err) + + t.Run("compare", func(t *testing.T) { + assert.Equal(t, before, after) + }) + + t.Run("extract only doc id and compare", func(t *testing.T) { + id, updateTime, err := DocIDAndTimeFromBinary(asBinary) + require.Nil(t, err) + assert.Equal(t, uint64(7), id) + assert.Equal(t, before.LastUpdateTimeUnix(), updateTime) + }) + + t.Run("extract single text prop", func(t *testing.T) { + prop, ok, err := ParseAndExtractTextProp(asBinary, "name") + require.Nil(t, err) + require.True(t, ok) + require.NotEmpty(t, prop) + assert.Equal(t, "MyName", prop[0]) + }) + + t.Run("extract non-existing text prop", func(t *testing.T) { + prop, ok, err := ParseAndExtractTextProp(asBinary, "IDoNotExist") + require.Nil(t, err) + require.True(t, ok) + require.Empty(t, prop) + }) + + t.Run("extract group additional property", func(t *testing.T) { + require.NotNil(t, after.AdditionalProperties()) + require.NotNil(t, after.AdditionalProperties()["group"]) + group, ok := after.AdditionalProperties()["group"].(*additional.Group) + require.True(t, ok) + assert.Equal(t, 100, group.ID) + assert.NotNil(t, group.GroupedBy) + assert.Equal(t, "group-by-some-property", group.GroupedBy.Value) + assert.Equal(t, []string{"property-path"}, group.GroupedBy.Path) + assert.Equal(t, 200, group.Count) + assert.Equal(t, float32(0.1), group.MaxDistance) + assert.Equal(t, float32(0.2), group.MinDistance) + require.Len(t, group.Hits, 2) + require.NotNil(t, group.Hits[0]["_additional"]) + groupHitAdditional, ok := group.Hits[0]["_additional"].(*additional.GroupHitAdditional) + require.True(t, ok) + assert.Equal(t, strfmt.UUID("2c76ca18-2073-4c48-aa52-7f444d2f5b80"), groupHitAdditional.ID) + assert.Equal(t, float32(0.24), groupHitAdditional.Distance) + assert.Equal(t, "value1", group.Hits[0]["property1"]) + require.Nil(t, group.Hits[1]["_additional"]) + assert.Equal(t, "value2", group.Hits[1]["property1"]) + }) +} + +func TestStorageMaxVectorDimensionsObjectMarshalling(t *testing.T) { + 
generateVector := func(dims uint16) []float32 { + vector := make([]float32, dims) + for i := range vector { + vector[i] = 0.1 + } + return vector + } + // 65535 is max uint16 number + edgeVectorLengths := []uint16{0, 1, 768, 50000, 65535} + for _, vectorLength := range edgeVectorLengths { + t.Run(fmt.Sprintf("%v vector dimensions", vectorLength), func(t *testing.T) { + t.Run("marshal binary", func(t *testing.T) { + vector := generateVector(vectorLength) + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + Properties: map[string]interface{}{ + "name": "myName", + }, + }, + vector, + nil, + nil, + ) + before.DocID = 7 + + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + + after, err := FromBinary(asBinary) + require.Nil(t, err) + + t.Run("compare", func(t *testing.T) { + assert.Equal(t, before, after) + }) + + t.Run("try to extract a property", func(t *testing.T) { + prop, ok, err := ParseAndExtractTextProp(asBinary, "name") + require.Nil(t, err) + require.True(t, ok) + assert.Equal(t, []string{"myName"}, prop) + }) + }) + + t.Run("marshal optional binary", func(t *testing.T) { + vector := generateVector(vectorLength) + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + Properties: map[string]interface{}{ + "name": "myName", + "second": "entry", + }, + }, + vector, + nil, + nil, + ) + before.DocID = 7 + + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + + t.Run("get without additional properties", func(t *testing.T) { + after, err := FromBinaryOptional(asBinary, additional.Properties{}, nil) + require.Nil(t, err) + // modify before to match expectations of after + before.Object.Additional = nil + before.Vector = nil + before.VectorLen = int(vectorLength) + assert.Equal(t, before, after) + + assert.Equal(t, before.DocID, 
after.DocID) + + // The vector length should always be returned (for usage metrics + // purposes) even if the vector itself is skipped + assert.Equal(t, after.VectorLen, int(vectorLength)) + }) + + t.Run("get with additional property vector", func(t *testing.T) { + after, err := FromBinaryOptional(asBinary, additional.Properties{Vector: true}, nil) + require.Nil(t, err) + // modify before to match expectations of after + before.Object.Additional = nil + before.Vector = vector + before.VectorLen = int(vectorLength) + + assert.Equal(t, before, after) + + assert.Equal(t, before.DocID, after.DocID) + + // The vector length should always be returned (for usage metrics + // purposes) even if the vector itself is skipped + assert.Equal(t, after.VectorLen, int(vectorLength)) + assert.Equal(t, vector, after.Vector) + }) + + t.Run("with explicit properties", func(t *testing.T) { + after, err := FromBinaryOptional(asBinary, additional.Properties{}, + &PropertyExtraction{PropertyPaths: [][]string{{"name"}}}, + ) + require.Nil(t, err) + + assert.Equal(t, before.DocID, after.DocID) + // second property is not included + assert.Equal(t, map[string]interface{}{"name": "myName"}, after.Properties()) + }) + + t.Run("test no props and moduleparams", func(t *testing.T) { + after, err := FromBinaryOptional(asBinary, additional.Properties{ + NoProps: true, + ModuleParams: map[string]interface{}{"foo": "bar"}, // this causes the property extraction code to run + }, + &PropertyExtraction{PropertyPaths: nil}, + ) + require.Nil(t, err) + + assert.Equal(t, before.DocID, after.DocID) + var emptyProps map[string]interface{} + assert.Equal(t, emptyProps, after.Properties()) + }) + }) + }) + } +} + +func TestVectorFromBinary(t *testing.T) { + vector1 := []float32{1, 2, 3} + vector2 := []float32{4, 5, 6} + vector3 := []float32{7, 8, 9} + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 56789, + ID: 
strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + Additional: models.AdditionalProperties{ + "classification": &additional.Classification{ + BasedOn: []string{"some", "fields"}, + }, + "interpretation": map[string]interface{}{ + "Source": []interface{}{ + map[string]interface{}{ + "concept": "foo", + "occurrence": float64(7), + "weight": float64(3), + }, + }, + }, + "group": &additional.Group{ + ID: 100, + GroupedBy: &additional.GroupedBy{ + Value: "group-by-some-property", + Path: []string{"property-path"}, + }, + MaxDistance: 0.1, + MinDistance: 0.2, + Count: 200, + Hits: []map[string]interface{}{ + { + "property1": "value1", + "_additional": &additional.GroupHitAdditional{ + ID: "2c76ca18-2073-4c48-aa52-7f444d2f5b80", + Distance: 0.24, + }, + }, + { + "property1": "value2", + }, + }, + }, + }, + Properties: map[string]interface{}{ + "name": "MyName", + "foo": float64(17), + }, + }, + []float32{1, 2, 0.7}, + map[string][]float32{ + "vector1": vector1, + "vector2": vector2, + "vector3": vector3, + }, + nil, + ) + before.DocID = 7 + + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + + outVector1, err := VectorFromBinary(asBinary, nil, "vector1") + require.Nil(t, err) + assert.Equal(t, vector1, outVector1) + + outVector2, err := VectorFromBinary(asBinary, nil, "vector2") + require.Nil(t, err) + assert.Equal(t, vector2, outVector2) + + outVector3, err := VectorFromBinary(asBinary, nil, "vector3") + require.Nil(t, err) + assert.Equal(t, vector3, outVector3) +} + +func TestMultiVectorFromBinary(t *testing.T) { + vector1 := [][]float32{{1, 2, 3}, {4, 5, 6}} + vector2 := [][]float32{{4, 5, 6}, {7, 8, 9}} + vector3 := [][]float32{{7, 8, 9}, {10, 11, 12}, {13, 14, 15}} + vector4 := []float32{1, 2, 3} + before := FromObject( + &models.Object{ + Class: "MyFavoriteClass", + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 56789, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + Additional: models.AdditionalProperties{ + "classification": 
&additional.Classification{ + BasedOn: []string{"some", "fields"}, + }, + "interpretation": map[string]interface{}{ + "Source": []interface{}{ + map[string]interface{}{ + "concept": "foo", + "occurrence": float64(7), + "weight": float64(3), + }, + }, + }, + "group": &additional.Group{ + ID: 100, + GroupedBy: &additional.GroupedBy{ + Value: "group-by-some-property", + Path: []string{"property-path"}, + }, + MaxDistance: 0.1, + MinDistance: 0.2, + Count: 200, + Hits: []map[string]interface{}{ + { + "property1": "value1", + "_additional": &additional.GroupHitAdditional{ + ID: "2c76ca18-2073-4c48-aa52-7f444d2f5b80", + Distance: 0.24, + }, + }, + { + "property1": "value2", + }, + }, + }, + }, + Properties: map[string]interface{}{ + "name": "MyName", + "foo": float64(17), + }, + }, + []float32{1, 2, 0.7}, + map[string][]float32{ + "vector4": vector4, + }, + map[string][][]float32{ + "vector1": vector1, + "vector2": vector2, + "vector3": vector3, + }, + ) + before.DocID = 7 + + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + + outVector1, err := MultiVectorFromBinary(asBinary, nil, "vector1") + require.Nil(t, err) + assert.Equal(t, vector1, outVector1) + + outVector2, err := MultiVectorFromBinary(asBinary, nil, "vector2") + require.Nil(t, err) + assert.Equal(t, vector2, outVector2) + + outVector3, err := MultiVectorFromBinary(asBinary, nil, "vector3") + require.Nil(t, err) + assert.Equal(t, vector3, outVector3) + + outVector4, err := VectorFromBinary(asBinary, nil, "vector4") + require.Nil(t, err) + assert.Equal(t, vector4, outVector4) +} + +func TestStorageInvalidObjectMarshalling(t *testing.T) { + t.Run("invalid className", func(t *testing.T) { + invalidClassName := make([]byte, maxClassNameLength+1) + cryptorand.Read(invalidClassName[:]) + + invalidObj := FromObject( + &models.Object{ + Class: string(invalidClassName), + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 56789, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + }, + nil, + nil, + 
nil, + ) + + _, err := invalidObj.MarshalBinary() + require.ErrorContains(t, err, "could not marshal 'className' max length exceeded") + }) + + t.Run("invalid vector", func(t *testing.T) { + invalidObj := FromObject( + &models.Object{ + Class: "classA", + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 56789, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + }, + make([]float32, maxVectorLength+1), + nil, + nil, + ) + + _, err := invalidObj.MarshalBinary() + require.ErrorContains(t, err, "could not marshal 'vector' max length exceeded") + }) + + t.Run("invalid named vector size", func(t *testing.T) { + invalidObj := FromObject( + &models.Object{ + Class: "classA", + CreationTimeUnix: 123456, + LastUpdateTimeUnix: 56789, + ID: strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168247"), + }, + nil, + map[string][]float32{ + "vector1": make([]float32, maxVectorLength+1), + }, + nil, + ) + + _, err := invalidObj.MarshalBinary() + require.ErrorContains(t, err, "could not marshal 'vector' max length exceeded") + }) +} + +// Test that using the same buffer as source for unmarshalling does not cause problems due to memoy reuse +func TestMemoryReuse(t *testing.T) { + props := []map[string]interface{}{ + { + "numberArray": []interface{}{1.1, 2.1}, + "intArray": []interface{}{1., 2., 5000.}, + "textArrayUTF": []interface{}{"語", "b"}, + "boolArray": []interface{}{true, false}, + "textArray": []interface{}{"hello", ",", "I", "am", "a", "veeery", "long", "Array", "with some text."}, + "ref": []interface{}{map[string]interface{}{"beacon": "weaviate://localhost/SomeClass/73f4eb5f-5abf-447a-81ca-74b1dd168247"}}, + "foo": float64(17), + }, + { + "numberArray": []interface{}{1.4, 6.1}, + "intArray": []interface{}{4., 3., 9000.}, + "textArrayUTF": []interface{}{"a", "c"}, + "boolArray": []interface{}{true, true}, + "textArray": []interface{}{"I", "contain", "other", "text"}, + "ref": []interface{}{map[string]interface{}{"beacon": 
"weaviate://localhost/SomeClass/73f4eb5f-5abf-447a-81ca-74b1dd168248"}}, + "foo": float64(5), + }, + { + "numberArray": []interface{}{1.4, 6.1, 8.9}, + "intArray": []interface{}{4., 3., 9000., 1.}, + "textArrayUTF": []interface{}{"a", "c", "d"}, + "boolArray": []interface{}{true, true, false}, + "textArray": []interface{}{"I", "contain", "other", "text", "too"}, + "ref": []interface{}{map[string]interface{}{"beacon": "weaviate://localhost/SomeClass/73f4eb5f-5abf-447a-81ca-74b1dd168249"}}, + "foo": float64(9), + }, + } + + largestSize := 0 + for i, prop := range props { + obj := models.Object{ + Class: "something", + ID: strfmt.UUID(fmt.Sprintf("73f4eb5f-5abf-447a-81ca-74b1dd16824%v", i)), + Properties: prop, + } + before := FromObject(&obj, nil, nil, nil) + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + if len(asBinary) > largestSize { + largestSize = len(asBinary) + } + } + + reuseableBuff := make([]byte, largestSize) + afterProps := []map[string]interface{}{} + for i, beforeProp := range props { + obj := models.Object{ + Class: "something", + ID: strfmt.UUID(fmt.Sprintf("73f4eb5f-5abf-447a-81ca-74b1dd16824%v", i)), + Properties: beforeProp, + } + + propertyPaths := make([][]string, 0, len(beforeProp)) + for j := range beforeProp { + propertyPaths = append(propertyPaths, []string{j}) + } + + before := FromObject(&obj, nil, nil, nil) + asBinary, err := before.MarshalBinary() + require.Nil(t, err) + reuseableBuff = reuseableBuff[:len(asBinary)] + copy(reuseableBuff, asBinary) + + afterProp := map[string]interface{}{} + require.Nil(t, UnmarshalProperties(reuseableBuff, afterProp, propertyPaths)) + afterProps = append(afterProps, afterProp) + } + + for i, afterProp := range afterProps { + assert.Equal(t, props[i], afterProp) + } +} + +func BenchmarkUnmarshalPropertiesFullObject(b *testing.B) { + benchmarkExtraction(b, nil) +} + +func BenchmarkUnmarshalPropertiesExplicitOnlySome(b *testing.B) { + benchmarkExtraction(b, []string{"name", "second"}) +} + 
+func BenchmarkUnmarshalPropertiesExplicitAll(b *testing.B) {
+	benchmarkExtraction(b, []string{"name", "second", "number", "bool", "array"})
+}
+
+func benchmarkExtraction(b *testing.B, propStrings []string) {
+	beforeProps := map[string]interface{}{
+		"name":   "some long string",
+		"second": "other very long string",
+		"number": float64(17),
+		"bool":   false,
+		"array":  []string{"a", "b", "c", "d", "e", "f", "g", "h", "i", "j"},
+	}
+	before := FromObject(
+		&models.Object{
+			Class:            "MyFavoriteClass",
+			CreationTimeUnix: 123456,
+			ID:               "73f2eb5f-5abf-447a-81ca-74b1dd168247",
+			Properties:       beforeProps,
+		},
+		nil,
+		nil,
+		nil,
+	)
+	before.DocID = 7
+	var props *PropertyExtraction
+
+	if len(propStrings) > 0 {
+		propertyPaths := make([][]string, len(propStrings))
+		for i, prop := range propStrings {
+			propertyPaths[i] = []string{prop}
+		}
+
+		props = &PropertyExtraction{
+			PropertyPaths: propertyPaths,
+		}
+	}
+
+	asBinary, err := before.MarshalBinary()
+	require.Nil(b, err)
+
+	b.ResetTimer()
+
+	for n := 0; n < b.N; n++ {
+		after, err := FromBinaryOptional(asBinary, additional.Properties{}, props)
+		require.Nil(b, err)
+		require.NotNil(b, after)
+	}
+}
+
+func TestObjectsByDocID(t *testing.T) {
+	// The main variable here is the input length, which has an effect on
+	// chunking and parallelization.
+	tests := []struct {
+		name     string
+		inputIDs []uint64
+		// There is no field for the expected output as it is deterministic based
+		// on the doc ID: by convention the UUID ends in the doc ID and a specific
+		// prop is set exactly to the doc ID.
+	}{
+		{
+			name:     "1 object - sequential code path",
+			inputIDs: []uint64{0},
+		},
+		{
+			name:     "2 objects - concurrent code path",
+			inputIDs: []uint64{0, 1},
+		},
+		{
+			name:     "10 objects - consecutive from beginning",
+			inputIDs: []uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
+		},
+		{
+			name: "30 objects - consecutive from beginning",
+			inputIDs: []uint64{
+				0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
+				17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+			},
+		},
+		{
+			name:     "100 objects - random - should perfectly match to divide into groups",
+			inputIDs: pickRandomIDsBetween(0, 1000, 100),
+		},
+		{
+			name:     "99 objects - random - uneven groups slightly below perfect chunk size",
+			inputIDs: pickRandomIDsBetween(0, 1000, 99),
+		},
+		{
+			name:     "101 objects - random - uneven groups slightly above perfect chunk size",
+			inputIDs: pickRandomIDsBetween(0, 1000, 101),
+		},
+		{
+			name:     "117 objects - random - because why not",
+			inputIDs: pickRandomIDsBetween(0, 1000, 117),
+		},
+	}
+
+	logger, _ := test.NewNullLogger()
+
+	bucket := genFakeBucket(t, 1000)
+
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			res, err := ObjectsByDocID(bucket, test.inputIDs, additional.Properties{}, nil, logger)
+			require.Nil(t, err)
+			require.Len(t, res, len(test.inputIDs))
+
+			for i, obj := range res {
+				expectedDocID := test.inputIDs[i]
+				assert.Equal(t, expectedDocID, uint64(obj.Properties().(map[string]any)["i"].(float64)))
+				expectedUUID := strfmt.UUID(fmt.Sprintf("73f2eb5f-5abf-447a-81ca-74b1dd1%05d", expectedDocID))
+				assert.Equal(t, expectedUUID, obj.ID())
+			}
+		})
+	}
+}
+
+func TestSkipMissingObjects(t *testing.T) {
+	bucket := genFakeBucket(t, 1000)
+	logger, _ := test.NewNullLogger()
+	ids := pickRandomIDsBetween(0, 1000, 100)
+	ids = append(ids, 1001, 1002, 1003)
+	objs, err := objectsByDocIDParallel(bucket, ids, additional.Properties{}, nil, logger)
+	require.Nil(t, err)
+	require.Len(t, objs, 100)
+	for _, obj := range objs {
+		require.NotNil(t, obj)
+	}
+}
+
+func TestIterateThroughVectorDimensions(t *testing.T) {
+	for _, tt := range []struct {
+		name   string
+		object Object
+
+		expect map[string]int
+	}{
+		{
+			name:   "empty",
+			object: Object{},
+			expect: map[string]int{},
+		},
+		{
+			name: "legacy",
+			object: Object{
+				Vector: make([]float32, 100),
+			},
+			expect: map[string]int{
+				"": 100,
+			},
+		},
+		{
+			name: "named",
+			object: Object{
+				Vectors: map[string][]float32{
+					"vec1": make([]float32, 100),
+					"vec2": make([]float32, 200),
+				},
+			},
+			expect: map[string]int{
+				"vec1": 100,
+				"vec2": 200,
+			},
+		},
+		{
+			name: "multi",
+			object: Object{
+				MultiVectors: map[string][][]float32{
+					"vec1": {
+						make([]float32, 100),
+						make([]float32, 200),
+						make([]float32, 300),
+					},
+					"vec2": {
+						make([]float32, 400),
+						make([]float32, 500),
+					},
+				},
+			},
+			expect: map[string]int{
+				"vec1": 600,
+				"vec2": 900,
+			},
+		},
+		{
+			name: "mixed",
+			object: Object{
+				Vector: make([]float32, 100),
+				Vectors: map[string][]float32{
+					"vec1": make([]float32, 200),
+					"vec2": make([]float32, 300),
+				},
+				MultiVectors: map[string][][]float32{
+					"vec3": {
+						make([]float32, 400),
+					},
+					"vec4": {
+						make([]float32, 500),
+					},
+				},
+			},
+			expect: map[string]int{
+				"":     100,
+				"vec1": 200,
+				"vec2": 300,
+				"vec3": 400,
+				"vec4": 500,
+			},
+		},
+	} {
+		t.Run(tt.name, func(t *testing.T) {
+			captured := map[string]int{}
+			err := tt.object.IterateThroughVectorDimensions(func(targetVector string, dims int) error {
+				captured[targetVector] += dims
+				return nil
+			})
+			require.NoError(t, err)
+			require.Equal(t, tt.expect, captured)
+		})
+	}
+}
+
+func BenchmarkObjectsByDocID(b *testing.B) {
+	bucket := genFakeBucket(b, 10000)
+	logger, _ := test.NewNullLogger()
+	ids := pickRandomIDsBetween(0, 10000, 100)
+
+	tests := []struct {
+		concurrent bool
+		amount     int
+	}{
+		{concurrent: true, amount: 1},
+		{concurrent: false, amount: 1},
+		{concurrent: true, amount: 2},
+		{concurrent: false, amount: 2},
+		{concurrent: true, amount: 10},
+		{concurrent: false, amount: 10},
+		{concurrent: true, amount: 100},
+		{concurrent: false, amount: 100},
+	}
+	b.ResetTimer()
+
+	for _, tt := range tests {
+		b.Run(fmt.Sprintf("Concurrent: %v with amount: %v", tt.concurrent, tt.amount), func(t *testing.B) {
+			for i := 0; i < b.N; i++ {
+				if tt.concurrent {
+					_, err := objectsByDocIDParallel(bucket, ids[:tt.amount], additional.Properties{}, nil, logger)
+					require.Nil(t, err)
+
+				} else {
+					_, err := objectsByDocIDSequential(bucket, ids[:tt.amount], additional.Properties{}, nil)
+					require.Nil(t, err)
+				}
+			}
+		})
+	}
+}
+
+func intsToBytes(ints ...uint64) []byte {
+	byteOps := byteops.NewReadWriter(make([]byte, len(ints)*8))
+	for _, i := range ints {
+		byteOps.WriteUint64(i)
+	}
+	return byteOps.Buffer
+}
+
+func FuzzObjectGet(f *testing.F) {
+	maxSize := uint64(9999)
+	logger, _ := test.NewNullLogger()
+	bucket := genFakeBucket(f, maxSize)
+
+	readTests := []struct {
+		ids []uint64
+	}{
+		{ids: []uint64{0}},
+		{ids: []uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}},
+		{ids: pickRandomIDsBetween(0, 1000, 100)},
+	}
+	for _, tc := range readTests {
+		f.Add(intsToBytes(tc.ids...)) // Use f.Add to provide a seed corpus
+	}
+	f.Fuzz(func(t *testing.T, data []byte) {
+		if len(data) == 0 || len(data)%8 != 0 {
+			return
+		}
+		ids := make([]uint64, len(data)/8)
+		for i := 0; i < len(ids); i++ {
+			ids[i] = binary.LittleEndian.Uint64(data[i*8 : (i+1)*8])
+			if ids[i] >= maxSize {
+				return
+			}
+		}
+
+		res, err := ObjectsByDocID(bucket, ids, additional.Properties{}, nil, logger)
+		require.Nil(t, err)
+		require.Len(t, res, len(ids))
+		for i, obj := range res {
+			expectedDocID := ids[i]
+			assert.Equal(t, expectedDocID, uint64(obj.Properties().(map[string]any)["i"].(float64)))
+			expectedUUID := strfmt.UUID(fmt.Sprintf("73f2eb5f-5abf-447a-81ca-74b1dd1%05d", expectedDocID))
+			assert.Equal(t, expectedUUID, obj.ID())
+		}
+	})
+}
+
+type fakeBucket struct {
+	objects map[uint64][]byte
+}
+
+func (f *fakeBucket) GetBySecondary(_ int, _ []byte) ([]byte, error) {
+	panic("not implemented")
+}
+
+func (f *fakeBucket) GetBySecondaryWithBuffer(indexID int, docIDBytes []byte, lsmBuf []byte) ([]byte, []byte, error) {
+	docID := binary.LittleEndian.Uint64(docIDBytes)
+	objBytes, ok := f.objects[docID]
+	if !ok {
+		return nil, nil, nil
+	}
+	if len(lsmBuf) < len(objBytes) {
+		lsmBuf = make([]byte, len(objBytes))
+	}
+
+	copy(lsmBuf, objBytes)
+	return lsmBuf[:len(objBytes)], lsmBuf, nil
+}
+
+func genFakeBucket(t testing.TB, maxSize uint64) *fakeBucket {
+	bucket := &fakeBucket{objects: map[uint64][]byte{}}
+	for i := uint64(0); i < maxSize; i++ {
+		obj := New(i)
+		obj.SetProperties(map[string]any{"i": i, "foo": strings.Repeat("bar", int(i))})
+		obj.SetClass("MyClass")
+		obj.SetID(strfmt.UUID(fmt.Sprintf("73f2eb5f-5abf-447a-81ca-74b1dd1%05d", i)))
+		objBytes, err := obj.MarshalBinary()
+		require.Nil(t, err)
+		bucket.objects[i] = objBytes
+	}
+
+	return bucket
+}
+
+func pickRandomIDsBetween(start, end uint64, count int) []uint64 {
+	ids := make([]uint64, count)
+	for i := 0; i < count; i++ {
+		ids[i] = start + uint64(rand.Intn(int(end-start)))
+	}
+	return ids
+}
diff --git a/platform/dbops/binaries/weaviate-src/entities/sync/sync.go b/platform/dbops/binaries/weaviate-src/entities/sync/sync.go
new file mode 100644
index 0000000000000000000000000000000000000000..dd35a645354f0b6125a4f2b03073c55090acda1f
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/entities/sync/sync.go
@@ -0,0 +1,112 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+//  CONTACT: hello@weaviate.io
+//
+
+package sync
+
+import (
+	"sync"
+)
+
+// KeyLocker is a thread-safe wrapper of sync.Map.
+// Usage: it's used in order to lock a specific key in a map
+// to synchronize concurrent access to a code block.
+//
+//	locker.Lock(id)
+//	defer locker.Unlock(id)
+type KeyLocker struct {
+	m sync.Map
+}
+
+// NewKeyLocker creates a KeyLocker.
+func NewKeyLocker() *KeyLocker {
+	return &KeyLocker{
+		m: sync.Map{},
+	}
+}
+
+// Lock locks a specific bucket by its ID
+// to hold off any concurrent access to that specific item.
+//
+// Do not forget calling Unlock() after locking it.
+func (s *KeyLocker) Lock(ID string) {
+	iLock := &sync.Mutex{}
+	iLocks, _ := s.m.LoadOrStore(ID, iLock)
+
+	iLock = iLocks.(*sync.Mutex)
+	iLock.Lock()
+}
+
+// Unlock unlocks a specific item by its ID.
+func (s *KeyLocker) Unlock(ID string) {
+	iLocks, _ := s.m.Load(ID)
+	iLock := iLocks.(*sync.Mutex)
+	iLock.Unlock()
+}
+
+// KeyRWLocker is a thread-safe wrapper of sync.Map.
+// Usage: it's used in order to lock/rlock a specific key in a map
+// to synchronize concurrent access to a code block.
+//
+//	locker.Lock(id)
+//	defer locker.Unlock(id)
+//
+// or
+//
+//	locker.RLock(id)
+//	defer locker.RUnlock(id)
+type KeyRWLocker struct {
+	m sync.Map
+}
+
+// NewKeyRWLocker creates a KeyRWLocker.
+func NewKeyRWLocker() *KeyRWLocker {
+	return &KeyRWLocker{
+		m: sync.Map{},
+	}
+}
+
+// Lock locks a specific bucket by its ID
+// to hold off any concurrent access to that specific item.
+//
+// Do not forget calling Unlock() after locking it.
+func (s *KeyRWLocker) Lock(ID string) {
+	iLock := &sync.RWMutex{}
+	iLocks, _ := s.m.LoadOrStore(ID, iLock)
+
+	iLock = iLocks.(*sync.RWMutex)
+	iLock.Lock()
+}
+
+// Unlock unlocks a specific item by its ID.
+func (s *KeyRWLocker) Unlock(ID string) {
+	iLocks, _ := s.m.Load(ID)
+	iLock := iLocks.(*sync.RWMutex)
+	iLock.Unlock()
+}
+
+// RLock read-locks a specific bucket by its ID
+// to hold off any concurrent write access to that specific item.
+//
+// Do not forget calling RUnlock() after rlocking it.
+func (s *KeyRWLocker) RLock(ID string) {
+	iLock := &sync.RWMutex{}
+	iLocks, _ := s.m.LoadOrStore(ID, iLock)
+
+	iLock = iLocks.(*sync.RWMutex)
+	iLock.RLock()
+}
+
+// RUnlock read-unlocks a specific item by its ID.
+func (s *KeyRWLocker) RUnlock(ID string) {
+	iLocks, _ := s.m.Load(ID)
+	iLock := iLocks.(*sync.RWMutex)
+	iLock.RUnlock()
+}
diff --git a/platform/dbops/binaries/weaviate-src/entities/sync/sync_test.go b/platform/dbops/binaries/weaviate-src/entities/sync/sync_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..6c1d675c5347dda4341a88c127f9a1471940518d
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/entities/sync/sync_test.go
@@ -0,0 +1,116 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+//  CONTACT: hello@weaviate.io
+//
+
+package sync
+
+import (
+	"sync"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func mutexLocked(m *sync.Mutex) bool {
+	rlocked := m.TryLock()
+	if rlocked {
+		defer m.Unlock()
+	}
+	return !rlocked
+}
+
+func rwMutexLocked(m *sync.RWMutex) bool {
+	// cannot RLock while a write lock is held
+	rlocked := m.TryRLock()
+	if rlocked {
+		defer m.RUnlock()
+	}
+	return !rlocked
+}
+
+func rwMutexRLocked(m *sync.RWMutex) bool {
+	// cannot Lock, but can RLock, while read locks are held
+	locked := m.TryLock()
+	if locked {
+		defer m.Unlock()
+		return false
+	}
+	rlocked := m.TryRLock()
+	if rlocked {
+		defer m.RUnlock()
+	}
+	return rlocked
+}
+
+func TestKeyLockerLockUnlock(t *testing.T) {
+	r := require.New(t)
+	s := NewKeyLocker()
+
+	s.Lock("t1")
+	lock, _ := s.m.Load("t1")
+	r.True(mutexLocked(lock.(*sync.Mutex)))
+
+	s.Unlock("t1")
+	lock, _ = s.m.Load("t1")
+	r.False(mutexLocked(lock.(*sync.Mutex)))
+
+	s.Lock("t2")
+	lock, _ = s.m.Load("t2")
+	r.True(mutexLocked(lock.(*sync.Mutex)))
+
+	s.Unlock("t2")
+	lock, _ = s.m.Load("t2")
+	r.False(mutexLocked(lock.(*sync.Mutex)))
+}
+
+func TestKeyRWLockerLockUnlock(t *testing.T) {
+	r := require.New(t)
+	s := NewKeyRWLocker()
+
+	s.Lock("t1")
+	lock, _ := s.m.Load("t1")
+	r.True(rwMutexLocked(lock.(*sync.RWMutex)))
+	r.False(rwMutexRLocked(lock.(*sync.RWMutex)))
+
+	s.Unlock("t1")
+	lock, _ = s.m.Load("t1")
+	r.False(rwMutexLocked(lock.(*sync.RWMutex)))
+	r.False(rwMutexRLocked(lock.(*sync.RWMutex)))
+
+	s.Lock("t2")
+	lock, _ = s.m.Load("t2")
+	r.True(rwMutexLocked(lock.(*sync.RWMutex)))
+	r.False(rwMutexRLocked(lock.(*sync.RWMutex)))
+
+	s.Unlock("t2")
+	lock, _ = s.m.Load("t2")
+	r.False(rwMutexLocked(lock.(*sync.RWMutex)))
+	r.False(rwMutexRLocked(lock.(*sync.RWMutex)))
+
+	s.RLock("t1")
+	lock, _ = s.m.Load("t1")
+	r.False(rwMutexLocked(lock.(*sync.RWMutex)))
+	r.True(rwMutexRLocked(lock.(*sync.RWMutex)))
+
+	s.RUnlock("t1")
+	lock, _ = s.m.Load("t1")
+	r.False(rwMutexLocked(lock.(*sync.RWMutex)))
+	r.False(rwMutexRLocked(lock.(*sync.RWMutex)))
+
+	s.RLock("t2")
+	lock, _ = s.m.Load("t2")
+	r.False(rwMutexLocked(lock.(*sync.RWMutex)))
+	r.True(rwMutexRLocked(lock.(*sync.RWMutex)))
+
+	s.RUnlock("t2")
+	lock, _ = s.m.Load("t2")
+	r.False(rwMutexLocked(lock.(*sync.RWMutex)))
+	r.False(rwMutexRLocked(lock.(*sync.RWMutex)))
+}
diff --git a/platform/dbops/binaries/weaviate-src/entities/tenantactivity/types.go b/platform/dbops/binaries/weaviate-src/entities/tenantactivity/types.go
new file mode 100644
index 0000000000000000000000000000000000000000..666f7f7970fbf57a605b0dda9cfa46683cb338b3
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/entities/tenantactivity/types.go
@@ -0,0 +1,26 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+// +// CONTACT: hello@weaviate.io +// + +package tenantactivity + +import "time" + +type ByCollection map[string]ByTenant + +type ByTenant map[string]time.Time + +type UsageFilter int + +const ( + UsageFilterAll UsageFilter = iota + UsageFilterOnlyReads + UsageFilterOnlyWrites +) diff --git a/platform/dbops/binaries/weaviate-src/entities/tokenizer/tokenizer.go b/platform/dbops/binaries/weaviate-src/entities/tokenizer/tokenizer.go new file mode 100644 index 0000000000000000000000000000000000000000..7be94f6ae44dba8dba75792fa2524662615bfd74 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/tokenizer/tokenizer.go @@ -0,0 +1,452 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package tokenizer + +import ( + "os" + "runtime" + "strconv" + "strings" + "sync" + "time" + "unicode" + + entcfg "github.com/weaviate/weaviate/entities/config" + + "github.com/go-ego/gse" + koDict "github.com/ikawaha/kagome-dict-ko" + "github.com/ikawaha/kagome-dict/ipa" + kagomeTokenizer "github.com/ikawaha/kagome/v2/tokenizer" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/monitoring" +) + +var ( + gseTokenizer *gse.Segmenter // Japanese + gseTokenizerCh *gse.Segmenter // Chinese + gseLock = &sync.Mutex{} // Lock for gse + UseGse = false // Load Japanese dictionary and prepare tokenizer + UseGseCh = false // Load Chinese dictionary and prepare tokenizer + KagomeKrEnabled = false // Load Korean dictionary and prepare tokenizer + KagomeJaEnabled = false // Load Japanese dictionary and prepare tokenizer + // The Tokenizer Libraries can consume a lot of memory, so we limit the number of parallel tokenizers + ApacTokenizerThrottle = chan struct{}(nil) // Throttle for tokenizers + tokenizers 
KagomeTokenizers // Tokenizers for Korean and Japanese + kagomeInitLock sync.Mutex // Lock for kagome initialization +) + +type KagomeTokenizers struct { + Korean *kagomeTokenizer.Tokenizer + Japanese *kagomeTokenizer.Tokenizer +} + +// Optional tokenizers can be enabled with an environment variable like: +// 'ENABLE_TOKENIZER_XXX', e.g. 'ENABLE_TOKENIZER_GSE', 'ENABLE_TOKENIZER_KAGOME_KR', 'ENABLE_TOKENIZER_KAGOME_JA' +var Tokenizations []string = []string{ + models.PropertyTokenizationWord, + models.PropertyTokenizationLowercase, + models.PropertyTokenizationWhitespace, + models.PropertyTokenizationField, + models.PropertyTokenizationTrigram, +} + +func init() { + numParallel := runtime.GOMAXPROCS(0) + numParallelStr := os.Getenv("TOKENIZER_CONCURRENCY_COUNT") + if numParallelStr != "" { + x, err := strconv.Atoi(numParallelStr) + if err == nil { + numParallel = x + } + } + ApacTokenizerThrottle = make(chan struct{}, numParallel) + if entcfg.Enabled(os.Getenv("USE_GSE")) || entcfg.Enabled(os.Getenv("ENABLE_TOKENIZER_GSE")) { + UseGse = true + Tokenizations = append(Tokenizations, models.PropertyTokenizationGse) + init_gse() + } + if entcfg.Enabled(os.Getenv("ENABLE_TOKENIZER_GSE_CH")) { + Tokenizations = append(Tokenizations, models.PropertyTokenizationGseCh) + UseGseCh = true + init_gse_ch() + } + if entcfg.Enabled(os.Getenv("ENABLE_TOKENIZER_KAGOME_KR")) { + Tokenizations = append(Tokenizations, models.PropertyTokenizationKagomeKr) + } + if entcfg.Enabled(os.Getenv("ENABLE_TOKENIZER_KAGOME_JA")) { + Tokenizations = append(Tokenizations, models.PropertyTokenizationKagomeJa) + } + _ = initializeKagomeTokenizerKr() + _ = initializeKagomeTokenizerJa() +} + +func init_gse() { + gseLock.Lock() + defer gseLock.Unlock() + if gseTokenizer == nil { + startTime := time.Now() + seg, err := gse.New("ja") + if err != nil { + return + } + gseTokenizer = &seg + monitoring.GetMetrics().TokenizerInitializeDuration.WithLabelValues("gse").Observe(time.Since(startTime).Seconds()) + 
} +} + +func init_gse_ch() { + gseLock.Lock() + defer gseLock.Unlock() + if gseTokenizerCh == nil { + startTime := time.Now() + seg, err := gse.New("zh") + if err != nil { + return + } + gseTokenizerCh = &seg + monitoring.GetMetrics().TokenizerInitializeDuration.WithLabelValues("gse").Observe(time.Since(startTime).Seconds()) + } +} + +func Tokenize(tokenization string, in string) []string { + switch tokenization { + case models.PropertyTokenizationWord: + return tokenizeWord(in) + case models.PropertyTokenizationLowercase: + return tokenizeLowercase(in) + case models.PropertyTokenizationWhitespace: + return tokenizeWhitespace(in) + case models.PropertyTokenizationField: + return tokenizeField(in) + case models.PropertyTokenizationTrigram: + return tokenizetrigram(in) + case models.PropertyTokenizationGse: + ApacTokenizerThrottle <- struct{}{} + defer func() { <-ApacTokenizerThrottle }() + return tokenizeGSE(in) + case models.PropertyTokenizationGseCh: + ApacTokenizerThrottle <- struct{}{} + defer func() { <-ApacTokenizerThrottle }() + return tokenizeGseCh(in) + case models.PropertyTokenizationKagomeKr: + ApacTokenizerThrottle <- struct{}{} + defer func() { <-ApacTokenizerThrottle }() + return tokenizeKagomeKr(in) + case models.PropertyTokenizationKagomeJa: + ApacTokenizerThrottle <- struct{}{} + defer func() { <-ApacTokenizerThrottle }() + return tokenizeKagomeJa(in) + default: + return []string{} + } +} + +func TokenizeWithWildcards(tokenization string, in string) []string { + switch tokenization { + case models.PropertyTokenizationWord: + return tokenizeWordWithWildcards(in) + case models.PropertyTokenizationLowercase: + return tokenizeLowercase(in) + case models.PropertyTokenizationWhitespace: + return tokenizeWhitespace(in) + case models.PropertyTokenizationField: + return tokenizeField(in) + case models.PropertyTokenizationTrigram: + return tokenizetrigramWithWildcards(in) + case models.PropertyTokenizationGse: + ApacTokenizerThrottle <- struct{}{} + defer 
func() { <-ApacTokenizerThrottle }() + return tokenizeGSE(in) + case models.PropertyTokenizationGseCh: + ApacTokenizerThrottle <- struct{}{} + defer func() { <-ApacTokenizerThrottle }() + return tokenizeGseCh(in) + case models.PropertyTokenizationKagomeKr: + ApacTokenizerThrottle <- struct{}{} + defer func() { <-ApacTokenizerThrottle }() + return tokenizeKagomeKr(in) + case models.PropertyTokenizationKagomeJa: + ApacTokenizerThrottle <- struct{}{} + defer func() { <-ApacTokenizerThrottle }() + return tokenizeKagomeJa(in) + default: + return []string{} + } +} + +func removeEmptyStrings(terms []string) []string { + for i := 0; i < len(terms); i++ { + if terms[i] == "" || terms[i] == " " { + terms = append(terms[:i], terms[i+1:]...) + i-- + } + } + return terms +} + +// tokenizeField trims white spaces +// (former DataTypeString/Field) +func tokenizeField(in string) []string { + startTime := time.Now() + ret := []string{strings.TrimFunc(in, unicode.IsSpace)} + monitoring.GetMetrics().TokenizerDuration.WithLabelValues("field").Observe(float64(time.Since(startTime).Seconds())) + monitoring.GetMetrics().TokenCount.WithLabelValues("field").Add(float64(len(ret))) + monitoring.GetMetrics().TokenCountPerRequest.WithLabelValues("field").Observe(float64(len(ret))) + return ret +} + +// tokenizeWhitespace splits on white spaces, does not alter casing +// (former DataTypeString/Word) +func tokenizeWhitespace(in string) []string { + startTime := time.Now() + ret := strings.FieldsFunc(in, unicode.IsSpace) + monitoring.GetMetrics().TokenizerDuration.WithLabelValues("whitespace").Observe(float64(time.Since(startTime).Seconds())) + monitoring.GetMetrics().TokenCount.WithLabelValues("whitespace").Add(float64(len(ret))) + monitoring.GetMetrics().TokenCountPerRequest.WithLabelValues("whitespace").Observe(float64(len(ret))) + return ret +} + +// tokenizeLowercase splits on white spaces and lowercases the words +func tokenizeLowercase(in string) []string { + startTime := time.Now() + 
terms := tokenizeWhitespace(in) + ret := lowercase(terms) + monitoring.GetMetrics().TokenizerDuration.WithLabelValues("lowercase").Observe(float64(time.Since(startTime).Seconds())) + return ret +} + +// tokenizeWord splits on any non-alphanumerical and lowercases the words +// (former DataTypeText/Word) +func tokenizeWord(in string) []string { + startTime := time.Now() + terms := strings.FieldsFunc(in, func(r rune) bool { + return !unicode.IsLetter(r) && !unicode.IsNumber(r) + }) + ret := lowercase(terms) + monitoring.GetMetrics().TokenizerDuration.WithLabelValues("word").Observe(float64(time.Since(startTime).Seconds())) + monitoring.GetMetrics().TokenCount.WithLabelValues("word").Add(float64(len(ret))) + monitoring.GetMetrics().TokenCountPerRequest.WithLabelValues("word").Observe(float64(len(ret))) + return ret +} + +// tokenizetrigram splits on any non-alphanumerical and lowercases the words, joins them together, then groups them into trigrams +func tokenizetrigram(in string) []string { + startTime := time.Now() + // Strip whitespace and punctuation from the input string + inputString := strings.ToLower(strings.Join(strings.FieldsFunc(in, func(r rune) bool { + return !unicode.IsLetter(r) && !unicode.IsNumber(r) + }), "")) + runes := []rune(inputString) + var trirunes [][]rune + for i := 0; i < len(runes)-2; i++ { + trirunes = append(trirunes, runes[i:i+3]) + } + + var trigrams []string + for _, trirune := range trirunes { + trigrams = append(trigrams, string(trirune)) + } + monitoring.GetMetrics().TokenizerDuration.WithLabelValues("trigram").Observe(float64(time.Since(startTime).Seconds())) + monitoring.GetMetrics().TokenCount.WithLabelValues("trigram").Add(float64(len(trigrams))) + monitoring.GetMetrics().TokenCountPerRequest.WithLabelValues("trigram").Observe(float64(len(trigrams))) + return trigrams +} + +// tokenizeGSE uses the gse tokenizer to tokenise Japanese +func tokenizeGSE(in string) []string { + if !UseGse { + return []string{} + } + startTime := 
time.Now()
+	gseLock.Lock()
+	defer gseLock.Unlock()
+	terms := gseTokenizer.CutAll(in)
+
+	ret := removeEmptyStrings(terms)
+
+	monitoring.GetMetrics().TokenizerDuration.WithLabelValues("gse").Observe(float64(time.Since(startTime).Seconds()))
+	monitoring.GetMetrics().TokenCount.WithLabelValues("gse").Add(float64(len(ret)))
+	monitoring.GetMetrics().TokenCountPerRequest.WithLabelValues("gse").Observe(float64(len(ret)))
+	return ret
+}
+
+// tokenizeGseCh uses the gse tokenizer to tokenise Chinese
+func tokenizeGseCh(in string) []string {
+	if !UseGseCh {
+		return []string{}
+	}
+	gseLock.Lock()
+	defer gseLock.Unlock()
+	startTime := time.Now()
+	terms := gseTokenizerCh.CutAll(in)
+	ret := removeEmptyStrings(terms)
+
+	// NOTE(review): metrics are recorded under the "gse" label, the
+	// same label used by the Japanese tokenizer above — confirm
+	// whether a dedicated "gse_ch" label was intended.
+	monitoring.GetMetrics().TokenizerDuration.WithLabelValues("gse").Observe(float64(time.Since(startTime).Seconds()))
+	monitoring.GetMetrics().TokenCount.WithLabelValues("gse").Add(float64(len(ret)))
+	monitoring.GetMetrics().TokenCountPerRequest.WithLabelValues("gse").Observe(float64(len(ret)))
+	return ret
+}
+
+// initializeKagomeTokenizerKr lazily builds the shared Korean kagome
+// tokenizer when ENABLE_TOKENIZER_KAGOME_KR is enabled; it is a no-op
+// (returning nil) when the env var is unset or the tokenizer exists.
+func initializeKagomeTokenizerKr() error {
+	// Acquire lock to prevent initialization race
+	kagomeInitLock.Lock()
+	defer kagomeInitLock.Unlock()
+
+	if entcfg.Enabled(os.Getenv("ENABLE_TOKENIZER_KAGOME_KR")) {
+		if tokenizers.Korean != nil {
+			return nil
+		}
+		startTime := time.Now()
+
+		dictInstance := koDict.Dict()
+		tokenizer, err := kagomeTokenizer.New(dictInstance)
+		if err != nil {
+			return err
+		}
+
+		tokenizers.Korean = tokenizer
+		KagomeKrEnabled = true
+		monitoring.GetMetrics().TokenizerInitializeDuration.WithLabelValues("kagome_kr").Observe(float64(time.Since(startTime).Seconds()))
+		return nil
+	}
+
+	return nil
+}
+
+// tokenizeKagomeKr tokenizes Korean text with the shared kagome
+// tokenizer; it returns an empty slice when the tokenizer was never
+// initialized (see initializeKagomeTokenizerKr).
+func tokenizeKagomeKr(in string) []string {
+	tokenizer := tokenizers.Korean
+	if tokenizer == nil || !KagomeKrEnabled {
+		return []string{}
+	}
+	startTime := time.Now()
+
+	kagomeTokens := tokenizer.Tokenize(in)
+	terms := make([]string, 0, len(kagomeTokens))
+
+	for _, token := range kagomeTokens {
+		if 
token.Surface != "EOS" && token.Surface != "BOS" {
+			terms = append(terms, token.Surface)
+		}
+	}
+
+	ret := removeEmptyStrings(terms)
+	monitoring.GetMetrics().TokenizerDuration.WithLabelValues("kagome_kr").Observe(float64(time.Since(startTime).Seconds()))
+	monitoring.GetMetrics().TokenCount.WithLabelValues("kagome_kr").Add(float64(len(ret)))
+	monitoring.GetMetrics().TokenCountPerRequest.WithLabelValues("kagome_kr").Observe(float64(len(ret)))
+	return ret
+}
+
+// initializeKagomeTokenizerJa lazily builds the shared Japanese kagome
+// tokenizer (IPA dictionary) when ENABLE_TOKENIZER_KAGOME_JA is
+// enabled; it is a no-op (returning nil) otherwise.
+func initializeKagomeTokenizerJa() error {
+	// Acquire lock to prevent initialization race
+	kagomeInitLock.Lock()
+	defer kagomeInitLock.Unlock()
+
+	if entcfg.Enabled(os.Getenv("ENABLE_TOKENIZER_KAGOME_JA")) {
+		if tokenizers.Japanese != nil {
+			return nil
+		}
+		startTime := time.Now()
+		dictInstance := ipa.Dict()
+		tokenizer, err := kagomeTokenizer.New(dictInstance)
+		if err != nil {
+			return err
+		}
+
+		tokenizers.Japanese = tokenizer
+		KagomeJaEnabled = true
+		monitoring.GetMetrics().TokenizerInitializeDuration.WithLabelValues("kagome_ja").Observe(float64(time.Since(startTime).Seconds()))
+		return nil
+	}
+
+	return nil
+}
+
+// tokenizeKagomeJa tokenizes Japanese text in kagome's search mode,
+// dropping the sentence markers ("BOS"/"EOS") and lowercasing each
+// surface form.
+// NOTE(review): lowercasing is applied here but not in tokenizeKagomeKr
+// above — confirm the asymmetry is intended.
+func tokenizeKagomeJa(in string) []string {
+	tokenizer := tokenizers.Japanese
+	if tokenizer == nil || !KagomeJaEnabled {
+		return []string{}
+	}
+
+	startTime := time.Now()
+	kagomeTokens := tokenizer.Analyze(in, kagomeTokenizer.Search)
+	terms := make([]string, 0, len(kagomeTokens))
+
+	for _, token := range kagomeTokens {
+		if token.Surface != "EOS" && token.Surface != "BOS" {
+			terms = append(terms, strings.ToLower(token.Surface))
+		}
+	}
+
+	ret := removeEmptyStrings(terms)
+	monitoring.GetMetrics().TokenizerDuration.WithLabelValues("kagome_ja").Observe(float64(time.Since(startTime).Seconds()))
+	monitoring.GetMetrics().TokenCount.WithLabelValues("kagome_ja").Add(float64(len(ret)))
+	monitoring.GetMetrics().TokenCountPerRequest.WithLabelValues("kagome_ja").Observe(float64(len(ret)))
+	return ret
+}
+
+// tokenizeWordWithWildcards splits on any non-alphanumerical except 
wildcard-symbols and +// lowercases the words +func tokenizeWordWithWildcards(in string) []string { + startTime := time.Now() + terms := strings.FieldsFunc(in, func(r rune) bool { + return !unicode.IsLetter(r) && !unicode.IsNumber(r) && r != '?' && r != '*' + }) + ret := lowercase(terms) + monitoring.GetMetrics().TokenizerDuration.WithLabelValues("word_with_wildcards").Observe(float64(time.Since(startTime).Seconds())) + monitoring.GetMetrics().TokenCount.WithLabelValues("word_with_wildcards").Add(float64(len(ret))) + monitoring.GetMetrics().TokenCountPerRequest.WithLabelValues("word_with_wildcards").Observe(float64(len(ret))) + return ret +} + +// tokenizetrigramWithWildcards splits on any non-alphanumerical and lowercases the words, applies any wildcards, then joins them together, then groups them into trigrams +// this is unlikely to be useful, but is included for completeness +func tokenizetrigramWithWildcards(in string) []string { + startTime := time.Now() + terms := tokenizeWordWithWildcards(in) + inputString := strings.Join(terms, "") + var trigrams []string + for i := 0; i < len(inputString)-2; i++ { + trigrams = append(trigrams, inputString[i:i+3]) + } + monitoring.GetMetrics().TokenizerDuration.WithLabelValues("trigram_with_wildcards").Observe(float64(time.Since(startTime).Seconds())) + monitoring.GetMetrics().TokenCount.WithLabelValues("trigram_with_wildcards").Add(float64(len(trigrams))) + monitoring.GetMetrics().TokenCountPerRequest.WithLabelValues("trigram_with_wildcards").Observe(float64(len(trigrams))) + return trigrams +} + +func lowercase(terms []string) []string { + for i := range terms { + terms[i] = strings.ToLower(terms[i]) + } + monitoring.GetMetrics().TokenCount.WithLabelValues("lowercase").Add(float64(len(terms))) + monitoring.GetMetrics().TokenCountPerRequest.WithLabelValues("lowercase").Observe(float64(len(terms))) + return terms +} + +func TokenizeAndCountDuplicates(tokenization string, in string) ([]string, []int) { + counts := 
map[string]int{}
+	for _, term := range Tokenize(tokenization, in) {
+		counts[term]++
+	}
+
+	unique := make([]string, len(counts))
+	boosts := make([]int, len(counts))
+
+	i := 0
+	for term, boost := range counts {
+		unique[i] = term
+		boosts[i] = boost
+		i++
+	}
+
+	return unique, boosts
+}
diff --git a/platform/dbops/binaries/weaviate-src/entities/tokenizer/tokenizer_test.go b/platform/dbops/binaries/weaviate-src/entities/tokenizer/tokenizer_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..71ff3d83e83bff930ea605bcd582fff724d6ef86
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/entities/tokenizer/tokenizer_test.go
@@ -0,0 +1,277 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+//  CONTACT: hello@weaviate.io
+//
+
+package tokenizer
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/weaviate/weaviate/entities/models"
+)
+
+func TestTokeniseParallel(t *testing.T) {
+	UseGse = true
+	init_gse()
+	UseGseCh = true
+	init_gse_ch()
+	// Kagome tokenizer for Korean
+	t.Setenv("ENABLE_TOKENIZER_KAGOME_KR", "true")
+	_ = initializeKagomeTokenizerKr()
+
+	// Kagome tokenizer for Japanese
+	t.Setenv("ENABLE_TOKENIZER_KAGOME_JA", "true")
+	_ = initializeKagomeTokenizerJa()
+	// Launch the tokenisations concurrently and block until every
+	// goroutine has finished: without the wait the test would return
+	// (invalidating t) while tokenisation is still in flight.
+	done := make(chan struct{})
+	for i := 0; i < 1000; i++ {
+		go func() {
+			defer func() { done <- struct{}{} }()
+			SingleTokenise(t)
+		}()
+	}
+	for i := 0; i < 1000; i++ {
+		<-done
+	}
+}
+
+func SingleTokenise(t *testing.T) {
+	tokens := Tokenize(models.PropertyTokenizationTrigram, "Thequickbrownfoxjumpsoverthelazydog")
+	assert.Equal(t, []string{"the", "heq", "equ", "qui", "uic", "ick", "ckb", "kbr", "bro", "row", "own", "wnf", "nfo", "fox", "oxj", "xju", "jum", "ump", "mps", "pso", "sov", "ove", "ver", "ert", "rth", "the", "hel", "ela", "laz", "azy", "zyd", "ydo", "dog"}, tokens)
+
+	tokens = Tokenize(models.PropertyTokenizationTrigram, "The quick brown fox jumps 
over the lazy dog") + assert.Equal(t, []string{"the", "heq", "equ", "qui", "uic", "ick", "ckb", "kbr", "bro", "row", "own", "wnf", "nfo", "fox", "oxj", "xju", "jum", "ump", "mps", "pso", "sov", "ove", "ver", "ert", "rth", "the", "hel", "ela", "laz", "azy", "zyd", "ydo", "dog"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationTrigram, "いろはにほへとちりぬるをわかよたれそつねならむうゐのおくやまけふこえてあさきゆめみしゑひもせす") + assert.Equal(t, []string{"いろは", "ろはに", "はにほ", "にほへ", "ほへと", "へとち", "とちり", "ちりぬ", "りぬる", "ぬるを", "るをわ", "をわか", "わかよ", "かよた", "よたれ", "たれそ", "れそつ", "そつね", "つねな", "ねなら", "ならむ", "らむう", "むうゐ", "うゐの", "ゐのお", "のおく", "おくや", "くやま", "やまけ", "まけふ", "けふこ", "ふこえ", "こえて", "えてあ", "てあさ", "あさき", "さきゆ", "きゆめ", "ゆめみ", "めみし", "みしゑ", "しゑひ", "ゑひも", "ひもせ", "もせす"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationTrigram, `春の夜の夢はうつつよりもかなしき + 夏の夜の夢はうつつに似たり + 秋の夜の夢はうつつを超え + 冬の夜の夢は心に響く + + 山のあなたに小さな村が見える + 川の音が静かに耳に届く + 風が木々を通り抜ける音 + 星空の下、すべてが平和である`) + assert.Equal(t, []string{"春の夜", "の夜の", "夜の夢", "の夢は", "夢はう", "はうつ", "うつつ", "つつよ", "つより", "よりも", "りもか", "もかな", "かなし", "なしき", "しき夏", "き夏の", "夏の夜", "の夜の", "夜の夢", "の夢は", "夢はう", "はうつ", "うつつ", "つつに", "つに似", "に似た", "似たり", "たり秋", "り秋の", "秋の夜", "の夜の", "夜の夢", "の夢は", "夢はう", "はうつ", "うつつ", "つつを", "つを超", "を超え", "超え冬", "え冬の", "冬の夜", "の夜の", "夜の夢", "の夢は", "夢は心", "は心に", "心に響", "に響く", "響く山", "く山の", "山のあ", "のあな", "あなた", "なたに", "たに小", "に小さ", "小さな", "さな村", "な村が", "村が見", "が見え", "見える", "える川", "る川の", "川の音", "の音が", "音が静", "が静か", "静かに", "かに耳", "に耳に", "耳に届", "に届く", "届く風", "く風が", "風が木", "が木々", "木々を", "々を通", "を通り", "通り抜", "り抜け", "抜ける", "ける音", "る音星", "音星空", "星空の", "空の下", "の下す", "下すべ", "すべて", "べてが", "てが平", "が平和", "平和で", "和であ", "である"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationGse, `春の夜の夢はうつつよりもかなしき + 夏の夜の夢はうつつに似たり + 秋の夜の夢はうつつを超え + 冬の夜の夢は心に響く + + 山のあなたに小さな村が見える + 川の音が静かに耳に届く + 風が木々を通り抜ける音 + 星空の下、すべてが平和である`) + assert.Equal(t, []string{"春の", "夜", "の", "夢", "はう", "うつ", "うつつ", "つつ", "つよ", "より", "も", "かな", "かなし", "かなしき", "なし", "しき", "\n", "\t", "夏", "の", "夜", "の", "夢", 
"はう", "うつ", "うつつ", "つつ", "に", "似", "たり", "\n", "\t", "秋", "の", "夜", "の", "夢", "はう", "うつ", "うつつ", "つつ", "を", "超え", "\n", "\t", "冬", "の", "夜", "の", "夢", "は", "心", "に", "響く", "\n", "\n", "\t", "山", "の", "あな", "あなた", "に", "小さ", "小さな", "村", "が", "見え", "見える", "える", "\n", "\t", "川", "の", "音", "が", "静か", "かに", "耳", "に", "届く", "\n", "\t", "風", "が", "木々", "を", "通り", "通り抜け", "通り抜ける", "抜け", "抜ける", "ける", "音", "\n", "\t", "星空", "の", "下", "、", "すべ", "すべて", "が", "平和", "で", "ある"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationGse, "素早い茶色の狐が怠けた犬を飛び越えた") + assert.Equal(t, []string{"素早", "素早い", "早い", "茶色", "の", "狐", "が", "怠け", "けた", "犬", "を", "飛び", "飛び越え", "越え", "た"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationGse, "すばやいちゃいろのきつねがなまけたいぬをとびこえた") + assert.Equal(t, []string{"すばや", "すばやい", "やい", "いち", "ちゃ", "ちゃい", "ちゃいろ", "いろ", "のき", "きつ", "きつね", "つね", "ねが", "がな", "なま", "なまけ", "まけ", "けた", "けたい", "たい", "いぬ", "を", "とび", "とびこえ", "こえ", "た"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationGse, "スバヤイチャイロノキツネガナマケタイヌヲトビコエタ") + assert.Equal(t, []string{"スバ", "ヤイ", "イチ", "チャイ", "チャイロ", "ノ", "キツ", "キツネ", "ツネ", "ネガ", "ナマ", "ケタ", "タイ", "イヌ", "ヲ", "トビ", "コ", "エ", "タ"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationGse, "The quick brown fox jumps over the lazy dog") + assert.Equal(t, []string{"t", "h", "e", "q", "u", "i", "c", "k", "b", "r", "o", "w", "n", "f", "o", "x", "j", "u", "m", "p", "s", "o", "v", "e", "r", "t", "h", "e", "l", "a", "z", "y", "d", "o", "g"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationGseCh, "施氏食狮史石室诗士施氏,嗜狮,誓食十狮。氏时时适市视狮。十时,适十狮适市。是时,适施氏适市。氏视是十狮,恃矢势,使是十狮逝世。氏拾是十狮尸,适石室。石室湿,氏使侍拭石室。石室拭,氏始试食是十狮尸。食时,始识是十狮尸,实十石狮尸。试释是事。") + assert.Equal(t, []string{"施", "氏", "食", "狮", "史", "石室", "诗", "士", "施", "氏", ",", "嗜", "狮", ",", "誓", "食", "十", "狮", "。", "氏", "时时", "适", "市", "视", "狮", "。", "十时", ",", "适", "十", "狮", "适", "市", "。", "是", "时", ",", "适", "施", "氏", "适", "市", "。", "氏", "视", "是", "十", "狮", ",", "恃", "矢", "势", ",", "使", "是", "十", "狮", 
"逝世", "。", "氏", "拾", "是", "十", "狮", "尸", ",", "适", "石室", "。", "石室", "湿", ",", "氏", "使", "侍", "拭", "石室", "。", "石室", "拭", ",", "氏", "始", "试", "食", "是", "十", "狮", "尸", "。", "食", "时", ",", "始", "识", "是", "十", "狮", "尸", ",", "实", "十", "石狮", "尸", "。", "试", "释", "是", "事", "。"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationKagomeKr, "아버지가방에들어가신다") + assert.Equal(t, []string{"아버지", "가", "방", "에", "들어가", "신다"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationKagomeKr, "아버지가 방에 들어가신다") + assert.Equal(t, []string{"아버지", "가", "방", "에", "들어가", "신다"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationKagomeKr, "결정하겠다") + assert.Equal(t, []string{"결정", "하", "겠", "다"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationKagomeKr, "한국어를처리하는예시입니다") + assert.Equal(t, []string{"한국어", "를", "처리", "하", "는", "예시", "입니다"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationKagomeKr, "한국어를 처리하는 예시입니다") + assert.Equal(t, []string{"한국어", "를", "처리", "하", "는", "예시", "입니다"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationKagomeJa, `春の夜の夢はうつつよりもかなしき + 夏の夜の夢はうつつに似たり + 秋の夜の夢はうつつを超え + 冬の夜の夢は心に響く + + 山のあなたに小さな村が見える + 川の音が静かに耳に届く + 風が木々を通り抜ける音 + 星空の下、すべてが平和である`) + assert.Equal(t, []string{"春", "の", "夜", "の", "夢", "は", "うつつ", "より", "も", "かなしき", "\n\t", "夏", "の", "夜", "の", "夢", "は", "うつつ", "に", "似", "たり", "\n\t", "秋", "の", "夜", "の", "夢", "は", "うつつ", "を", "超え", "\n\t", "冬", "の", "夜", "の", "夢", "は", "心", "に", "響く", "\n\n\t", "山", "の", "あなた", "に", "小さな", "村", "が", "見える", "\n\t", "川", "の", "音", "が", "静か", "に", "耳", "に", "届く", "\n\t", "風", "が", "木々", "を", "通り抜ける", "音", "\n\t", "星空", "の", "下", "、", "すべて", "が", "平和", "で", "ある"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationKagomeJa, "素早い茶色の狐が怠けた犬を飛び越えた") + assert.Equal(t, []string{"素早い", "茶色", "の", "狐", "が", "怠け", "た", "犬", "を", "飛び越え", "た"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationKagomeJa, "すばやいちゃいろのきつねがなまけたいぬをとびこえた") + assert.Equal(t, []string{"すばやい", "ちゃ", "いろ", "の", "きつね", "が", 
"なまけ", "た", "いぬ", "を", "とびこえ", "た"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationKagomeJa, "スバヤイチャイロノキツネガナマケタイヌヲトビコエタ") + assert.Equal(t, []string{"スバ", "ヤイ", "チャイ", "ロノキツネガナマケタイヌヲトビコエタ"}, tokens) + + tokens = Tokenize(models.PropertyTokenizationKagomeJa, "The quick brown fox jumps over the lazy dog") + assert.Equal(t, []string{"the", "quick", "brown", "fox", "jumps", "over", "the", "lazy", "dog"}, tokens) +} + +func TestTokenize(t *testing.T) { + input := " Hello You*-beautiful_world?!" + + type testCase struct { + tokenization string + expected []string + } + + t.Run("tokenize", func(t *testing.T) { + testCases := []testCase{ + { + tokenization: models.PropertyTokenizationField, + expected: []string{"Hello You*-beautiful_world?!"}, + }, + { + tokenization: models.PropertyTokenizationWhitespace, + expected: []string{"Hello", "You*-beautiful_world?!"}, + }, + { + tokenization: models.PropertyTokenizationLowercase, + expected: []string{"hello", "you*-beautiful_world?!"}, + }, + { + tokenization: models.PropertyTokenizationWord, + expected: []string{"hello", "you", "beautiful", "world"}, + }, + } + + for _, tc := range testCases { + terms := Tokenize(tc.tokenization, input) + assert.ElementsMatch(t, tc.expected, terms) + } + }) + + t.Run("tokenize with wildcards", func(t *testing.T) { + testCases := []testCase{ + { + tokenization: models.PropertyTokenizationField, + expected: []string{"Hello You*-beautiful_world?!"}, + }, + { + tokenization: models.PropertyTokenizationWhitespace, + expected: []string{"Hello", "You*-beautiful_world?!"}, + }, + { + tokenization: models.PropertyTokenizationLowercase, + expected: []string{"hello", "you*-beautiful_world?!"}, + }, + { + tokenization: models.PropertyTokenizationWord, + expected: []string{"hello", "you*", "beautiful", "world?"}, + }, + } + + for _, tc := range testCases { + terms := TokenizeWithWildcards(tc.tokenization, input) + assert.ElementsMatch(t, tc.expected, terms) + } + }) +} + +func 
TestTokenizeAndCountDuplicates(t *testing.T) { + type testCase struct { + input string + tokenization string + expected map[string]int + } + + t.Setenv("ENABLE_TOKENIZER_KAGOME_KR", "true") + _ = initializeKagomeTokenizerKr() + + alphaInput := "Hello You Beautiful World! hello you beautiful world!" + + testCases := []testCase{ + { + input: alphaInput, + tokenization: models.PropertyTokenizationField, + expected: map[string]int{ + "Hello You Beautiful World! hello you beautiful world!": 1, + }, + }, + { + input: alphaInput, + tokenization: models.PropertyTokenizationWhitespace, + expected: map[string]int{ + "Hello": 1, + "You": 1, + "Beautiful": 1, + "World!": 1, + "hello": 1, + "you": 1, + "beautiful": 1, + "world!": 1, + }, + }, + { + input: alphaInput, + tokenization: models.PropertyTokenizationLowercase, + expected: map[string]int{ + "hello": 2, + "you": 2, + "beautiful": 2, + "world!": 2, + }, + }, + { + input: alphaInput, + tokenization: models.PropertyTokenizationWord, + expected: map[string]int{ + "hello": 2, + "you": 2, + "beautiful": 2, + "world": 2, + }, + }, + { + input: "한국어를 처리하는 예시입니다 한국어를 처리하는 예시입니다", + tokenization: models.PropertyTokenizationKagomeKr, + expected: map[string]int{ + "한국어": 2, + "를": 2, + "처리": 2, + "하": 2, + "는": 2, + "예시": 2, + "입니다": 2, + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.tokenization, func(t *testing.T) { + terms, dups := TokenizeAndCountDuplicates(tc.tokenization, tc.input) + + assert.Len(t, terms, len(tc.expected)) + assert.Len(t, dups, len(tc.expected)) + + for i := range terms { + assert.Contains(t, tc.expected, terms[i]) + assert.Equal(t, tc.expected[terms[i]], dups[i]) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/common/config.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/common/config.go new file mode 100644 index 0000000000000000000000000000000000000000..5b1a639f02c8350c01fa1a2d43b9541541a7591e --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/entities/vectorindex/common/config.go
@@ -0,0 +1,119 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+//  CONTACT: hello@weaviate.io
+//
+
+package common
+
+import (
+	"encoding/json"
+	"math"
+	"strconv"
+
+	"github.com/pkg/errors"
+)
+
+const (
+	DistanceCosine    = "cosine"
+	DistanceDot       = "dot"
+	DistanceL2Squared = "l2-squared"
+	DistanceManhattan = "manhattan"
+	DistanceHamming   = "hamming"
+
+	// Set these defaults if the user leaves them blank
+	DefaultVectorCacheMaxObjects = 1e12
+	DefaultDistanceMetric        = DistanceCosine
+)
+
+const (
+	CompressionBQ = "bq"
+	CompressionPQ = "pq"
+	CompressionSQ = "sq"
+	CompressionRQ = "rq"
+	NoCompression = "none"
+)
+
+// Tries to parse the int value from the map, if it overflows math.MaxInt64, it
+// uses math.MaxInt64 instead. This is to protect from rounding errors from
+// json marshalling where the type may be assumed as float64
+func OptionalIntFromMap(in map[string]interface{}, name string,
+	setFn func(v int),
+) error {
+	value, ok := in[name]
+	if !ok {
+		return nil
+	}
+
+	var asInt64 int64
+	var err error
+
+	// depending on whether we get the results from disk or from the REST API,
+	// numbers may be represented slightly differently
+	switch typed := value.(type) {
+	case json.Number:
+		asInt64, err = typed.Int64()
+	case float64:
+		asInt64 = int64(typed)
+	}
+	// NOTE(review): values of any other type fall through with
+	// asInt64 == 0, so setFn(0) replaces the caller's default —
+	// confirm this is intended rather than ignoring the key.
+	if err != nil {
+		// try to recover from error
+		if errors.Is(err, strconv.ErrRange) {
+			// NOTE(review): int(math.MaxInt64) wraps on 32-bit
+			// platforms; this assumes a 64-bit build.
+			setFn(int(math.MaxInt64))
+			return nil
+		}
+
+		return errors.Wrapf(err, "json.Number to int64 for %q", name)
+	}
+
+	setFn(int(asInt64))
+	return nil
+}
+
+// OptionalBoolFromMap invokes setFn with in[name] when the key exists
+// and holds a bool; any other case is silently ignored (nil error).
+func OptionalBoolFromMap(in map[string]interface{}, name string,
+	setFn func(v bool),
+) error {
+	value, ok := in[name]
+	if !ok {
+		return nil
+	}
+
+	asBool, ok := value.(bool)
+	if !ok {
+		return nil
+	}
+
+	setFn(asBool)
+	return nil
+}
+
+// OptionalStringFromMap invokes setFn with in[name] when the key exists
+// and holds a string; any other case is silently ignored (nil error).
+func OptionalStringFromMap(in map[string]interface{}, name string,
+	setFn func(v string),
+) error {
+	value, ok := in[name]
+	if !ok {
+		return nil
+	}
+
+	asString, ok := value.(string)
+	if !ok {
+		return nil
+	}
+
+	setFn(asString)
+	return nil
+}
diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/config.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/config.go
new file mode 100644
index 0000000000000000000000000000000000000000..4e92d42595940e351a6c5f787bb054faf84aacfa
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/config.go
@@ -0,0 +1,47 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+// +// CONTACT: hello@weaviate.io +// + +package vectorindex + +import ( + "fmt" + + schemaConfig "github.com/weaviate/weaviate/entities/schema/config" + "github.com/weaviate/weaviate/entities/vectorindex/dynamic" + "github.com/weaviate/weaviate/entities/vectorindex/flat" + "github.com/weaviate/weaviate/entities/vectorindex/hnsw" +) + +const ( + DefaultVectorIndexType = VectorIndexTypeHNSW + VectorIndexTypeHNSW = "hnsw" + VectorIndexTypeFLAT = "flat" + VectorIndexTypeDYNAMIC = "dynamic" +) + +// ParseAndValidateConfig from an unknown input value, as this is not further +// specified in the API to allow of exchanging the index type +func ParseAndValidateConfig(input interface{}, vectorIndexType string, isMultiVector bool) (schemaConfig.VectorIndexConfig, error) { + if len(vectorIndexType) == 0 { + vectorIndexType = DefaultVectorIndexType + } + + switch vectorIndexType { + case VectorIndexTypeHNSW: + return hnsw.ParseAndValidateConfig(input, isMultiVector) + case VectorIndexTypeFLAT: + return flat.ParseAndValidateConfig(input) + case VectorIndexTypeDYNAMIC: + return dynamic.ParseAndValidateConfig(input, isMultiVector) + default: + return nil, fmt.Errorf("invalid vector index %q. Supported types are hnsw and flat", vectorIndexType) + } +} diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/dynamic/config.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/dynamic/config.go new file mode 100644 index 0000000000000000000000000000000000000000..9bfc9f6253dbed1aa92622dc08cc47b26089130c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/dynamic/config.go @@ -0,0 +1,124 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+//
+//  CONTACT: hello@weaviate.io
+//
+
+package dynamic
+
+import (
+	"fmt"
+
+	schemaConfig "github.com/weaviate/weaviate/entities/schema/config"
+	"github.com/weaviate/weaviate/entities/vectorindex/common"
+	"github.com/weaviate/weaviate/entities/vectorindex/flat"
+	"github.com/weaviate/weaviate/entities/vectorindex/hnsw"
+)
+
+const (
+	DefaultThreshold = 10_000
+)
+
+// UserConfig bundles the user-facing settings of the dynamic index:
+// the object-count threshold plus the nested configs for both
+// underlying index types (hnsw and flat).
+type UserConfig struct {
+	Distance  string          `json:"distance"`
+	Threshold uint64          `json:"threshold"`
+	HnswUC    hnsw.UserConfig `json:"hnsw"`
+	FlatUC    flat.UserConfig `json:"flat"`
+}
+
+// IndexType returns the type of the underlying vector index, thus making sure
+// the schema.VectorIndexConfig interface is implemented
+func (u UserConfig) IndexType() string {
+	return "dynamic"
+}
+
+// DistanceName returns the configured distance metric.
+func (u UserConfig) DistanceName() string {
+	return u.Distance
+}
+
+// IsMultiVector is always false: a multi-vector sub-config is rejected
+// by ParseAndValidateConfig below.
+func (u UserConfig) IsMultiVector() bool {
+	return false
+}
+
+// SetDefaults in the user-specifyable part of the config
+func (u *UserConfig) SetDefaults() {
+	u.Threshold = DefaultThreshold
+	u.Distance = common.DefaultDistanceMetric
+	u.HnswUC = hnsw.NewDefaultUserConfig()
+	u.FlatUC = flat.NewDefaultUserConfig()
+}
+
+// NewDefaultUserConfig returns a UserConfig with every default applied.
+func NewDefaultUserConfig() UserConfig {
+	uc := UserConfig{}
+	uc.SetDefaults()
+	return uc
+}
+
+// ParseAndValidateConfig from an unknown input value, as this is not further
+// specified in the API to allow of exchanging the index type
+func ParseAndValidateConfig(input interface{}, isMultiVector bool) (schemaConfig.VectorIndexConfig, error) {
+	uc := UserConfig{}
+	uc.SetDefaults()
+
+	if input == nil {
+		return uc, nil
+	}
+
+	asMap, ok := input.(map[string]interface{})
+	if !ok || asMap == nil {
+		return uc, fmt.Errorf("input must be a non-nil map")
+	}
+
+	if err := common.OptionalStringFromMap(asMap, "distance", func(v string) {
+		uc.Distance = v
+	}); err != nil {
+		return uc, err
+	}
+
+	if err := common.OptionalIntFromMap(asMap, "threshold", func(v int) {
+		uc.Threshold = uint64(v)
+	}); err != nil {
+		return uc, err
+	}
+
+	hnswConfig, ok := asMap["hnsw"]
+	if ok && hnswConfig != nil {
+		hnswUC, err := hnsw.ParseAndValidateConfig(hnswConfig, isMultiVector)
+		if err != nil {
+			return uc, err
+		}
+
+		castedHnswUC, ok := hnswUC.(hnsw.UserConfig)
+		if !ok {
+			return uc, fmt.Errorf("invalid hnsw configuration")
+		}
+		uc.HnswUC = castedHnswUC
+		// NOTE(review): a multi-vector hnsw sub-config is rejected here,
+		// presumably because the flat half of the dynamic index cannot
+		// serve multi-vectors — confirm.
+		if uc.HnswUC.Multivector.Enabled {
+			return uc, fmt.Errorf("multi vector index is not supported for dynamic index")
+		}
+
+	}
+
+	flatConfig, ok := asMap["flat"]
+	if !ok || flatConfig == nil {
+		return uc, nil
+	}
+
+	flatUC, err := flat.ParseAndValidateConfig(flatConfig)
+	if err != nil {
+		return uc, err
+	}
+
+	castedFlatUC, ok := flatUC.(flat.UserConfig)
+	if !ok {
+		return uc, fmt.Errorf("invalid flat configuration")
+	}
+	uc.FlatUC = castedFlatUC
+
+	return uc, nil
+}
diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/dynamic/config_test.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/dynamic/config_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..4887b5721571ec8833c106c1482ab96cda830ad1
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/dynamic/config_test.go
@@ -0,0 +1,395 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+// +// CONTACT: hello@weaviate.io +// + +package dynamic + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/vectorindex/common" + "github.com/weaviate/weaviate/entities/vectorindex/flat" + "github.com/weaviate/weaviate/entities/vectorindex/hnsw" +) + +func Test_DynamicUserConfig(t *testing.T) { + type test struct { + name string + input interface{} + expected UserConfig + expectErr bool + expectErrMsg string + } + + tests := []test{ + { + name: "nothing specified, all defaults", + input: nil, + expected: UserConfig{ + Distance: common.DefaultDistanceMetric, + Threshold: DefaultThreshold, + HnswUC: hnsw.UserConfig{ + CleanupIntervalSeconds: hnsw.DefaultCleanupIntervalSeconds, + MaxConnections: hnsw.DefaultMaxConnections, + EFConstruction: hnsw.DefaultEFConstruction, + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + EF: hnsw.DefaultEF, + Skip: hnsw.DefaultSkip, + FlatSearchCutoff: hnsw.DefaultFlatSearchCutoff, + DynamicEFMin: hnsw.DefaultDynamicEFMin, + DynamicEFMax: hnsw.DefaultDynamicEFMax, + DynamicEFFactor: hnsw.DefaultDynamicEFFactor, + Distance: common.DefaultDistanceMetric, + PQ: hnsw.PQConfig{ + Enabled: hnsw.DefaultPQEnabled, + Segments: hnsw.DefaultPQSegments, + Centroids: hnsw.DefaultPQCentroids, + TrainingLimit: hnsw.DefaultPQTrainingLimit, + Encoder: hnsw.PQEncoder{ + Type: hnsw.DefaultPQEncoderType, + Distribution: hnsw.DefaultPQEncoderDistribution, + }, + }, + SQ: hnsw.SQConfig{ + Enabled: hnsw.DefaultSQEnabled, + TrainingLimit: hnsw.DefaultSQTrainingLimit, + RescoreLimit: hnsw.DefaultSQRescoreLimit, + }, + RQ: hnsw.RQConfig{ + Enabled: hnsw.DefaultRQEnabled, + Bits: hnsw.DefaultRQBits, + RescoreLimit: hnsw.DefaultRQRescoreLimit, + }, + FilterStrategy: hnsw.DefaultFilterStrategy, + Multivector: hnsw.MultivectorConfig{ + Enabled: hnsw.DefaultMultivectorEnabled, + Aggregation: hnsw.DefaultMultivectorAggregation, + MuveraConfig: hnsw.MuveraConfig{ 
+ Enabled: hnsw.DefaultMultivectorMuveraEnabled, + KSim: hnsw.DefaultMultivectorKSim, + DProjections: hnsw.DefaultMultivectorDProjections, + Repetitions: hnsw.DefaultMultivectorRepetitions, + }, + }, + }, + FlatUC: flat.UserConfig{ + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + Distance: common.DefaultDistanceMetric, + PQ: flat.CompressionUserConfig{ + Enabled: flat.DefaultCompressionEnabled, + RescoreLimit: flat.DefaultCompressionRescore, + Cache: flat.DefaultVectorCache, + }, + BQ: flat.CompressionUserConfig{ + Enabled: flat.DefaultCompressionEnabled, + RescoreLimit: flat.DefaultCompressionRescore, + Cache: flat.DefaultVectorCache, + }, + SQ: flat.CompressionUserConfig{ + Enabled: flat.DefaultCompressionEnabled, + RescoreLimit: flat.DefaultCompressionRescore, + Cache: flat.DefaultVectorCache, + }, + }, + }, + }, + { + name: "threshold is properly set", + input: map[string]interface{}{ + "threshold": float64(100), + }, + expected: UserConfig{ + Distance: common.DefaultDistanceMetric, + Threshold: 100, + HnswUC: hnsw.UserConfig{ + CleanupIntervalSeconds: hnsw.DefaultCleanupIntervalSeconds, + MaxConnections: hnsw.DefaultMaxConnections, + EFConstruction: hnsw.DefaultEFConstruction, + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + EF: hnsw.DefaultEF, + Skip: hnsw.DefaultSkip, + FlatSearchCutoff: hnsw.DefaultFlatSearchCutoff, + DynamicEFMin: hnsw.DefaultDynamicEFMin, + DynamicEFMax: hnsw.DefaultDynamicEFMax, + DynamicEFFactor: hnsw.DefaultDynamicEFFactor, + Distance: common.DefaultDistanceMetric, + PQ: hnsw.PQConfig{ + Enabled: hnsw.DefaultPQEnabled, + Segments: hnsw.DefaultPQSegments, + Centroids: hnsw.DefaultPQCentroids, + TrainingLimit: hnsw.DefaultPQTrainingLimit, + Encoder: hnsw.PQEncoder{ + Type: hnsw.DefaultPQEncoderType, + Distribution: hnsw.DefaultPQEncoderDistribution, + }, + }, + SQ: hnsw.SQConfig{ + Enabled: hnsw.DefaultSQEnabled, + TrainingLimit: hnsw.DefaultSQTrainingLimit, + RescoreLimit: hnsw.DefaultSQRescoreLimit, + }, + 
RQ: hnsw.RQConfig{ + Enabled: hnsw.DefaultRQEnabled, + Bits: hnsw.DefaultRQBits, + RescoreLimit: hnsw.DefaultRQRescoreLimit, + }, + FilterStrategy: hnsw.DefaultFilterStrategy, + Multivector: hnsw.MultivectorConfig{ + Enabled: hnsw.DefaultMultivectorEnabled, + Aggregation: hnsw.DefaultMultivectorAggregation, + MuveraConfig: hnsw.MuveraConfig{ + Enabled: hnsw.DefaultMultivectorMuveraEnabled, + KSim: hnsw.DefaultMultivectorKSim, + DProjections: hnsw.DefaultMultivectorDProjections, + Repetitions: hnsw.DefaultMultivectorRepetitions, + }, + }, + }, + FlatUC: flat.UserConfig{ + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + Distance: common.DefaultDistanceMetric, + PQ: flat.CompressionUserConfig{ + Enabled: flat.DefaultCompressionEnabled, + RescoreLimit: flat.DefaultCompressionRescore, + Cache: flat.DefaultVectorCache, + }, + BQ: flat.CompressionUserConfig{ + Enabled: flat.DefaultCompressionEnabled, + RescoreLimit: flat.DefaultCompressionRescore, + Cache: flat.DefaultVectorCache, + }, + SQ: flat.CompressionUserConfig{ + Enabled: flat.DefaultCompressionEnabled, + RescoreLimit: flat.DefaultCompressionRescore, + Cache: flat.DefaultVectorCache, + }, + }, + }, + }, + { + name: "hnsw is properly set", + input: map[string]interface{}{ + "hnsw": map[string]interface{}{ + "cleanupIntervalSeconds": float64(11), + "maxConnections": float64(12), + "efConstruction": float64(13), + "vectorCacheMaxObjects": float64(14), + "ef": float64(15), + "flatSearchCutoff": float64(16), + "dynamicEfMin": float64(17), + "dynamicEfMax": float64(18), + "dynamicEfFactor": float64(19), + "pq": map[string]interface{}{ + "enabled": true, + "bitCompression": false, + "segments": float64(64), + "centroids": float64(200), + "trainingLimit": float64(100), + "encoder": map[string]interface{}{ + "type": hnsw.PQEncoderTypeKMeans, + }, + }, + "filterStrategy": hnsw.FilterStrategyAcorn, + }, + }, + expected: UserConfig{ + Distance: common.DefaultDistanceMetric, + Threshold: DefaultThreshold, + 
HnswUC: hnsw.UserConfig{ + CleanupIntervalSeconds: 11, + MaxConnections: 12, + EFConstruction: 13, + VectorCacheMaxObjects: 14, + EF: 15, + FlatSearchCutoff: 16, + DynamicEFMin: 17, + DynamicEFMax: 18, + DynamicEFFactor: 19, + Distance: common.DefaultDistanceMetric, + PQ: hnsw.PQConfig{ + Enabled: true, + Segments: 64, + Centroids: 200, + TrainingLimit: 100, + Encoder: hnsw.PQEncoder{ + Type: hnsw.DefaultPQEncoderType, + Distribution: hnsw.DefaultPQEncoderDistribution, + }, + }, + SQ: hnsw.SQConfig{ + Enabled: hnsw.DefaultSQEnabled, + TrainingLimit: hnsw.DefaultSQTrainingLimit, + RescoreLimit: hnsw.DefaultSQRescoreLimit, + }, + RQ: hnsw.RQConfig{ + Enabled: hnsw.DefaultRQEnabled, + Bits: hnsw.DefaultRQBits, + RescoreLimit: hnsw.DefaultRQRescoreLimit, + }, + FilterStrategy: hnsw.FilterStrategyAcorn, + Multivector: hnsw.MultivectorConfig{ + Enabled: hnsw.DefaultMultivectorEnabled, + Aggregation: hnsw.DefaultMultivectorAggregation, + MuveraConfig: hnsw.MuveraConfig{ + Enabled: hnsw.DefaultMultivectorMuveraEnabled, + KSim: hnsw.DefaultMultivectorKSim, + DProjections: hnsw.DefaultMultivectorDProjections, + Repetitions: hnsw.DefaultMultivectorRepetitions, + }, + }, + }, + FlatUC: flat.UserConfig{ + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + Distance: common.DefaultDistanceMetric, + PQ: flat.CompressionUserConfig{ + Enabled: flat.DefaultCompressionEnabled, + RescoreLimit: flat.DefaultCompressionRescore, + Cache: flat.DefaultVectorCache, + }, + BQ: flat.CompressionUserConfig{ + Enabled: flat.DefaultCompressionEnabled, + RescoreLimit: flat.DefaultCompressionRescore, + Cache: flat.DefaultVectorCache, + }, + SQ: flat.CompressionUserConfig{ + Enabled: flat.DefaultCompressionEnabled, + RescoreLimit: flat.DefaultCompressionRescore, + Cache: flat.DefaultVectorCache, + }, + }, + }, + }, + { + name: "dynamic index is set with multivector", + input: map[string]interface{}{ + "hnsw": map[string]interface{}{ + "multivector": map[string]interface{}{ + "enabled": 
true, + }, + }, + }, + expectErr: true, + expectErrMsg: "multi vector index is not supported for dynamic index", + }, + { + name: "flat is properly set", + input: map[string]interface{}{ + "flat": map[string]interface{}{ + "vectorCacheMaxObjects": float64(100), + "distance": "cosine", + "bq": map[string]interface{}{ + "enabled": true, + "rescoreLimit": float64(100), + "cache": true, + }, + }, + }, + expected: UserConfig{ + Distance: common.DefaultDistanceMetric, + Threshold: DefaultThreshold, + HnswUC: hnsw.UserConfig{ + CleanupIntervalSeconds: hnsw.DefaultCleanupIntervalSeconds, + MaxConnections: hnsw.DefaultMaxConnections, + EFConstruction: hnsw.DefaultEFConstruction, + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + EF: hnsw.DefaultEF, + Skip: hnsw.DefaultSkip, + FlatSearchCutoff: hnsw.DefaultFlatSearchCutoff, + DynamicEFMin: hnsw.DefaultDynamicEFMin, + DynamicEFMax: hnsw.DefaultDynamicEFMax, + DynamicEFFactor: hnsw.DefaultDynamicEFFactor, + Distance: common.DefaultDistanceMetric, + PQ: hnsw.PQConfig{ + Enabled: hnsw.DefaultPQEnabled, + Segments: hnsw.DefaultPQSegments, + Centroids: hnsw.DefaultPQCentroids, + TrainingLimit: hnsw.DefaultPQTrainingLimit, + Encoder: hnsw.PQEncoder{ + Type: hnsw.DefaultPQEncoderType, + Distribution: hnsw.DefaultPQEncoderDistribution, + }, + }, + SQ: hnsw.SQConfig{ + Enabled: hnsw.DefaultSQEnabled, + TrainingLimit: hnsw.DefaultSQTrainingLimit, + RescoreLimit: hnsw.DefaultSQRescoreLimit, + }, + RQ: hnsw.RQConfig{ + Enabled: hnsw.DefaultRQEnabled, + Bits: hnsw.DefaultRQBits, + RescoreLimit: hnsw.DefaultRQRescoreLimit, + }, + FilterStrategy: hnsw.DefaultFilterStrategy, + Multivector: hnsw.MultivectorConfig{ + Enabled: hnsw.DefaultMultivectorEnabled, + Aggregation: hnsw.DefaultMultivectorAggregation, + MuveraConfig: hnsw.MuveraConfig{ + Enabled: hnsw.DefaultMultivectorMuveraEnabled, + KSim: hnsw.DefaultMultivectorKSim, + DProjections: hnsw.DefaultMultivectorDProjections, + Repetitions: hnsw.DefaultMultivectorRepetitions, + 
}, + }, + }, + FlatUC: flat.UserConfig{ + VectorCacheMaxObjects: 100, + Distance: common.DefaultDistanceMetric, + PQ: flat.CompressionUserConfig{ + Enabled: false, + RescoreLimit: flat.DefaultCompressionRescore, + Cache: flat.DefaultVectorCache, + }, + BQ: flat.CompressionUserConfig{ + Enabled: true, + RescoreLimit: 100, + Cache: true, + }, + SQ: flat.CompressionUserConfig{ + Enabled: flat.DefaultCompressionEnabled, + RescoreLimit: flat.DefaultCompressionRescore, + Cache: flat.DefaultVectorCache, + }, + }, + }, + }, + { + name: "pq enabled with flat returns error", + input: map[string]interface{}{ + "flat": map[string]interface{}{ + "vectorCacheMaxObjects": float64(100), + "distance": "cosine", + "pq": map[string]interface{}{ + "enabled": true, + "rescoreLimit": float64(100), + "cache": true, + }, + }, + }, + expectErr: true, + expectErrMsg: "PQ is not currently supported for flat indices", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + cfg, err := ParseAndValidateConfig(test.input, false) + if test.expectErr { + require.NotNil(t, err) + assert.Contains(t, err.Error(), test.expectErrMsg) + return + } else { + assert.Nil(t, err) + assert.Equal(t, test.expected, cfg) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/flat/config.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/flat/config.go new file mode 100644 index 0000000000000000000000000000000000000000..d689c43875f6d09646bc045e3c8fc94ae463f113 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/flat/config.go @@ -0,0 +1,190 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package flat + +import ( + "errors" + "fmt" + + schemaConfig "github.com/weaviate/weaviate/entities/schema/config" + vectorindexcommon "github.com/weaviate/weaviate/entities/vectorindex/common" +) + +const ( + DefaultVectorCache = false + DefaultVectorCacheMaxObjects = 1e12 + DefaultCompressionEnabled = false + DefaultCompressionRescore = -1 // indicates "let Weaviate pick" +) + +type CompressionUserConfig struct { + Enabled bool `json:"enabled"` + RescoreLimit int `json:"rescoreLimit"` + Cache bool `json:"cache"` +} + +type UserConfig struct { + Distance string `json:"distance"` + VectorCacheMaxObjects int `json:"vectorCacheMaxObjects"` + PQ CompressionUserConfig `json:"pq"` + BQ CompressionUserConfig `json:"bq"` + SQ CompressionUserConfig `json:"sq"` +} + +// IndexType returns the type of the underlying vector index, thus making sure +// the schema.VectorIndexConfig interface is implemented +func (u UserConfig) IndexType() string { + return "flat" +} + +func (u UserConfig) DistanceName() string { + return u.Distance +} + +func (u UserConfig) IsMultiVector() bool { + return false +} + +// SetDefaults in the user-specifyable part of the config +func (u *UserConfig) SetDefaults() { + u.PQ.Cache = DefaultVectorCache + u.BQ.Cache = DefaultVectorCache + u.VectorCacheMaxObjects = DefaultVectorCacheMaxObjects + u.Distance = vectorindexcommon.DefaultDistanceMetric + u.PQ.Enabled = DefaultCompressionEnabled + u.PQ.RescoreLimit = DefaultCompressionRescore + u.BQ.Enabled = DefaultCompressionEnabled + u.BQ.RescoreLimit = DefaultCompressionRescore + u.SQ.Enabled = DefaultCompressionEnabled + u.SQ.RescoreLimit = DefaultCompressionRescore +} + +// ParseAndValidateConfig from an unknown input value, as this is not further +// specified in the API to allow of exchanging the index type +func ParseAndValidateConfig(input interface{}) (schemaConfig.VectorIndexConfig, error) { + uc := UserConfig{} + uc.SetDefaults() + + if input == nil { + 
return uc, nil + } + + asMap, ok := input.(map[string]interface{}) + if !ok || asMap == nil { + return uc, fmt.Errorf("input must be a non-nil map") + } + + if err := vectorindexcommon.OptionalStringFromMap(asMap, "distance", func(v string) { + uc.Distance = v + }); err != nil { + return uc, err + } + + if err := vectorindexcommon.OptionalIntFromMap(asMap, "vectorCacheMaxObjects", func(v int) { + uc.VectorCacheMaxObjects = v + }); err != nil { + return uc, err + } + + if err := parseCompression(asMap, &uc); err != nil { + return uc, err + } + + return uc, nil +} + +func parseCompressionMap(in interface{}, cuc *CompressionUserConfig) error { + configMap, ok := in.(map[string]interface{}) + if ok { + if err := vectorindexcommon.OptionalBoolFromMap(configMap, "enabled", func(v bool) { + cuc.Enabled = v + }); err != nil { + return err + } + + if err := vectorindexcommon.OptionalBoolFromMap(configMap, "cache", func(v bool) { + cuc.Cache = v + }); err != nil { + return err + } + + if err := vectorindexcommon.OptionalIntFromMap(configMap, "rescoreLimit", func(v int) { + cuc.RescoreLimit = v + }); err != nil { + return err + } + } + return nil +} + +func parseCompression(in map[string]interface{}, uc *UserConfig) error { + pqConfigValue, pqOk := in["pq"] + bqConfigValue, bqOk := in["bq"] + sqConfigValue, sqOk := in["sq"] + + if !pqOk && !bqOk && !sqOk { + return nil + } + + if pqOk { + err := parseCompressionMap(pqConfigValue, &uc.PQ) + if err != nil { + return err + } + } + + if bqOk { + err := parseCompressionMap(bqConfigValue, &uc.BQ) + if err != nil { + return err + } + } + + if sqOk { + err := parseCompressionMap(sqConfigValue, &uc.SQ) + if err != nil { + return err + } + } + + compressionConfigs := []CompressionUserConfig{uc.PQ, uc.BQ, uc.SQ} + totalEnabled := 0 + + for _, compressionConfig := range compressionConfigs { + if compressionConfig.Cache && !compressionConfig.Enabled { + return errors.New("not possible to use the cache without compression") + } + if 
compressionConfig.Enabled { + totalEnabled++ + } + } + + if totalEnabled > 1 { + return errors.New("cannot enable multiple quantization methods at the same time") + } + + // TODO: remove once PQ and SQ are supported + if uc.PQ.Enabled { + return errors.New("PQ is not currently supported for flat indices") + } + if uc.SQ.Enabled { + return errors.New("SQ is not currently supported for flat indices") + } + + return nil +} + +func NewDefaultUserConfig() UserConfig { + uc := UserConfig{} + uc.SetDefaults() + return uc +} diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/flat/config_test.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/flat/config_test.go new file mode 100644 index 0000000000000000000000000000000000000000..49842eb31ef54c16ea590f970b0905a88e579f99 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/flat/config_test.go @@ -0,0 +1,148 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package flat + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/vectorindex/common" +) + +func Test_FlatUserConfig(t *testing.T) { + type test struct { + name string + input interface{} + expected UserConfig + expectErr bool + expectErrMsg string + } + + tests := []test{ + { + name: "nothing specified, all defaults", + input: nil, + expected: UserConfig{ + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + Distance: common.DefaultDistanceMetric, + PQ: CompressionUserConfig{ + Enabled: DefaultCompressionEnabled, + RescoreLimit: DefaultCompressionRescore, + Cache: DefaultVectorCache, + }, + BQ: CompressionUserConfig{ + Enabled: DefaultCompressionEnabled, + RescoreLimit: DefaultCompressionRescore, + Cache: DefaultVectorCache, + }, + SQ: CompressionUserConfig{ + Enabled: DefaultCompressionEnabled, + RescoreLimit: DefaultCompressionRescore, + Cache: DefaultVectorCache, + }, + }, + }, + { + name: "bq enabled", + input: map[string]interface{}{ + "vectorCacheMaxObjects": float64(100), + "distance": "cosine", + "bq": map[string]interface{}{ + "enabled": true, + "rescoreLimit": float64(100), + "cache": true, + }, + }, + expected: UserConfig{ + VectorCacheMaxObjects: 100, + Distance: common.DefaultDistanceMetric, + PQ: CompressionUserConfig{ + Enabled: false, + RescoreLimit: DefaultCompressionRescore, + Cache: DefaultVectorCache, + }, + BQ: CompressionUserConfig{ + Enabled: true, + RescoreLimit: 100, + Cache: true, + }, + SQ: CompressionUserConfig{ + Enabled: DefaultCompressionEnabled, + RescoreLimit: DefaultCompressionRescore, + Cache: DefaultVectorCache, + }, + }, + }, + { + name: "sq enabled", + input: map[string]interface{}{ + "vectorCacheMaxObjects": float64(100), + "distance": "cosine", + "sq": map[string]interface{}{ + "enabled": true, + "rescoreLimit": float64(20), + "cache": true, + }, + }, + expectErr: true, + 
expectErrMsg: "SQ is not currently supported for flat indices", + }, + { + name: "pq enabled", + input: map[string]interface{}{ + "vectorCacheMaxObjects": float64(100), + "distance": "cosine", + "pq": map[string]interface{}{ + "enabled": true, + "rescoreLimit": float64(100), + "cache": true, + }, + }, + expectErr: true, + expectErrMsg: "PQ is not currently supported for flat indices", + }, + { + name: "sq and bq enabled", + input: map[string]interface{}{ + "vectorCacheMaxObjects": float64(100), + "distance": "cosine", + "sq": map[string]interface{}{ + "enabled": true, + "rescoreLimit": float64(100), + "cache": true, + }, + "bq": map[string]interface{}{ + "enabled": true, + "rescoreLimit": float64(100), + "cache": true, + }, + }, + expectErr: true, + expectErrMsg: "cannot enable multiple quantization methods at the same time", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + cfg, err := ParseAndValidateConfig(test.input) + if test.expectErr { + require.NotNil(t, err) + assert.Contains(t, err.Error(), test.expectErrMsg) + return + } else { + assert.Nil(t, err) + assert.Equal(t, test.expected, cfg) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/bq_config.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/bq_config.go new file mode 100644 index 0000000000000000000000000000000000000000..f5bab6310854861c3afa20a6a1adbf55a5d40089 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/bq_config.go @@ -0,0 +1,42 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package hnsw + +import "github.com/weaviate/weaviate/entities/vectorindex/common" + +const ( + DefaultBQEnabled = false +) + +type BQConfig struct { + Enabled bool `json:"enabled"` +} + +func parseBQMap(in map[string]interface{}, bq *BQConfig) error { + bqConfigValue, ok := in["bq"] + if !ok { + return nil + } + + bqConfigMap, ok := bqConfigValue.(map[string]interface{}) + if !ok { + return nil + } + + if err := common.OptionalBoolFromMap(bqConfigMap, "enabled", func(v bool) { + bq.Enabled = v + }); err != nil { + return err + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/config.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/config.go new file mode 100644 index 0000000000000000000000000000000000000000..5a9b23a733aa00e823f26caa583d72d2c77183ca --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/config.go @@ -0,0 +1,331 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package hnsw + +import ( + "fmt" + "os" + "strings" + + "github.com/weaviate/weaviate/entities/schema/config" + vectorIndexCommon "github.com/weaviate/weaviate/entities/vectorindex/common" +) + +const ( + // Set these defaults if the user leaves them blank + DefaultCleanupIntervalSeconds = 5 * 60 + DefaultMaxConnections = 32 + DefaultEFConstruction = 128 + DefaultEF = -1 // indicates "let Weaviate pick" + DefaultDynamicEFMin = 100 + DefaultDynamicEFMax = 500 + DefaultDynamicEFFactor = 8 + DefaultSkip = false + DefaultFlatSearchCutoff = 40000 + + FilterStrategySweeping = "sweeping" + FilterStrategyAcorn = "acorn" + + DefaultFilterStrategy = FilterStrategySweeping + + // Fail validation if those criteria are not met + MinmumMaxConnections = 4 + MaximumMaxConnections = 2047 + MinmumEFConstruction = 4 +) + +// UserConfig bundles all values settable by a user in the per-class settings +type UserConfig struct { + Skip bool `json:"skip"` + CleanupIntervalSeconds int `json:"cleanupIntervalSeconds"` + MaxConnections int `json:"maxConnections"` + EFConstruction int `json:"efConstruction"` + EF int `json:"ef"` + DynamicEFMin int `json:"dynamicEfMin"` + DynamicEFMax int `json:"dynamicEfMax"` + DynamicEFFactor int `json:"dynamicEfFactor"` + VectorCacheMaxObjects int `json:"vectorCacheMaxObjects"` + FlatSearchCutoff int `json:"flatSearchCutoff"` + Distance string `json:"distance"` + PQ PQConfig `json:"pq"` + BQ BQConfig `json:"bq"` + SQ SQConfig `json:"sq"` + RQ RQConfig `json:"rq"` + FilterStrategy string `json:"filterStrategy"` + Multivector MultivectorConfig `json:"multivector"` + SkipDefaultQuantization bool `json:"skipDefaultQuantization"` + TrackDefaultQuantization bool `json:"trackDefaultQuantization"` +} + +// IndexType returns the type of the underlying vector index, thus making sure +// the schema.VectorIndexConfig interface is implemented +func (u UserConfig) IndexType() string { + return "hnsw" +} + +func (u UserConfig) 
DistanceName() string { + return u.Distance +} + +func (u UserConfig) IsMultiVector() bool { + return u.Multivector.Enabled +} + +// SetDefaults in the user-specifyable part of the config +func (u *UserConfig) SetDefaults() { + u.MaxConnections = DefaultMaxConnections + u.EFConstruction = DefaultEFConstruction + u.CleanupIntervalSeconds = DefaultCleanupIntervalSeconds + u.VectorCacheMaxObjects = vectorIndexCommon.DefaultVectorCacheMaxObjects + u.EF = DefaultEF + u.DynamicEFFactor = DefaultDynamicEFFactor + u.DynamicEFMax = DefaultDynamicEFMax + u.DynamicEFMin = DefaultDynamicEFMin + u.Skip = DefaultSkip + u.FlatSearchCutoff = DefaultFlatSearchCutoff + u.Distance = vectorIndexCommon.DefaultDistanceMetric + u.PQ = PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + } + u.BQ = BQConfig{ + Enabled: DefaultBQEnabled, + } + u.SQ = SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + } + u.RQ = RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + } + if strategy := os.Getenv("HNSW_DEFAULT_FILTER_STRATEGY"); strategy == FilterStrategyAcorn { + u.FilterStrategy = FilterStrategyAcorn + } else { + u.FilterStrategy = FilterStrategySweeping + } + u.Multivector = MultivectorConfig{ + Aggregation: DefaultMultivectorAggregation, + Enabled: DefaultMultivectorEnabled, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + } +} + +// ParseAndValidateConfig from an unknown input value, as this is not further +// specified in the API to allow of exchanging the index type +func 
ParseAndValidateConfig(input interface{}, isMultiVector bool) (config.VectorIndexConfig, error) { + uc := UserConfig{} + uc.SetDefaults() + + if input == nil { + return uc, nil + } + + asMap, ok := input.(map[string]interface{}) + if !ok || asMap == nil { + return uc, fmt.Errorf("input must be a non-nil map") + } + + if err := vectorIndexCommon.OptionalIntFromMap(asMap, "maxConnections", func(v int) { + uc.MaxConnections = v + }); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalIntFromMap(asMap, "cleanupIntervalSeconds", func(v int) { + uc.CleanupIntervalSeconds = v + }); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalIntFromMap(asMap, "efConstruction", func(v int) { + uc.EFConstruction = v + }); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalIntFromMap(asMap, "ef", func(v int) { + uc.EF = v + }); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalIntFromMap(asMap, "dynamicEfFactor", func(v int) { + uc.DynamicEFFactor = v + }); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalIntFromMap(asMap, "dynamicEfMax", func(v int) { + uc.DynamicEFMax = v + }); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalIntFromMap(asMap, "dynamicEfMin", func(v int) { + uc.DynamicEFMin = v + }); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalIntFromMap(asMap, "vectorCacheMaxObjects", func(v int) { + uc.VectorCacheMaxObjects = v + }); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalIntFromMap(asMap, "flatSearchCutoff", func(v int) { + uc.FlatSearchCutoff = v + }); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalBoolFromMap(asMap, "skip", func(v bool) { + uc.Skip = v + }); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalStringFromMap(asMap, "distance", func(v string) { + uc.Distance = v + }); err != nil { + return uc, err + } + + if err 
:= parsePQMap(asMap, &uc.PQ); err != nil { + return uc, err + } + + if err := parseBQMap(asMap, &uc.BQ); err != nil { + return uc, err + } + + if err := parseSQMap(asMap, &uc.SQ); err != nil { + return uc, err + } + + if err := parseRQMap(asMap, &uc.RQ); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalStringFromMap(asMap, "filterStrategy", func(v string) { + uc.FilterStrategy = v + }); err != nil { + return uc, err + } + + if err := parseMultivectorMap(asMap, &uc.Multivector, isMultiVector); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalBoolFromMap(asMap, "skipDefaultQuantization", func(v bool) { + uc.SkipDefaultQuantization = v + }); err != nil { + return uc, err + } + + if err := vectorIndexCommon.OptionalBoolFromMap(asMap, "trackDefaultQuantization", func(v bool) { + uc.TrackDefaultQuantization = v + }); err != nil { + return uc, err + } + + return uc, uc.validate() +} + +func (u *UserConfig) validate() error { + var errMsgs []string + if u.MaxConnections < MinmumMaxConnections { + errMsgs = append(errMsgs, fmt.Sprintf( + "maxConnections must be a positive integer with a minimum of %d", + MinmumMaxConnections, + )) + } + + if u.MaxConnections > MaximumMaxConnections { + errMsgs = append(errMsgs, fmt.Sprintf( + "maxConnections must be less than %d", + MaximumMaxConnections+1, + )) + } + + if u.EFConstruction < MinmumEFConstruction { + errMsgs = append(errMsgs, fmt.Sprintf( + "efConstruction must be a positive integer with a minimum of %d", + MinmumMaxConnections, + )) + } + + if u.FilterStrategy != FilterStrategySweeping && u.FilterStrategy != FilterStrategyAcorn { + errMsgs = append(errMsgs, "filterStrategy must be either 'sweeping' or 'acorn'") + } + + if len(errMsgs) > 0 { + return fmt.Errorf("invalid hnsw config: %s", + strings.Join(errMsgs, ", ")) + } + + enabled := 0 + if u.PQ.Enabled { + enabled++ + } + if u.BQ.Enabled { + enabled++ + } + if u.SQ.Enabled { + enabled++ + } + if u.RQ.Enabled { + enabled++ 
+ } + if enabled > 1 { + return fmt.Errorf("invalid hnsw config: more than a single compression methods enabled") + } + + err := ValidateRQConfig(u.RQ) + if err != nil { + return err + } + + if u.Multivector.MuveraConfig.Enabled && u.Multivector.MuveraConfig.KSim > 10 { + return fmt.Errorf("invalid hnsw config: ksim must be less than 10") + } + + return nil +} + +func NewDefaultUserConfig() UserConfig { + uc := UserConfig{} + uc.SetDefaults() + return uc +} + +func NewDefaultMultiVectorUserConfig() UserConfig { + uc := UserConfig{} + uc.SetDefaults() + uc.Multivector = MultivectorConfig{Enabled: true} + return uc +} diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/config_test.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/config_test.go new file mode 100644 index 0000000000000000000000000000000000000000..41a704f854b7d4c0dfda113c7ff4cfe296928817 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/config_test.go @@ -0,0 +1,1148 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package hnsw + +import ( + "encoding/json" + "math" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/vectorindex/common" +) + +func Test_UserConfig(t *testing.T) { + type test struct { + name string + input interface{} + expected UserConfig + expectErr bool + expectErrMsg string + } + + tests := []test{ + { + name: "nothing specified, all defaults", + input: nil, + expected: UserConfig{ + CleanupIntervalSeconds: DefaultCleanupIntervalSeconds, + MaxConnections: DefaultMaxConnections, + EFConstruction: DefaultEFConstruction, + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + EF: DefaultEF, + Skip: DefaultSkip, + FlatSearchCutoff: DefaultFlatSearchCutoff, + DynamicEFMin: DefaultDynamicEFMin, + DynamicEFMax: DefaultDynamicEFMax, + DynamicEFFactor: DefaultDynamicEFFactor, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + + { + name: "with maximum connections", + input: map[string]interface{}{ + "maxConnections": 
json.Number("100"), + }, + expected: UserConfig{ + CleanupIntervalSeconds: DefaultCleanupIntervalSeconds, + MaxConnections: 100, + EFConstruction: DefaultEFConstruction, + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + EF: DefaultEF, + FlatSearchCutoff: DefaultFlatSearchCutoff, + DynamicEFMin: DefaultDynamicEFMin, + DynamicEFMax: DefaultDynamicEFMax, + DynamicEFFactor: DefaultDynamicEFFactor, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + + { + name: "with all optional fields", + input: map[string]interface{}{ + "cleanupIntervalSeconds": json.Number("11"), + "maxConnections": json.Number("12"), + "efConstruction": json.Number("13"), + "vectorCacheMaxObjects": json.Number("14"), + "ef": json.Number("15"), + "flatSearchCutoff": json.Number("16"), + "dynamicEfMin": json.Number("17"), + "dynamicEfMax": json.Number("18"), + "dynamicEfFactor": json.Number("19"), + "skip": true, + "distance": "l2-squared", + }, + expected: UserConfig{ + CleanupIntervalSeconds: 11, + MaxConnections: 12, + EFConstruction: 13, + VectorCacheMaxObjects: 14, + EF: 15, + 
FlatSearchCutoff: 16, + DynamicEFMin: 17, + DynamicEFMax: 18, + DynamicEFFactor: 19, + Skip: true, + Distance: "l2-squared", + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + + { + name: "with all optional fields", + input: map[string]interface{}{ + "cleanupIntervalSeconds": json.Number("11"), + "maxConnections": json.Number("12"), + "efConstruction": json.Number("13"), + "vectorCacheMaxObjects": json.Number("14"), + "ef": json.Number("15"), + "flatSearchCutoff": json.Number("16"), + "dynamicEfMin": json.Number("17"), + "dynamicEfMax": json.Number("18"), + "dynamicEfFactor": json.Number("19"), + "skip": true, + "distance": "manhattan", + }, + expected: UserConfig{ + CleanupIntervalSeconds: 11, + MaxConnections: 12, + EFConstruction: 13, + VectorCacheMaxObjects: 14, + EF: 15, + FlatSearchCutoff: 16, + DynamicEFMin: 17, + DynamicEFMax: 18, + DynamicEFFactor: 19, + Skip: true, + Distance: "manhattan", + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: 
PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + + { + name: "with all optional fields", + input: map[string]interface{}{ + "cleanupIntervalSeconds": json.Number("11"), + "maxConnections": json.Number("12"), + "efConstruction": json.Number("13"), + "vectorCacheMaxObjects": json.Number("14"), + "ef": json.Number("15"), + "flatSearchCutoff": json.Number("16"), + "dynamicEfMin": json.Number("17"), + "dynamicEfMax": json.Number("18"), + "dynamicEfFactor": json.Number("19"), + "skip": true, + "distance": "hamming", + "filterStrategy": "sweeping", + }, + expected: UserConfig{ + CleanupIntervalSeconds: 11, + MaxConnections: 12, + EFConstruction: 13, + VectorCacheMaxObjects: 14, + EF: 15, + FlatSearchCutoff: 16, + DynamicEFMin: 17, + DynamicEFMax: 18, + DynamicEFFactor: 19, + Skip: true, + Distance: "hamming", + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: 
DefaultRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + + { + // opposed to from the API + name: "with raw data as floats", + input: map[string]interface{}{ + "cleanupIntervalSeconds": float64(11), + "maxConnections": float64(12), + "efConstruction": float64(13), + "vectorCacheMaxObjects": float64(14), + "ef": float64(15), + "flatSearchCutoff": float64(16), + "dynamicEfMin": float64(17), + "dynamicEfMax": float64(18), + "dynamicEfFactor": float64(19), + }, + expected: UserConfig{ + CleanupIntervalSeconds: 11, + MaxConnections: 12, + EFConstruction: 13, + VectorCacheMaxObjects: 14, + EF: 15, + FlatSearchCutoff: 16, + DynamicEFMin: 17, + DynamicEFMax: 18, + DynamicEFFactor: 19, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + 
}, + + { + name: "with pq tile normal encoder", + input: map[string]interface{}{ + "cleanupIntervalSeconds": float64(11), + "maxConnections": float64(12), + "efConstruction": float64(13), + "vectorCacheMaxObjects": float64(14), + "ef": float64(15), + "flatSearchCutoff": float64(16), + "dynamicEfMin": float64(17), + "dynamicEfMax": float64(18), + "dynamicEfFactor": float64(19), + "pq": map[string]interface{}{ + "enabled": true, + "bitCompression": false, + "segments": float64(64), + "centroids": float64(DefaultPQCentroids), + "trainingLimit": float64(DefaultPQTrainingLimit), + "encoder": map[string]interface{}{ + "type": "tile", + "distribution": "normal", + }, + }, + }, + expected: UserConfig{ + CleanupIntervalSeconds: 11, + MaxConnections: 12, + EFConstruction: 13, + VectorCacheMaxObjects: 14, + EF: 15, + FlatSearchCutoff: 16, + DynamicEFMin: 17, + DynamicEFMax: 18, + DynamicEFFactor: 19, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: true, + Segments: 64, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: "tile", + Distribution: "normal", + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + + { + name: "with pq kmeans normal encoder", + input: map[string]interface{}{ + "cleanupIntervalSeconds": float64(11), + "maxConnections": float64(12), + "efConstruction": float64(13), + "vectorCacheMaxObjects": float64(14), + "ef": 
float64(15), + "flatSearchCutoff": float64(16), + "dynamicEfMin": float64(17), + "dynamicEfMax": float64(18), + "dynamicEfFactor": float64(19), + "pq": map[string]interface{}{ + "enabled": true, + "bitCompression": false, + "segments": float64(64), + "centroids": float64(DefaultPQCentroids), + "trainingLimit": float64(DefaultPQTrainingLimit), + "encoder": map[string]interface{}{ + "type": PQEncoderTypeKMeans, + }, + }, + }, + expected: UserConfig{ + CleanupIntervalSeconds: 11, + MaxConnections: 12, + EFConstruction: 13, + VectorCacheMaxObjects: 14, + EF: 15, + FlatSearchCutoff: 16, + DynamicEFMin: 17, + DynamicEFMax: 18, + DynamicEFFactor: 19, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: true, + Segments: 64, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + + { + name: "with invalid encoder", + input: map[string]interface{}{ + "pq": map[string]interface{}{ + "enabled": true, + "encoder": map[string]interface{}{ + "type": "bernoulli", + }, + }, + }, + expectErr: true, + expectErrMsg: "invalid encoder type bernoulli", + }, + + { + name: "with invalid distribution", + input: map[string]interface{}{ + "pq": map[string]interface{}{ + "enabled": true, + "encoder": map[string]interface{}{ + "distribution": 
"lognormal", + }, + }, + }, + expectErr: true, + expectErrMsg: "invalid encoder distribution lognormal", + }, + + { + // opposed to from the API + name: "with rounded vectorCacheMaxObjects that would otherwise overflow", + input: map[string]interface{}{ + "cleanupIntervalSeconds": json.Number("11"), + "maxConnections": json.Number("12"), + "efConstruction": json.Number("13"), + "vectorCacheMaxObjects": json.Number("9223372036854776000"), + "ef": json.Number("15"), + "flatSearchCutoff": json.Number("16"), + "dynamicEfMin": json.Number("17"), + "dynamicEfMax": json.Number("18"), + "dynamicEfFactor": json.Number("19"), + }, + expected: UserConfig{ + CleanupIntervalSeconds: 11, + MaxConnections: 12, + EFConstruction: 13, + VectorCacheMaxObjects: math.MaxInt64, + EF: 15, + FlatSearchCutoff: 16, + DynamicEFMin: 17, + DynamicEFMax: 18, + DynamicEFFactor: 19, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + { + name: "invalid max connections (json)", + input: map[string]interface{}{ + "maxConnections": json.Number("0"), + }, + expectErr: true, + expectErrMsg: 
"maxConnections must be a positive integer " + + "with a minimum of 4", + }, + { + name: "invalid max connections (float)", + input: map[string]interface{}{ + "maxConnections": float64(3), + }, + expectErr: true, + expectErrMsg: "maxConnections must be a positive integer " + + "with a minimum of 4", + }, + { + name: "invalid efConstruction (json)", + input: map[string]interface{}{ + "efConstruction": json.Number("0"), + }, + expectErr: true, + expectErrMsg: "efConstruction must be a positive integer " + + "with a minimum of 4", + }, + { + name: "invalid efConstruction (float)", + input: map[string]interface{}{ + "efConstruction": float64(3), + }, + expectErr: true, + expectErrMsg: "efConstruction must be a positive integer " + + "with a minimum of 4", + }, + { + name: "with bq", + input: map[string]interface{}{ + "cleanupIntervalSeconds": float64(11), + "maxConnections": float64(12), + "efConstruction": float64(13), + "vectorCacheMaxObjects": float64(14), + "ef": float64(15), + "flatSearchCutoff": float64(16), + "dynamicEfMin": float64(17), + "dynamicEfMax": float64(18), + "dynamicEfFactor": float64(19), + "bq": map[string]interface{}{ + "enabled": true, + }, + }, + expected: UserConfig{ + CleanupIntervalSeconds: 11, + MaxConnections: 12, + EFConstruction: 13, + VectorCacheMaxObjects: 14, + EF: 15, + FlatSearchCutoff: 16, + DynamicEFMin: 17, + DynamicEFMax: 18, + DynamicEFFactor: 19, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: false, + Segments: 0, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + BQ: BQConfig{ + Enabled: true, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: 
DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + { + name: "with sq", + input: map[string]interface{}{ + "cleanupIntervalSeconds": float64(11), + "maxConnections": float64(12), + "efConstruction": float64(13), + "vectorCacheMaxObjects": float64(14), + "ef": float64(15), + "flatSearchCutoff": float64(16), + "dynamicEfMin": float64(17), + "dynamicEfMax": float64(18), + "dynamicEfFactor": float64(19), + "sq": map[string]interface{}{ + "enabled": true, + }, + }, + expected: UserConfig{ + CleanupIntervalSeconds: 11, + MaxConnections: 12, + EFConstruction: 13, + VectorCacheMaxObjects: 14, + EF: 15, + FlatSearchCutoff: 16, + DynamicEFMin: 17, + DynamicEFMax: 18, + DynamicEFFactor: 19, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: false, + Segments: 0, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: true, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + { + name: "with invalid compression", + input: map[string]interface{}{ + "pq": map[string]interface{}{ + 
"enabled": true, + "encoder": map[string]interface{}{ + "type": "kmeans", + }, + }, + "bq": map[string]interface{}{ + "enabled": true, + }, + }, + expectErr: true, + expectErrMsg: "invalid hnsw config: more than a single compression methods enabled", + }, + { + name: "with invalid filter strategy", + input: map[string]interface{}{ + "filterStrategy": "chestnut", + }, + expectErr: true, + expectErrMsg: "invalid hnsw config: filterStrategy must be either 'sweeping' or 'acorn'", + }, + { + name: "acorn enabled, all defaults", + input: map[string]interface{}{ + "filterStrategy": "acorn", + }, + expected: UserConfig{ + CleanupIntervalSeconds: DefaultCleanupIntervalSeconds, + MaxConnections: DefaultMaxConnections, + EFConstruction: DefaultEFConstruction, + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + EF: DefaultEF, + Skip: DefaultSkip, + FlatSearchCutoff: DefaultFlatSearchCutoff, + DynamicEFMin: DefaultDynamicEFMin, + DynamicEFMax: DefaultDynamicEFMax, + DynamicEFFactor: DefaultDynamicEFFactor, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: FilterStrategyAcorn, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + { + name: "max 
connections at maximum allowed value (2047)", + input: map[string]interface{}{ + "maxConnections": json.Number("2047"), + }, + expected: UserConfig{ + CleanupIntervalSeconds: DefaultCleanupIntervalSeconds, + MaxConnections: 2047, + EFConstruction: DefaultEFConstruction, + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + EF: DefaultEF, + Skip: DefaultSkip, + FlatSearchCutoff: DefaultFlatSearchCutoff, + DynamicEFMin: DefaultDynamicEFMin, + DynamicEFMax: DefaultDynamicEFMax, + DynamicEFFactor: DefaultDynamicEFFactor, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: DefaultRQEnabled, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + { + name: "max connections exceeds maximum allowed value (2048)", + input: map[string]interface{}{ + "maxConnections": json.Number("2048"), + }, + expectErr: true, + expectErrMsg: "maxConnections must be less than 2048", + }, + { + name: "with rq defaults", + input: map[string]interface{}{ + "rq": map[string]interface{}{ + "enabled": true, + }, + }, + expected: UserConfig{ + CleanupIntervalSeconds: DefaultCleanupIntervalSeconds, + MaxConnections: DefaultMaxConnections, + EFConstruction: 
DefaultEFConstruction, + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + EF: DefaultEF, + Skip: DefaultSkip, + FlatSearchCutoff: DefaultFlatSearchCutoff, + DynamicEFMin: DefaultDynamicEFMin, + DynamicEFMax: DefaultDynamicEFMax, + DynamicEFFactor: DefaultDynamicEFFactor, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: true, + Bits: DefaultRQBits, + RescoreLimit: DefaultRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + { + name: "with rq 8 and rescore limit 0", + input: map[string]interface{}{ + "rq": map[string]interface{}{ + "enabled": true, + "bits": float64(8), + "rescoreLimit": float64(0), + }, + }, + expected: UserConfig{ + CleanupIntervalSeconds: DefaultCleanupIntervalSeconds, + MaxConnections: DefaultMaxConnections, + EFConstruction: DefaultEFConstruction, + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + EF: DefaultEF, + Skip: DefaultSkip, + FlatSearchCutoff: DefaultFlatSearchCutoff, + DynamicEFMin: DefaultDynamicEFMin, + DynamicEFMax: DefaultDynamicEFMax, + DynamicEFFactor: DefaultDynamicEFFactor, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: 
DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: true, + Bits: DefaultRQBits, + RescoreLimit: 0, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + { + name: "with rq 1", + input: map[string]interface{}{ + "rq": map[string]interface{}{ + "enabled": true, + "bits": float64(1), + }, + }, + expected: UserConfig{ + CleanupIntervalSeconds: DefaultCleanupIntervalSeconds, + MaxConnections: DefaultMaxConnections, + EFConstruction: DefaultEFConstruction, + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + EF: DefaultEF, + Skip: DefaultSkip, + FlatSearchCutoff: DefaultFlatSearchCutoff, + DynamicEFMin: DefaultDynamicEFMin, + DynamicEFMax: DefaultDynamicEFMax, + DynamicEFFactor: DefaultDynamicEFFactor, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: true, + Bits: 1, + RescoreLimit: DefaultBRQRescoreLimit, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + 
Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + { + name: "with rq 1 and rescore limit 100", + input: map[string]interface{}{ + "rq": map[string]interface{}{ + "enabled": true, + "bits": float64(1), + "rescoreLimit": float64(100), + }, + }, + expected: UserConfig{ + CleanupIntervalSeconds: DefaultCleanupIntervalSeconds, + MaxConnections: DefaultMaxConnections, + EFConstruction: DefaultEFConstruction, + VectorCacheMaxObjects: common.DefaultVectorCacheMaxObjects, + EF: DefaultEF, + Skip: DefaultSkip, + FlatSearchCutoff: DefaultFlatSearchCutoff, + DynamicEFMin: DefaultDynamicEFMin, + DynamicEFMax: DefaultDynamicEFMax, + DynamicEFFactor: DefaultDynamicEFFactor, + Distance: common.DefaultDistanceMetric, + PQ: PQConfig{ + Enabled: DefaultPQEnabled, + BitCompression: DefaultPQBitCompression, + Segments: DefaultPQSegments, + Centroids: DefaultPQCentroids, + TrainingLimit: DefaultPQTrainingLimit, + Encoder: PQEncoder{ + Type: DefaultPQEncoderType, + Distribution: DefaultPQEncoderDistribution, + }, + }, + SQ: SQConfig{ + Enabled: DefaultSQEnabled, + TrainingLimit: DefaultSQTrainingLimit, + RescoreLimit: DefaultSQRescoreLimit, + }, + RQ: RQConfig{ + Enabled: true, + Bits: 1, + RescoreLimit: 100, + }, + FilterStrategy: DefaultFilterStrategy, + Multivector: MultivectorConfig{ + Enabled: DefaultMultivectorEnabled, + Aggregation: DefaultMultivectorAggregation, + MuveraConfig: MuveraConfig{ + Enabled: DefaultMultivectorMuveraEnabled, + KSim: DefaultMultivectorKSim, + DProjections: DefaultMultivectorDProjections, + Repetitions: DefaultMultivectorRepetitions, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + cfg, err := ParseAndValidateConfig(test.input, false) + if test.expectErr { 
+ require.NotNil(t, err) + assert.Contains(t, err.Error(), test.expectErrMsg) + return + } else { + assert.Nil(t, err) + assert.Equal(t, test.expected, cfg) + } + }) + } +} + +func Test_UserConfigFilterStrategy(t *testing.T) { + t.Run("default filter strategy is sweeping", func(t *testing.T) { + cfg := UserConfig{} + cfg.SetDefaults() + assert.Equal(t, FilterStrategySweeping, cfg.FilterStrategy) + }) + + t.Run("can override default strategy", func(t *testing.T) { + os.Setenv("HNSW_DEFAULT_FILTER_STRATEGY", FilterStrategyAcorn) + cfg := UserConfig{} + cfg.SetDefaults() + assert.Equal(t, FilterStrategyAcorn, cfg.FilterStrategy) + assert.Nil(t, os.Unsetenv("HNSW_DEFAULT_FILTER_STRATEGY")) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/multivector_config.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/multivector_config.go new file mode 100644 index 0000000000000000000000000000000000000000..b058cafe42bb643e8c775481240cbd2cca6d8220 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/multivector_config.go @@ -0,0 +1,132 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package hnsw + +import ( + "fmt" + + "github.com/weaviate/weaviate/entities/vectorindex/common" +) + +const ( + MultivectorAggregationMaxSim = "maxSim" +) + +const ( + DefaultMultivectorEnabled = false + DefaultMultivectorMuveraEnabled = false + DefaultMultivectorKSim = 4 + DefaultMultivectorDProjections = 16 + DefaultMultivectorRepetitions = 10 + DefaultMultivectorAggregation = "maxSim" +) + +// Multivector configuration +type MultivectorConfig struct { + Enabled bool `json:"enabled"` + MuveraConfig MuveraConfig `json:"muvera"` + Aggregation string `json:"aggregation"` +} + +type MuveraConfig struct { + Enabled bool `json:"enabled"` + KSim int `json:"ksim"` + DProjections int `json:"dprojections"` + Repetitions int `json:"repetitions"` +} + +func validAggregation(v string) error { + switch v { + case MultivectorAggregationMaxSim: + default: + return fmt.Errorf("invalid aggregation type %s", v) + } + + return nil +} + +func ValidateMultivectorConfig(cfg MultivectorConfig) error { + if !cfg.Enabled { + return nil + } + err := validAggregation(cfg.Aggregation) + if err != nil { + return err + } + + return nil +} + +func parseMultivectorMap(in map[string]interface{}, multivector *MultivectorConfig, isMultiVector bool) error { + multivectorConfigValue, ok := in["multivector"] + if !ok { + return nil + } + + multivectorConfigMap, ok := multivectorConfigValue.(map[string]interface{}) + if !ok { + return nil + } + + if err := common.OptionalBoolFromMap(multivectorConfigMap, "enabled", func(v bool) { + if isMultiVector { + // vectorizer set is a multi vector vectorizer, enable multi vector index + multivector.Enabled = true + } else { + multivector.Enabled = v + } + }); err != nil { + return err + } + + muveraValue, ok := multivectorConfigMap["muvera"] + if !ok { + return nil + } + + muveraMap, ok := muveraValue.(map[string]interface{}) + if !ok { + return nil + } + + if err := common.OptionalBoolFromMap(muveraMap, "enabled", 
func(v bool) { + multivector.MuveraConfig.Enabled = v + }); err != nil { + return err + } + + if err := common.OptionalIntFromMap(muveraMap, "ksim", func(v int) { + multivector.MuveraConfig.KSim = v + }); err != nil { + return err + } + + if err := common.OptionalIntFromMap(muveraMap, "dprojections", func(v int) { + multivector.MuveraConfig.DProjections = v + }); err != nil { + return err + } + + if err := common.OptionalIntFromMap(muveraMap, "repetitions", func(v int) { + multivector.MuveraConfig.Repetitions = v + }); err != nil { + return err + } + + if err := common.OptionalStringFromMap(multivectorConfigMap, "aggregation", func(v string) { + multivector.Aggregation = v + }); err != nil { + return err + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/pq_config.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/pq_config.go new file mode 100644 index 0000000000000000000000000000000000000000..c4687153ec96ea4c89c8eeff670e78d9b14d2552 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/pq_config.go @@ -0,0 +1,196 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package hnsw + +import ( + "fmt" + + "github.com/weaviate/weaviate/entities/vectorindex/common" +) + +const ( + PQEncoderTypeKMeans = "kmeans" + PQEncoderTypeTile = "tile" + PQEncoderDistributionLogNormal = "log-normal" + PQEncoderDistributionNormal = "normal" +) + +const ( + DefaultPQEnabled = false + DefaultPQBitCompression = false + DefaultPQSegments = 0 + DefaultPQEncoderType = PQEncoderTypeKMeans + DefaultPQEncoderDistribution = PQEncoderDistributionLogNormal + DefaultPQCentroids = 256 + DefaultPQTrainingLimit = 100000 +) + +// Product Quantization encoder configuration +type PQEncoder struct { + Type string `json:"type"` + Distribution string `json:"distribution,omitempty"` +} + +// Product Quantization configuration +type PQConfig struct { + Enabled bool `json:"enabled"` + BitCompression bool `json:"bitCompression"` + Segments int `json:"segments"` + Centroids int `json:"centroids"` + TrainingLimit int `json:"trainingLimit"` + Encoder PQEncoder `json:"encoder"` +} + +func validEncoder(v string) error { + switch v { + case PQEncoderTypeKMeans: + case PQEncoderTypeTile: + default: + return fmt.Errorf("invalid encoder type %s", v) + } + + return nil +} + +func validEncoderDistribution(v string) error { + switch v { + case PQEncoderDistributionLogNormal: + case PQEncoderDistributionNormal: + default: + return fmt.Errorf("invalid encoder distribution %s", v) + } + + return nil +} + +func ValidatePQConfig(cfg PQConfig) error { + if !cfg.Enabled { + return nil + } + err := validEncoder(cfg.Encoder.Type) + if err != nil { + return err + } + + err = validEncoderDistribution(cfg.Encoder.Distribution) + if err != nil { + return err + } + + return nil +} + +func encoderFromMap(in map[string]interface{}, setFn func(v string)) error { + value, ok := in["type"] + if !ok { + return nil + } + + asString, ok := value.(string) + if !ok { + return nil + } + + err := validEncoder(asString) + if err != nil { + return err + } + + 
setFn(asString) + return nil +} + +func encoderDistributionFromMap(in map[string]interface{}, setFn func(v string)) error { + value, ok := in["distribution"] + if !ok { + return nil + } + + asString, ok := value.(string) + if !ok { + return nil + } + + err := validEncoderDistribution(asString) + if err != nil { + return err + } + + setFn(asString) + return nil +} + +func parsePQMap(in map[string]interface{}, pq *PQConfig) error { + pqConfigValue, ok := in["pq"] + if !ok { + return nil + } + + pqConfigMap, ok := pqConfigValue.(map[string]interface{}) + if !ok { + return nil + } + + if err := common.OptionalBoolFromMap(pqConfigMap, "enabled", func(v bool) { + pq.Enabled = v + }); err != nil { + return err + } + + if err := common.OptionalBoolFromMap(pqConfigMap, "bitCompression", func(v bool) { + pq.BitCompression = v + }); err != nil { + return err + } + + if err := common.OptionalIntFromMap(pqConfigMap, "segments", func(v int) { + pq.Segments = v + }); err != nil { + return err + } + + if err := common.OptionalIntFromMap(pqConfigMap, "centroids", func(v int) { + pq.Centroids = v + }); err != nil { + return err + } + + if err := common.OptionalIntFromMap(pqConfigMap, "trainingLimit", func(v int) { + pq.TrainingLimit = v + }); err != nil { + return err + } + + pqEncoderValue, ok := pqConfigMap["encoder"] + if !ok { + return nil + } + + pqEncoderMap, ok := pqEncoderValue.(map[string]interface{}) + if !ok { + return nil + } + + if err := encoderFromMap(pqEncoderMap, func(v string) { + pq.Encoder.Type = v + }); err != nil { + return err + } + + if err := encoderDistributionFromMap(pqEncoderMap, func(v string) { + pq.Encoder.Distribution = v + }); err != nil { + return err + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/rq_config.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/rq_config.go new file mode 100644 index 0000000000000000000000000000000000000000..50f6a0f20c76cde641bc6b061932a048df0043f4 --- 
/dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/rq_config.go @@ -0,0 +1,87 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package hnsw + +import ( + "errors" + + "github.com/weaviate/weaviate/entities/schema/config" + "github.com/weaviate/weaviate/entities/vectorindex/common" +) + +const ( + DefaultRQEnabled = false + DefaultRQBits = 8 + DefaultRQRescoreLimit = 20 + DefaultBRQRescoreLimit = 512 +) + +type RQConfig struct { + Enabled bool `json:"enabled"` + Bits int16 `json:"bits"` + RescoreLimit int `json:"rescoreLimit"` +} + +func ValidateRQConfig(cfg RQConfig) error { + if !cfg.Enabled { + return nil + } + if cfg.Bits != 8 && cfg.Bits != 1 { + return errors.New("RQ bits must be 8 or 1") + } + + return nil +} + +func parseRQMap(in map[string]interface{}, rq *RQConfig) error { + rqConfigValue, ok := in["rq"] + if !ok { + return nil + } + + rqConfigMap, ok := rqConfigValue.(map[string]interface{}) + if !ok { + return nil + } + + if err := common.OptionalBoolFromMap(rqConfigMap, "enabled", func(v bool) { + rq.Enabled = v + }); err != nil { + return err + } + + if err := common.OptionalIntFromMap(rqConfigMap, "bits", func(v int) { + rq.Bits = int16(v) + }); err != nil { + return err + } + + if err := common.OptionalIntFromMap(rqConfigMap, "rescoreLimit", func(v int) { + rq.RescoreLimit = v + }); err != nil { + return err + } + + if rq.Bits == 1 && rqConfigMap["rescoreLimit"] == nil { + rq.RescoreLimit = DefaultBRQRescoreLimit + } + + return nil +} + +// GetRQBits returns the bits value for RQ compression, or 0 if not RQ +func GetRQBits(cfg config.VectorIndexConfig) int16 { + if hnswUserConfig, ok := cfg.(UserConfig); ok && hnswUserConfig.RQ.Enabled { + return hnswUserConfig.RQ.Bits + } + return 0 
+} diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/rq_config_test.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/rq_config_test.go new file mode 100644 index 0000000000000000000000000000000000000000..532341cbc611a9ebc902684961f9fefd55ed9abc --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/rq_config_test.go @@ -0,0 +1,75 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package hnsw + +import ( + "testing" + + "github.com/weaviate/weaviate/entities/schema/config" +) + +func TestGetRQBits(t *testing.T) { + tests := []struct { + name string + config config.VectorIndexConfig + expected int16 + }{ + { + name: "RQ disabled should return 0", + config: UserConfig{ + RQ: RQConfig{ + Enabled: false, + Bits: 8, + }, + }, + expected: 0, + }, + { + name: "RQ enabled with bits=1 should return 1", + config: UserConfig{ + RQ: RQConfig{ + Enabled: true, + Bits: 1, + }, + }, + expected: 1, + }, + { + name: "RQ enabled with bits=8 should return 8", + config: UserConfig{ + RQ: RQConfig{ + Enabled: true, + Bits: 8, + }, + }, + expected: 8, + }, + { + name: "non-RQ config should return 0", + config: UserConfig{ + BQ: BQConfig{ + Enabled: true, + }, + }, + expected: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := GetRQBits(tt.config) + if result != tt.expected { + t.Errorf("GetRQBits() = %v, want %v", result, tt.expected) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/sq_config.go b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/sq_config.go new file mode 100644 index 0000000000000000000000000000000000000000..e4afd51e336385378284a8a698c38157548707b5 --- /dev/null 
+++ b/platform/dbops/binaries/weaviate-src/entities/vectorindex/hnsw/sq_config.go @@ -0,0 +1,58 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package hnsw + +import "github.com/weaviate/weaviate/entities/vectorindex/common" + +const ( + DefaultSQEnabled = false + DefaultSQTrainingLimit = 100000 + DefaultSQRescoreLimit = 20 +) + +type SQConfig struct { + Enabled bool `json:"enabled"` + TrainingLimit int `json:"trainingLimit"` + RescoreLimit int `json:"rescoreLimit"` +} + +func parseSQMap(in map[string]interface{}, sq *SQConfig) error { + sqConfigValue, ok := in["sq"] + if !ok { + return nil + } + + sqConfigMap, ok := sqConfigValue.(map[string]interface{}) + if !ok { + return nil + } + + if err := common.OptionalBoolFromMap(sqConfigMap, "enabled", func(v bool) { + sq.Enabled = v + }); err != nil { + return err + } + + if err := common.OptionalIntFromMap(sqConfigMap, "trainingLimit", func(v int) { + sq.TrainingLimit = v + }); err != nil { + return err + } + + if err := common.OptionalIntFromMap(sqConfigMap, "rescoreLimit", func(v int) { + sq.RescoreLimit = v + }); err != nil { + return err + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/entities/verbosity/verbosity.go b/platform/dbops/binaries/weaviate-src/entities/verbosity/verbosity.go new file mode 100644 index 0000000000000000000000000000000000000000..73350ca36f50a4e6a8d8e6309b3b8e7562f4c85f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/entities/verbosity/verbosity.go @@ -0,0 +1,34 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
const (
	OutputMinimal = "minimal"
	OutputVerbose = "verbose"
)

// ParseOutput extracts the verbosity value from the provided nullable string.
// A nil pointer selects the default, "minimal"; any value other than
// "minimal" or "verbose" yields an error.
func ParseOutput(output *string) (string, error) {
	if output == nil {
		return OutputMinimal, nil
	}
	switch v := *output; v {
	case OutputMinimal, OutputVerbose:
		return v, nil
	default:
		return "", fmt.Errorf(`invalid output: "%s", possible values are: "%s", "%s"`,
			*output, OutputMinimal, OutputVerbose)
	}
}
+ +package protocol + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type BatchObjectsRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchObjectsRequest) Reset() { + *x = BatchObjectsRequest{} + mi := &file_v0_batch_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchObjectsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchObjectsRequest) ProtoMessage() {} + +func (x *BatchObjectsRequest) ProtoReflect() protoreflect.Message { + mi := &file_v0_batch_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchObjectsRequest.ProtoReflect.Descriptor instead. 
+func (*BatchObjectsRequest) Descriptor() ([]byte, []int) { + return file_v0_batch_proto_rawDescGZIP(), []int{0} +} + +type BatchObjectsReply struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchObjectsReply) Reset() { + *x = BatchObjectsReply{} + mi := &file_v0_batch_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchObjectsReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchObjectsReply) ProtoMessage() {} + +func (x *BatchObjectsReply) ProtoReflect() protoreflect.Message { + mi := &file_v0_batch_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchObjectsReply.ProtoReflect.Descriptor instead. +func (*BatchObjectsReply) Descriptor() ([]byte, []int) { + return file_v0_batch_proto_rawDescGZIP(), []int{1} +} + +var File_v0_batch_proto protoreflect.FileDescriptor + +const file_v0_batch_proto_rawDesc = "" + + "\n" + + "\x0ev0/batch.proto\x12\fweaviategrpc\"\x15\n" + + "\x13BatchObjectsRequest\"\x13\n" + + "\x11BatchObjectsReplyBo\n" + + "#io.weaviate.client.grpc.protocol.v0B\x12WeaviateProtoBatchZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var ( + file_v0_batch_proto_rawDescOnce sync.Once + file_v0_batch_proto_rawDescData []byte +) + +func file_v0_batch_proto_rawDescGZIP() []byte { + file_v0_batch_proto_rawDescOnce.Do(func() { + file_v0_batch_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_v0_batch_proto_rawDesc), len(file_v0_batch_proto_rawDesc))) + }) + return file_v0_batch_proto_rawDescData +} + +var file_v0_batch_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_v0_batch_proto_goTypes = []any{ + (*BatchObjectsRequest)(nil), // 0: 
weaviategrpc.BatchObjectsRequest + (*BatchObjectsReply)(nil), // 1: weaviategrpc.BatchObjectsReply +} +var file_v0_batch_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_v0_batch_proto_init() } +func file_v0_batch_proto_init() { + if File_v0_batch_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v0_batch_proto_rawDesc), len(file_v0_batch_proto_rawDesc)), + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_v0_batch_proto_goTypes, + DependencyIndexes: file_v0_batch_proto_depIdxs, + MessageInfos: file_v0_batch_proto_msgTypes, + }.Build() + File_v0_batch_proto = out.File + file_v0_batch_proto_goTypes = nil + file_v0_batch_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v0/search_get.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v0/search_get.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..21230e5233544d706873e3da1bd0e8b93764270e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v0/search_get.pb.go @@ -0,0 +1,149 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package protocol + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type SearchRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SearchRequest) Reset() { + *x = SearchRequest{} + mi := &file_v0_search_get_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SearchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SearchRequest) ProtoMessage() {} + +func (x *SearchRequest) ProtoReflect() protoreflect.Message { + mi := &file_v0_search_get_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SearchRequest.ProtoReflect.Descriptor instead. +func (*SearchRequest) Descriptor() ([]byte, []int) { + return file_v0_search_get_proto_rawDescGZIP(), []int{0} +} + +type SearchReply struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SearchReply) Reset() { + *x = SearchReply{} + mi := &file_v0_search_get_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SearchReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SearchReply) ProtoMessage() {} + +func (x *SearchReply) ProtoReflect() protoreflect.Message { + mi := &file_v0_search_get_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SearchReply.ProtoReflect.Descriptor instead. 
+func (*SearchReply) Descriptor() ([]byte, []int) { + return file_v0_search_get_proto_rawDescGZIP(), []int{1} +} + +var File_v0_search_get_proto protoreflect.FileDescriptor + +const file_v0_search_get_proto_rawDesc = "" + + "\n" + + "\x13v0/search_get.proto\x12\fweaviategrpc\"\x0f\n" + + "\rSearchRequest\"\r\n" + + "\vSearchReplyBs\n" + + "#io.weaviate.client.grpc.protocol.v0B\x16WeaviateProtoSearchGetZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var ( + file_v0_search_get_proto_rawDescOnce sync.Once + file_v0_search_get_proto_rawDescData []byte +) + +func file_v0_search_get_proto_rawDescGZIP() []byte { + file_v0_search_get_proto_rawDescOnce.Do(func() { + file_v0_search_get_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_v0_search_get_proto_rawDesc), len(file_v0_search_get_proto_rawDesc))) + }) + return file_v0_search_get_proto_rawDescData +} + +var file_v0_search_get_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_v0_search_get_proto_goTypes = []any{ + (*SearchRequest)(nil), // 0: weaviategrpc.SearchRequest + (*SearchReply)(nil), // 1: weaviategrpc.SearchReply +} +var file_v0_search_get_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_v0_search_get_proto_init() } +func file_v0_search_get_proto_init() { + if File_v0_search_get_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v0_search_get_proto_rawDesc), len(file_v0_search_get_proto_rawDesc)), + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_v0_search_get_proto_goTypes, + 
DependencyIndexes: file_v0_search_get_proto_depIdxs, + MessageInfos: file_v0_search_get_proto_msgTypes, + }.Build() + File_v0_search_get_proto = out.File + file_v0_search_get_proto_goTypes = nil + file_v0_search_get_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v0/weaviate.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v0/weaviate.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..193b87973231fe52ad2c4a4a35b8ff940f48058f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v0/weaviate.pb.go @@ -0,0 +1,71 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package protocol + +import ( + reflect "reflect" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +var File_v0_weaviate_proto protoreflect.FileDescriptor + +const file_v0_weaviate_proto_rawDesc = "" + + "\n" + + "\x11v0/weaviate.proto\x12\fweaviategrpc\x1a\x0ev0/batch.proto\x1a\x13v0/search_get.proto2\xa4\x01\n" + + "\bWeaviate\x12B\n" + + "\x06Search\x12\x1b.weaviategrpc.SearchRequest\x1a\x19.weaviategrpc.SearchReply\"\x00\x12T\n" + + "\fBatchObjects\x12!.weaviategrpc.BatchObjectsRequest\x1a\x1f.weaviategrpc.BatchObjectsReply\"\x00Bj\n" + + "#io.weaviate.client.grpc.protocol.v0B\rWeaviateProtoZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var file_v0_weaviate_proto_goTypes = []any{ + (*SearchRequest)(nil), // 0: weaviategrpc.SearchRequest + (*BatchObjectsRequest)(nil), // 1: weaviategrpc.BatchObjectsRequest + (*SearchReply)(nil), // 2: weaviategrpc.SearchReply + (*BatchObjectsReply)(nil), // 3: weaviategrpc.BatchObjectsReply +} +var file_v0_weaviate_proto_depIdxs = []int32{ + 0, // 0: weaviategrpc.Weaviate.Search:input_type -> weaviategrpc.SearchRequest + 1, // 1: weaviategrpc.Weaviate.BatchObjects:input_type -> weaviategrpc.BatchObjectsRequest + 2, // 2: weaviategrpc.Weaviate.Search:output_type -> weaviategrpc.SearchReply + 3, // 3: weaviategrpc.Weaviate.BatchObjects:output_type -> weaviategrpc.BatchObjectsReply + 2, // [2:4] is the sub-list for method output_type + 0, // [0:2] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_v0_weaviate_proto_init() } +func file_v0_weaviate_proto_init() { + if File_v0_weaviate_proto != nil { + return + } + file_v0_batch_proto_init() + file_v0_search_get_proto_init() + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v0_weaviate_proto_rawDesc), 
len(file_v0_weaviate_proto_rawDesc)), + NumEnums: 0, + NumMessages: 0, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_v0_weaviate_proto_goTypes, + DependencyIndexes: file_v0_weaviate_proto_depIdxs, + }.Build() + File_v0_weaviate_proto = out.File + file_v0_weaviate_proto_goTypes = nil + file_v0_weaviate_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v0/weaviate_grpc.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v0/weaviate_grpc.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..594f4245b62e5aaf0c0bfd2e6276b26d67eb8e55 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v0/weaviate_grpc.pb.go @@ -0,0 +1,156 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. + +package protocol + +import ( + context "context" + + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + Weaviate_Search_FullMethodName = "/weaviategrpc.Weaviate/Search" + Weaviate_BatchObjects_FullMethodName = "/weaviategrpc.Weaviate/BatchObjects" +) + +// WeaviateClient is the client API for Weaviate service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
+type WeaviateClient interface { + Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*SearchReply, error) + BatchObjects(ctx context.Context, in *BatchObjectsRequest, opts ...grpc.CallOption) (*BatchObjectsReply, error) +} + +type weaviateClient struct { + cc grpc.ClientConnInterface +} + +func NewWeaviateClient(cc grpc.ClientConnInterface) WeaviateClient { + return &weaviateClient{cc} +} + +func (c *weaviateClient) Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*SearchReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(SearchReply) + err := c.cc.Invoke(ctx, Weaviate_Search_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *weaviateClient) BatchObjects(ctx context.Context, in *BatchObjectsRequest, opts ...grpc.CallOption) (*BatchObjectsReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(BatchObjectsReply) + err := c.cc.Invoke(ctx, Weaviate_BatchObjects_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// WeaviateServer is the server API for Weaviate service. +// All implementations must embed UnimplementedWeaviateServer +// for forward compatibility. +type WeaviateServer interface { + Search(context.Context, *SearchRequest) (*SearchReply, error) + BatchObjects(context.Context, *BatchObjectsRequest) (*BatchObjectsReply, error) + mustEmbedUnimplementedWeaviateServer() +} + +// UnimplementedWeaviateServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. 
+type UnimplementedWeaviateServer struct{} + +func (UnimplementedWeaviateServer) Search(context.Context, *SearchRequest) (*SearchReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method Search not implemented") +} +func (UnimplementedWeaviateServer) BatchObjects(context.Context, *BatchObjectsRequest) (*BatchObjectsReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method BatchObjects not implemented") +} +func (UnimplementedWeaviateServer) mustEmbedUnimplementedWeaviateServer() {} +func (UnimplementedWeaviateServer) testEmbeddedByValue() {} + +// UnsafeWeaviateServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to WeaviateServer will +// result in compilation errors. +type UnsafeWeaviateServer interface { + mustEmbedUnimplementedWeaviateServer() +} + +func RegisterWeaviateServer(s grpc.ServiceRegistrar, srv WeaviateServer) { + // If the following call pancis, it indicates UnimplementedWeaviateServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&Weaviate_ServiceDesc, srv) +} + +func _Weaviate_Search_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WeaviateServer).Search(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Weaviate_Search_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WeaviateServer).Search(ctx, req.(*SearchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Weaviate_BatchObjects_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchObjectsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WeaviateServer).BatchObjects(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Weaviate_BatchObjects_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WeaviateServer).BatchObjects(ctx, req.(*BatchObjectsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Weaviate_ServiceDesc is the grpc.ServiceDesc for Weaviate service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Weaviate_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "weaviategrpc.Weaviate", + HandlerType: (*WeaviateServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Search", + Handler: _Weaviate_Search_Handler, + }, + { + MethodName: "BatchObjects", + Handler: _Weaviate_BatchObjects_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "v0/weaviate.proto", +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/aggregate.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/aggregate.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..09eb98d59ef03e23dbd00595a761e3e7591a3ae3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/aggregate.pb.go @@ -0,0 +1,2630 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package protocol + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type AggregateRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + // required + Collection string `protobuf:"bytes,1,opt,name=collection,proto3" json:"collection,omitempty"` + // parameters + Tenant string `protobuf:"bytes,10,opt,name=tenant,proto3" json:"tenant,omitempty"` + // what is returned + ObjectsCount bool `protobuf:"varint,20,opt,name=objects_count,json=objectsCount,proto3" json:"objects_count,omitempty"` + Aggregations []*AggregateRequest_Aggregation `protobuf:"bytes,21,rep,name=aggregations,proto3" json:"aggregations,omitempty"` + // affects aggregation results + ObjectLimit *uint32 `protobuf:"varint,30,opt,name=object_limit,json=objectLimit,proto3,oneof" json:"object_limit,omitempty"` + GroupBy *AggregateRequest_GroupBy `protobuf:"bytes,31,opt,name=group_by,json=groupBy,proto3,oneof" json:"group_by,omitempty"` + Limit *uint32 `protobuf:"varint,32,opt,name=limit,proto3,oneof" json:"limit,omitempty"` + // matches/searches for objects + Filters *Filters `protobuf:"bytes,40,opt,name=filters,proto3,oneof" json:"filters,omitempty"` + // Types that are valid to be assigned to Search: + // + // *AggregateRequest_Hybrid + // *AggregateRequest_NearVector + // *AggregateRequest_NearObject + // *AggregateRequest_NearText + // *AggregateRequest_NearImage + // *AggregateRequest_NearAudio + // *AggregateRequest_NearVideo + // *AggregateRequest_NearDepth + // *AggregateRequest_NearThermal + // *AggregateRequest_NearImu + Search isAggregateRequest_Search `protobuf_oneof:"search"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateRequest) Reset() { + *x = AggregateRequest{} + mi := &file_v1_aggregate_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateRequest) ProtoMessage() {} + 
+func (x *AggregateRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateRequest.ProtoReflect.Descriptor instead. +func (*AggregateRequest) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{0} +} + +func (x *AggregateRequest) GetCollection() string { + if x != nil { + return x.Collection + } + return "" +} + +func (x *AggregateRequest) GetTenant() string { + if x != nil { + return x.Tenant + } + return "" +} + +func (x *AggregateRequest) GetObjectsCount() bool { + if x != nil { + return x.ObjectsCount + } + return false +} + +func (x *AggregateRequest) GetAggregations() []*AggregateRequest_Aggregation { + if x != nil { + return x.Aggregations + } + return nil +} + +func (x *AggregateRequest) GetObjectLimit() uint32 { + if x != nil && x.ObjectLimit != nil { + return *x.ObjectLimit + } + return 0 +} + +func (x *AggregateRequest) GetGroupBy() *AggregateRequest_GroupBy { + if x != nil { + return x.GroupBy + } + return nil +} + +func (x *AggregateRequest) GetLimit() uint32 { + if x != nil && x.Limit != nil { + return *x.Limit + } + return 0 +} + +func (x *AggregateRequest) GetFilters() *Filters { + if x != nil { + return x.Filters + } + return nil +} + +func (x *AggregateRequest) GetSearch() isAggregateRequest_Search { + if x != nil { + return x.Search + } + return nil +} + +func (x *AggregateRequest) GetHybrid() *Hybrid { + if x != nil { + if x, ok := x.Search.(*AggregateRequest_Hybrid); ok { + return x.Hybrid + } + } + return nil +} + +func (x *AggregateRequest) GetNearVector() *NearVector { + if x != nil { + if x, ok := x.Search.(*AggregateRequest_NearVector); ok { + return x.NearVector + } + } + return nil +} + +func (x *AggregateRequest) GetNearObject() *NearObject { + if x != nil { + 
if x, ok := x.Search.(*AggregateRequest_NearObject); ok { + return x.NearObject + } + } + return nil +} + +func (x *AggregateRequest) GetNearText() *NearTextSearch { + if x != nil { + if x, ok := x.Search.(*AggregateRequest_NearText); ok { + return x.NearText + } + } + return nil +} + +func (x *AggregateRequest) GetNearImage() *NearImageSearch { + if x != nil { + if x, ok := x.Search.(*AggregateRequest_NearImage); ok { + return x.NearImage + } + } + return nil +} + +func (x *AggregateRequest) GetNearAudio() *NearAudioSearch { + if x != nil { + if x, ok := x.Search.(*AggregateRequest_NearAudio); ok { + return x.NearAudio + } + } + return nil +} + +func (x *AggregateRequest) GetNearVideo() *NearVideoSearch { + if x != nil { + if x, ok := x.Search.(*AggregateRequest_NearVideo); ok { + return x.NearVideo + } + } + return nil +} + +func (x *AggregateRequest) GetNearDepth() *NearDepthSearch { + if x != nil { + if x, ok := x.Search.(*AggregateRequest_NearDepth); ok { + return x.NearDepth + } + } + return nil +} + +func (x *AggregateRequest) GetNearThermal() *NearThermalSearch { + if x != nil { + if x, ok := x.Search.(*AggregateRequest_NearThermal); ok { + return x.NearThermal + } + } + return nil +} + +func (x *AggregateRequest) GetNearImu() *NearIMUSearch { + if x != nil { + if x, ok := x.Search.(*AggregateRequest_NearImu); ok { + return x.NearImu + } + } + return nil +} + +type isAggregateRequest_Search interface { + isAggregateRequest_Search() +} + +type AggregateRequest_Hybrid struct { + Hybrid *Hybrid `protobuf:"bytes,41,opt,name=hybrid,proto3,oneof"` +} + +type AggregateRequest_NearVector struct { + NearVector *NearVector `protobuf:"bytes,42,opt,name=near_vector,json=nearVector,proto3,oneof"` +} + +type AggregateRequest_NearObject struct { + NearObject *NearObject `protobuf:"bytes,43,opt,name=near_object,json=nearObject,proto3,oneof"` +} + +type AggregateRequest_NearText struct { + NearText *NearTextSearch 
`protobuf:"bytes,44,opt,name=near_text,json=nearText,proto3,oneof"` +} + +type AggregateRequest_NearImage struct { + NearImage *NearImageSearch `protobuf:"bytes,45,opt,name=near_image,json=nearImage,proto3,oneof"` +} + +type AggregateRequest_NearAudio struct { + NearAudio *NearAudioSearch `protobuf:"bytes,46,opt,name=near_audio,json=nearAudio,proto3,oneof"` +} + +type AggregateRequest_NearVideo struct { + NearVideo *NearVideoSearch `protobuf:"bytes,47,opt,name=near_video,json=nearVideo,proto3,oneof"` +} + +type AggregateRequest_NearDepth struct { + NearDepth *NearDepthSearch `protobuf:"bytes,48,opt,name=near_depth,json=nearDepth,proto3,oneof"` +} + +type AggregateRequest_NearThermal struct { + NearThermal *NearThermalSearch `protobuf:"bytes,49,opt,name=near_thermal,json=nearThermal,proto3,oneof"` +} + +type AggregateRequest_NearImu struct { + NearImu *NearIMUSearch `protobuf:"bytes,50,opt,name=near_imu,json=nearImu,proto3,oneof"` +} + +func (*AggregateRequest_Hybrid) isAggregateRequest_Search() {} + +func (*AggregateRequest_NearVector) isAggregateRequest_Search() {} + +func (*AggregateRequest_NearObject) isAggregateRequest_Search() {} + +func (*AggregateRequest_NearText) isAggregateRequest_Search() {} + +func (*AggregateRequest_NearImage) isAggregateRequest_Search() {} + +func (*AggregateRequest_NearAudio) isAggregateRequest_Search() {} + +func (*AggregateRequest_NearVideo) isAggregateRequest_Search() {} + +func (*AggregateRequest_NearDepth) isAggregateRequest_Search() {} + +func (*AggregateRequest_NearThermal) isAggregateRequest_Search() {} + +func (*AggregateRequest_NearImu) isAggregateRequest_Search() {} + +type AggregateReply struct { + state protoimpl.MessageState `protogen:"open.v1"` + Took float32 `protobuf:"fixed32,1,opt,name=took,proto3" json:"took,omitempty"` + // Types that are valid to be assigned to Result: + // + // *AggregateReply_SingleResult + // *AggregateReply_GroupedResults + Result isAggregateReply_Result `protobuf_oneof:"result"` + 
unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply) Reset() { + *x = AggregateReply{} + mi := &file_v1_aggregate_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply) ProtoMessage() {} + +func (x *AggregateReply) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply.ProtoReflect.Descriptor instead. +func (*AggregateReply) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1} +} + +func (x *AggregateReply) GetTook() float32 { + if x != nil { + return x.Took + } + return 0 +} + +func (x *AggregateReply) GetResult() isAggregateReply_Result { + if x != nil { + return x.Result + } + return nil +} + +func (x *AggregateReply) GetSingleResult() *AggregateReply_Single { + if x != nil { + if x, ok := x.Result.(*AggregateReply_SingleResult); ok { + return x.SingleResult + } + } + return nil +} + +func (x *AggregateReply) GetGroupedResults() *AggregateReply_Grouped { + if x != nil { + if x, ok := x.Result.(*AggregateReply_GroupedResults); ok { + return x.GroupedResults + } + } + return nil +} + +type isAggregateReply_Result interface { + isAggregateReply_Result() +} + +type AggregateReply_SingleResult struct { + SingleResult *AggregateReply_Single `protobuf:"bytes,2,opt,name=single_result,json=singleResult,proto3,oneof"` +} + +type AggregateReply_GroupedResults struct { + GroupedResults *AggregateReply_Grouped `protobuf:"bytes,3,opt,name=grouped_results,json=groupedResults,proto3,oneof"` +} + +func (*AggregateReply_SingleResult) isAggregateReply_Result() {} + +func 
(*AggregateReply_GroupedResults) isAggregateReply_Result() {} + +type AggregateRequest_Aggregation struct { + state protoimpl.MessageState `protogen:"open.v1"` + Property string `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // Types that are valid to be assigned to Aggregation: + // + // *AggregateRequest_Aggregation_Int + // *AggregateRequest_Aggregation_Number_ + // *AggregateRequest_Aggregation_Text_ + // *AggregateRequest_Aggregation_Boolean_ + // *AggregateRequest_Aggregation_Date_ + // *AggregateRequest_Aggregation_Reference_ + Aggregation isAggregateRequest_Aggregation_Aggregation `protobuf_oneof:"aggregation"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateRequest_Aggregation) Reset() { + *x = AggregateRequest_Aggregation{} + mi := &file_v1_aggregate_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateRequest_Aggregation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateRequest_Aggregation) ProtoMessage() {} + +func (x *AggregateRequest_Aggregation) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateRequest_Aggregation.ProtoReflect.Descriptor instead. 
+func (*AggregateRequest_Aggregation) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{0, 0} +} + +func (x *AggregateRequest_Aggregation) GetProperty() string { + if x != nil { + return x.Property + } + return "" +} + +func (x *AggregateRequest_Aggregation) GetAggregation() isAggregateRequest_Aggregation_Aggregation { + if x != nil { + return x.Aggregation + } + return nil +} + +func (x *AggregateRequest_Aggregation) GetInt() *AggregateRequest_Aggregation_Integer { + if x != nil { + if x, ok := x.Aggregation.(*AggregateRequest_Aggregation_Int); ok { + return x.Int + } + } + return nil +} + +func (x *AggregateRequest_Aggregation) GetNumber() *AggregateRequest_Aggregation_Number { + if x != nil { + if x, ok := x.Aggregation.(*AggregateRequest_Aggregation_Number_); ok { + return x.Number + } + } + return nil +} + +func (x *AggregateRequest_Aggregation) GetText() *AggregateRequest_Aggregation_Text { + if x != nil { + if x, ok := x.Aggregation.(*AggregateRequest_Aggregation_Text_); ok { + return x.Text + } + } + return nil +} + +func (x *AggregateRequest_Aggregation) GetBoolean() *AggregateRequest_Aggregation_Boolean { + if x != nil { + if x, ok := x.Aggregation.(*AggregateRequest_Aggregation_Boolean_); ok { + return x.Boolean + } + } + return nil +} + +func (x *AggregateRequest_Aggregation) GetDate() *AggregateRequest_Aggregation_Date { + if x != nil { + if x, ok := x.Aggregation.(*AggregateRequest_Aggregation_Date_); ok { + return x.Date + } + } + return nil +} + +func (x *AggregateRequest_Aggregation) GetReference() *AggregateRequest_Aggregation_Reference { + if x != nil { + if x, ok := x.Aggregation.(*AggregateRequest_Aggregation_Reference_); ok { + return x.Reference + } + } + return nil +} + +type isAggregateRequest_Aggregation_Aggregation interface { + isAggregateRequest_Aggregation_Aggregation() +} + +type AggregateRequest_Aggregation_Int struct { + Int *AggregateRequest_Aggregation_Integer 
`protobuf:"bytes,2,opt,name=int,proto3,oneof"` +} + +type AggregateRequest_Aggregation_Number_ struct { + Number *AggregateRequest_Aggregation_Number `protobuf:"bytes,3,opt,name=number,proto3,oneof"` +} + +type AggregateRequest_Aggregation_Text_ struct { + Text *AggregateRequest_Aggregation_Text `protobuf:"bytes,4,opt,name=text,proto3,oneof"` +} + +type AggregateRequest_Aggregation_Boolean_ struct { + Boolean *AggregateRequest_Aggregation_Boolean `protobuf:"bytes,5,opt,name=boolean,proto3,oneof"` +} + +type AggregateRequest_Aggregation_Date_ struct { + Date *AggregateRequest_Aggregation_Date `protobuf:"bytes,6,opt,name=date,proto3,oneof"` +} + +type AggregateRequest_Aggregation_Reference_ struct { + Reference *AggregateRequest_Aggregation_Reference `protobuf:"bytes,7,opt,name=reference,proto3,oneof"` +} + +func (*AggregateRequest_Aggregation_Int) isAggregateRequest_Aggregation_Aggregation() {} + +func (*AggregateRequest_Aggregation_Number_) isAggregateRequest_Aggregation_Aggregation() {} + +func (*AggregateRequest_Aggregation_Text_) isAggregateRequest_Aggregation_Aggregation() {} + +func (*AggregateRequest_Aggregation_Boolean_) isAggregateRequest_Aggregation_Aggregation() {} + +func (*AggregateRequest_Aggregation_Date_) isAggregateRequest_Aggregation_Aggregation() {} + +func (*AggregateRequest_Aggregation_Reference_) isAggregateRequest_Aggregation_Aggregation() {} + +type AggregateRequest_GroupBy struct { + state protoimpl.MessageState `protogen:"open.v1"` + Collection string `protobuf:"bytes,1,opt,name=collection,proto3" json:"collection,omitempty"` + Property string `protobuf:"bytes,2,opt,name=property,proto3" json:"property,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateRequest_GroupBy) Reset() { + *x = AggregateRequest_GroupBy{} + mi := &file_v1_aggregate_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateRequest_GroupBy) 
String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateRequest_GroupBy) ProtoMessage() {} + +func (x *AggregateRequest_GroupBy) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateRequest_GroupBy.ProtoReflect.Descriptor instead. +func (*AggregateRequest_GroupBy) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{0, 1} +} + +func (x *AggregateRequest_GroupBy) GetCollection() string { + if x != nil { + return x.Collection + } + return "" +} + +func (x *AggregateRequest_GroupBy) GetProperty() string { + if x != nil { + return x.Property + } + return "" +} + +type AggregateRequest_Aggregation_Integer struct { + state protoimpl.MessageState `protogen:"open.v1"` + Count bool `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` + Type bool `protobuf:"varint,2,opt,name=type,proto3" json:"type,omitempty"` + Sum bool `protobuf:"varint,3,opt,name=sum,proto3" json:"sum,omitempty"` + Mean bool `protobuf:"varint,4,opt,name=mean,proto3" json:"mean,omitempty"` + Mode bool `protobuf:"varint,5,opt,name=mode,proto3" json:"mode,omitempty"` + Median bool `protobuf:"varint,6,opt,name=median,proto3" json:"median,omitempty"` + Maximum bool `protobuf:"varint,7,opt,name=maximum,proto3" json:"maximum,omitempty"` + Minimum bool `protobuf:"varint,8,opt,name=minimum,proto3" json:"minimum,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateRequest_Aggregation_Integer) Reset() { + *x = AggregateRequest_Aggregation_Integer{} + mi := &file_v1_aggregate_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateRequest_Aggregation_Integer) String() string { + return 
protoimpl.X.MessageStringOf(x) +} + +func (*AggregateRequest_Aggregation_Integer) ProtoMessage() {} + +func (x *AggregateRequest_Aggregation_Integer) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateRequest_Aggregation_Integer.ProtoReflect.Descriptor instead. +func (*AggregateRequest_Aggregation_Integer) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{0, 0, 0} +} + +func (x *AggregateRequest_Aggregation_Integer) GetCount() bool { + if x != nil { + return x.Count + } + return false +} + +func (x *AggregateRequest_Aggregation_Integer) GetType() bool { + if x != nil { + return x.Type + } + return false +} + +func (x *AggregateRequest_Aggregation_Integer) GetSum() bool { + if x != nil { + return x.Sum + } + return false +} + +func (x *AggregateRequest_Aggregation_Integer) GetMean() bool { + if x != nil { + return x.Mean + } + return false +} + +func (x *AggregateRequest_Aggregation_Integer) GetMode() bool { + if x != nil { + return x.Mode + } + return false +} + +func (x *AggregateRequest_Aggregation_Integer) GetMedian() bool { + if x != nil { + return x.Median + } + return false +} + +func (x *AggregateRequest_Aggregation_Integer) GetMaximum() bool { + if x != nil { + return x.Maximum + } + return false +} + +func (x *AggregateRequest_Aggregation_Integer) GetMinimum() bool { + if x != nil { + return x.Minimum + } + return false +} + +type AggregateRequest_Aggregation_Number struct { + state protoimpl.MessageState `protogen:"open.v1"` + Count bool `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` + Type bool `protobuf:"varint,2,opt,name=type,proto3" json:"type,omitempty"` + Sum bool `protobuf:"varint,3,opt,name=sum,proto3" json:"sum,omitempty"` + Mean bool 
`protobuf:"varint,4,opt,name=mean,proto3" json:"mean,omitempty"` + Mode bool `protobuf:"varint,5,opt,name=mode,proto3" json:"mode,omitempty"` + Median bool `protobuf:"varint,6,opt,name=median,proto3" json:"median,omitempty"` + Maximum bool `protobuf:"varint,7,opt,name=maximum,proto3" json:"maximum,omitempty"` + Minimum bool `protobuf:"varint,8,opt,name=minimum,proto3" json:"minimum,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateRequest_Aggregation_Number) Reset() { + *x = AggregateRequest_Aggregation_Number{} + mi := &file_v1_aggregate_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateRequest_Aggregation_Number) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateRequest_Aggregation_Number) ProtoMessage() {} + +func (x *AggregateRequest_Aggregation_Number) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateRequest_Aggregation_Number.ProtoReflect.Descriptor instead. 
+func (*AggregateRequest_Aggregation_Number) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{0, 0, 1} +} + +func (x *AggregateRequest_Aggregation_Number) GetCount() bool { + if x != nil { + return x.Count + } + return false +} + +func (x *AggregateRequest_Aggregation_Number) GetType() bool { + if x != nil { + return x.Type + } + return false +} + +func (x *AggregateRequest_Aggregation_Number) GetSum() bool { + if x != nil { + return x.Sum + } + return false +} + +func (x *AggregateRequest_Aggregation_Number) GetMean() bool { + if x != nil { + return x.Mean + } + return false +} + +func (x *AggregateRequest_Aggregation_Number) GetMode() bool { + if x != nil { + return x.Mode + } + return false +} + +func (x *AggregateRequest_Aggregation_Number) GetMedian() bool { + if x != nil { + return x.Median + } + return false +} + +func (x *AggregateRequest_Aggregation_Number) GetMaximum() bool { + if x != nil { + return x.Maximum + } + return false +} + +func (x *AggregateRequest_Aggregation_Number) GetMinimum() bool { + if x != nil { + return x.Minimum + } + return false +} + +type AggregateRequest_Aggregation_Text struct { + state protoimpl.MessageState `protogen:"open.v1"` + Count bool `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` + Type bool `protobuf:"varint,2,opt,name=type,proto3" json:"type,omitempty"` + TopOccurences bool `protobuf:"varint,3,opt,name=top_occurences,json=topOccurences,proto3" json:"top_occurences,omitempty"` + TopOccurencesLimit *uint32 `protobuf:"varint,4,opt,name=top_occurences_limit,json=topOccurencesLimit,proto3,oneof" json:"top_occurences_limit,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateRequest_Aggregation_Text) Reset() { + *x = AggregateRequest_Aggregation_Text{} + mi := &file_v1_aggregate_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x 
*AggregateRequest_Aggregation_Text) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateRequest_Aggregation_Text) ProtoMessage() {} + +func (x *AggregateRequest_Aggregation_Text) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateRequest_Aggregation_Text.ProtoReflect.Descriptor instead. +func (*AggregateRequest_Aggregation_Text) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{0, 0, 2} +} + +func (x *AggregateRequest_Aggregation_Text) GetCount() bool { + if x != nil { + return x.Count + } + return false +} + +func (x *AggregateRequest_Aggregation_Text) GetType() bool { + if x != nil { + return x.Type + } + return false +} + +func (x *AggregateRequest_Aggregation_Text) GetTopOccurences() bool { + if x != nil { + return x.TopOccurences + } + return false +} + +func (x *AggregateRequest_Aggregation_Text) GetTopOccurencesLimit() uint32 { + if x != nil && x.TopOccurencesLimit != nil { + return *x.TopOccurencesLimit + } + return 0 +} + +type AggregateRequest_Aggregation_Boolean struct { + state protoimpl.MessageState `protogen:"open.v1"` + Count bool `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` + Type bool `protobuf:"varint,2,opt,name=type,proto3" json:"type,omitempty"` + TotalTrue bool `protobuf:"varint,3,opt,name=total_true,json=totalTrue,proto3" json:"total_true,omitempty"` + TotalFalse bool `protobuf:"varint,4,opt,name=total_false,json=totalFalse,proto3" json:"total_false,omitempty"` + PercentageTrue bool `protobuf:"varint,5,opt,name=percentage_true,json=percentageTrue,proto3" json:"percentage_true,omitempty"` + PercentageFalse bool `protobuf:"varint,6,opt,name=percentage_false,json=percentageFalse,proto3" json:"percentage_false,omitempty"` + 
unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateRequest_Aggregation_Boolean) Reset() { + *x = AggregateRequest_Aggregation_Boolean{} + mi := &file_v1_aggregate_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateRequest_Aggregation_Boolean) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateRequest_Aggregation_Boolean) ProtoMessage() {} + +func (x *AggregateRequest_Aggregation_Boolean) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateRequest_Aggregation_Boolean.ProtoReflect.Descriptor instead. +func (*AggregateRequest_Aggregation_Boolean) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{0, 0, 3} +} + +func (x *AggregateRequest_Aggregation_Boolean) GetCount() bool { + if x != nil { + return x.Count + } + return false +} + +func (x *AggregateRequest_Aggregation_Boolean) GetType() bool { + if x != nil { + return x.Type + } + return false +} + +func (x *AggregateRequest_Aggregation_Boolean) GetTotalTrue() bool { + if x != nil { + return x.TotalTrue + } + return false +} + +func (x *AggregateRequest_Aggregation_Boolean) GetTotalFalse() bool { + if x != nil { + return x.TotalFalse + } + return false +} + +func (x *AggregateRequest_Aggregation_Boolean) GetPercentageTrue() bool { + if x != nil { + return x.PercentageTrue + } + return false +} + +func (x *AggregateRequest_Aggregation_Boolean) GetPercentageFalse() bool { + if x != nil { + return x.PercentageFalse + } + return false +} + +type AggregateRequest_Aggregation_Date struct { + state protoimpl.MessageState `protogen:"open.v1"` + Count bool `protobuf:"varint,1,opt,name=count,proto3" 
json:"count,omitempty"` + Type bool `protobuf:"varint,2,opt,name=type,proto3" json:"type,omitempty"` + Median bool `protobuf:"varint,3,opt,name=median,proto3" json:"median,omitempty"` + Mode bool `protobuf:"varint,4,opt,name=mode,proto3" json:"mode,omitempty"` + Maximum bool `protobuf:"varint,5,opt,name=maximum,proto3" json:"maximum,omitempty"` + Minimum bool `protobuf:"varint,6,opt,name=minimum,proto3" json:"minimum,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateRequest_Aggregation_Date) Reset() { + *x = AggregateRequest_Aggregation_Date{} + mi := &file_v1_aggregate_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateRequest_Aggregation_Date) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateRequest_Aggregation_Date) ProtoMessage() {} + +func (x *AggregateRequest_Aggregation_Date) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateRequest_Aggregation_Date.ProtoReflect.Descriptor instead. 
+func (*AggregateRequest_Aggregation_Date) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{0, 0, 4} +} + +func (x *AggregateRequest_Aggregation_Date) GetCount() bool { + if x != nil { + return x.Count + } + return false +} + +func (x *AggregateRequest_Aggregation_Date) GetType() bool { + if x != nil { + return x.Type + } + return false +} + +func (x *AggregateRequest_Aggregation_Date) GetMedian() bool { + if x != nil { + return x.Median + } + return false +} + +func (x *AggregateRequest_Aggregation_Date) GetMode() bool { + if x != nil { + return x.Mode + } + return false +} + +func (x *AggregateRequest_Aggregation_Date) GetMaximum() bool { + if x != nil { + return x.Maximum + } + return false +} + +func (x *AggregateRequest_Aggregation_Date) GetMinimum() bool { + if x != nil { + return x.Minimum + } + return false +} + +type AggregateRequest_Aggregation_Reference struct { + state protoimpl.MessageState `protogen:"open.v1"` + Type bool `protobuf:"varint,1,opt,name=type,proto3" json:"type,omitempty"` + PointingTo bool `protobuf:"varint,2,opt,name=pointing_to,json=pointingTo,proto3" json:"pointing_to,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateRequest_Aggregation_Reference) Reset() { + *x = AggregateRequest_Aggregation_Reference{} + mi := &file_v1_aggregate_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateRequest_Aggregation_Reference) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateRequest_Aggregation_Reference) ProtoMessage() {} + +func (x *AggregateRequest_Aggregation_Reference) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
AggregateRequest_Aggregation_Reference.ProtoReflect.Descriptor instead. +func (*AggregateRequest_Aggregation_Reference) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{0, 0, 5} +} + +func (x *AggregateRequest_Aggregation_Reference) GetType() bool { + if x != nil { + return x.Type + } + return false +} + +func (x *AggregateRequest_Aggregation_Reference) GetPointingTo() bool { + if x != nil { + return x.PointingTo + } + return false +} + +type AggregateReply_Aggregations struct { + state protoimpl.MessageState `protogen:"open.v1"` + Aggregations []*AggregateReply_Aggregations_Aggregation `protobuf:"bytes,1,rep,name=aggregations,proto3" json:"aggregations,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Aggregations) Reset() { + *x = AggregateReply_Aggregations{} + mi := &file_v1_aggregate_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Aggregations) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Aggregations) ProtoMessage() {} + +func (x *AggregateReply_Aggregations) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Aggregations.ProtoReflect.Descriptor instead. 
+func (*AggregateReply_Aggregations) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 0} +} + +func (x *AggregateReply_Aggregations) GetAggregations() []*AggregateReply_Aggregations_Aggregation { + if x != nil { + return x.Aggregations + } + return nil +} + +type AggregateReply_Single struct { + state protoimpl.MessageState `protogen:"open.v1"` + ObjectsCount *int64 `protobuf:"varint,1,opt,name=objects_count,json=objectsCount,proto3,oneof" json:"objects_count,omitempty"` + Aggregations *AggregateReply_Aggregations `protobuf:"bytes,2,opt,name=aggregations,proto3,oneof" json:"aggregations,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Single) Reset() { + *x = AggregateReply_Single{} + mi := &file_v1_aggregate_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Single) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Single) ProtoMessage() {} + +func (x *AggregateReply_Single) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Single.ProtoReflect.Descriptor instead. 
+func (*AggregateReply_Single) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 1} +} + +func (x *AggregateReply_Single) GetObjectsCount() int64 { + if x != nil && x.ObjectsCount != nil { + return *x.ObjectsCount + } + return 0 +} + +func (x *AggregateReply_Single) GetAggregations() *AggregateReply_Aggregations { + if x != nil { + return x.Aggregations + } + return nil +} + +type AggregateReply_Group struct { + state protoimpl.MessageState `protogen:"open.v1"` + ObjectsCount *int64 `protobuf:"varint,1,opt,name=objects_count,json=objectsCount,proto3,oneof" json:"objects_count,omitempty"` + Aggregations *AggregateReply_Aggregations `protobuf:"bytes,2,opt,name=aggregations,proto3,oneof" json:"aggregations,omitempty"` + GroupedBy *AggregateReply_Group_GroupedBy `protobuf:"bytes,3,opt,name=grouped_by,json=groupedBy,proto3,oneof" json:"grouped_by,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Group) Reset() { + *x = AggregateReply_Group{} + mi := &file_v1_aggregate_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Group) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Group) ProtoMessage() {} + +func (x *AggregateReply_Group) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Group.ProtoReflect.Descriptor instead. 
+func (*AggregateReply_Group) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 2} +} + +func (x *AggregateReply_Group) GetObjectsCount() int64 { + if x != nil && x.ObjectsCount != nil { + return *x.ObjectsCount + } + return 0 +} + +func (x *AggregateReply_Group) GetAggregations() *AggregateReply_Aggregations { + if x != nil { + return x.Aggregations + } + return nil +} + +func (x *AggregateReply_Group) GetGroupedBy() *AggregateReply_Group_GroupedBy { + if x != nil { + return x.GroupedBy + } + return nil +} + +type AggregateReply_Grouped struct { + state protoimpl.MessageState `protogen:"open.v1"` + Groups []*AggregateReply_Group `protobuf:"bytes,1,rep,name=groups,proto3" json:"groups,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Grouped) Reset() { + *x = AggregateReply_Grouped{} + mi := &file_v1_aggregate_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Grouped) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Grouped) ProtoMessage() {} + +func (x *AggregateReply_Grouped) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[13] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Grouped.ProtoReflect.Descriptor instead. 
+func (*AggregateReply_Grouped) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 3} +} + +func (x *AggregateReply_Grouped) GetGroups() []*AggregateReply_Group { + if x != nil { + return x.Groups + } + return nil +} + +type AggregateReply_Aggregations_Aggregation struct { + state protoimpl.MessageState `protogen:"open.v1"` + Property string `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // Types that are valid to be assigned to Aggregation: + // + // *AggregateReply_Aggregations_Aggregation_Int + // *AggregateReply_Aggregations_Aggregation_Number_ + // *AggregateReply_Aggregations_Aggregation_Text_ + // *AggregateReply_Aggregations_Aggregation_Boolean_ + // *AggregateReply_Aggregations_Aggregation_Date_ + // *AggregateReply_Aggregations_Aggregation_Reference_ + Aggregation isAggregateReply_Aggregations_Aggregation_Aggregation `protobuf_oneof:"aggregation"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Aggregations_Aggregation) Reset() { + *x = AggregateReply_Aggregations_Aggregation{} + mi := &file_v1_aggregate_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Aggregations_Aggregation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Aggregations_Aggregation) ProtoMessage() {} + +func (x *AggregateReply_Aggregations_Aggregation) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[14] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Aggregations_Aggregation.ProtoReflect.Descriptor instead. 
+func (*AggregateReply_Aggregations_Aggregation) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 0, 0} +} + +func (x *AggregateReply_Aggregations_Aggregation) GetProperty() string { + if x != nil { + return x.Property + } + return "" +} + +func (x *AggregateReply_Aggregations_Aggregation) GetAggregation() isAggregateReply_Aggregations_Aggregation_Aggregation { + if x != nil { + return x.Aggregation + } + return nil +} + +func (x *AggregateReply_Aggregations_Aggregation) GetInt() *AggregateReply_Aggregations_Aggregation_Integer { + if x != nil { + if x, ok := x.Aggregation.(*AggregateReply_Aggregations_Aggregation_Int); ok { + return x.Int + } + } + return nil +} + +func (x *AggregateReply_Aggregations_Aggregation) GetNumber() *AggregateReply_Aggregations_Aggregation_Number { + if x != nil { + if x, ok := x.Aggregation.(*AggregateReply_Aggregations_Aggregation_Number_); ok { + return x.Number + } + } + return nil +} + +func (x *AggregateReply_Aggregations_Aggregation) GetText() *AggregateReply_Aggregations_Aggregation_Text { + if x != nil { + if x, ok := x.Aggregation.(*AggregateReply_Aggregations_Aggregation_Text_); ok { + return x.Text + } + } + return nil +} + +func (x *AggregateReply_Aggregations_Aggregation) GetBoolean() *AggregateReply_Aggregations_Aggregation_Boolean { + if x != nil { + if x, ok := x.Aggregation.(*AggregateReply_Aggregations_Aggregation_Boolean_); ok { + return x.Boolean + } + } + return nil +} + +func (x *AggregateReply_Aggregations_Aggregation) GetDate() *AggregateReply_Aggregations_Aggregation_Date { + if x != nil { + if x, ok := x.Aggregation.(*AggregateReply_Aggregations_Aggregation_Date_); ok { + return x.Date + } + } + return nil +} + +func (x *AggregateReply_Aggregations_Aggregation) GetReference() *AggregateReply_Aggregations_Aggregation_Reference { + if x != nil { + if x, ok := x.Aggregation.(*AggregateReply_Aggregations_Aggregation_Reference_); ok { + return x.Reference + } + } + return nil 
+} + +type isAggregateReply_Aggregations_Aggregation_Aggregation interface { + isAggregateReply_Aggregations_Aggregation_Aggregation() +} + +type AggregateReply_Aggregations_Aggregation_Int struct { + Int *AggregateReply_Aggregations_Aggregation_Integer `protobuf:"bytes,2,opt,name=int,proto3,oneof"` +} + +type AggregateReply_Aggregations_Aggregation_Number_ struct { + Number *AggregateReply_Aggregations_Aggregation_Number `protobuf:"bytes,3,opt,name=number,proto3,oneof"` +} + +type AggregateReply_Aggregations_Aggregation_Text_ struct { + Text *AggregateReply_Aggregations_Aggregation_Text `protobuf:"bytes,4,opt,name=text,proto3,oneof"` +} + +type AggregateReply_Aggregations_Aggregation_Boolean_ struct { + Boolean *AggregateReply_Aggregations_Aggregation_Boolean `protobuf:"bytes,5,opt,name=boolean,proto3,oneof"` +} + +type AggregateReply_Aggregations_Aggregation_Date_ struct { + Date *AggregateReply_Aggregations_Aggregation_Date `protobuf:"bytes,6,opt,name=date,proto3,oneof"` +} + +type AggregateReply_Aggregations_Aggregation_Reference_ struct { + Reference *AggregateReply_Aggregations_Aggregation_Reference `protobuf:"bytes,7,opt,name=reference,proto3,oneof"` +} + +func (*AggregateReply_Aggregations_Aggregation_Int) isAggregateReply_Aggregations_Aggregation_Aggregation() { +} + +func (*AggregateReply_Aggregations_Aggregation_Number_) isAggregateReply_Aggregations_Aggregation_Aggregation() { +} + +func (*AggregateReply_Aggregations_Aggregation_Text_) isAggregateReply_Aggregations_Aggregation_Aggregation() { +} + +func (*AggregateReply_Aggregations_Aggregation_Boolean_) isAggregateReply_Aggregations_Aggregation_Aggregation() { +} + +func (*AggregateReply_Aggregations_Aggregation_Date_) isAggregateReply_Aggregations_Aggregation_Aggregation() { +} + +func (*AggregateReply_Aggregations_Aggregation_Reference_) isAggregateReply_Aggregations_Aggregation_Aggregation() { +} + +type AggregateReply_Aggregations_Aggregation_Integer struct { + state protoimpl.MessageState 
`protogen:"open.v1"` + Count *int64 `protobuf:"varint,1,opt,name=count,proto3,oneof" json:"count,omitempty"` + Type *string `protobuf:"bytes,2,opt,name=type,proto3,oneof" json:"type,omitempty"` + Mean *float64 `protobuf:"fixed64,3,opt,name=mean,proto3,oneof" json:"mean,omitempty"` + Median *float64 `protobuf:"fixed64,4,opt,name=median,proto3,oneof" json:"median,omitempty"` + Mode *int64 `protobuf:"varint,5,opt,name=mode,proto3,oneof" json:"mode,omitempty"` + Maximum *int64 `protobuf:"varint,6,opt,name=maximum,proto3,oneof" json:"maximum,omitempty"` + Minimum *int64 `protobuf:"varint,7,opt,name=minimum,proto3,oneof" json:"minimum,omitempty"` + Sum *int64 `protobuf:"varint,8,opt,name=sum,proto3,oneof" json:"sum,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Aggregations_Aggregation_Integer) Reset() { + *x = AggregateReply_Aggregations_Aggregation_Integer{} + mi := &file_v1_aggregate_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Aggregations_Aggregation_Integer) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Aggregations_Aggregation_Integer) ProtoMessage() {} + +func (x *AggregateReply_Aggregations_Aggregation_Integer) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[15] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Aggregations_Aggregation_Integer.ProtoReflect.Descriptor instead. 
+func (*AggregateReply_Aggregations_Aggregation_Integer) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 0, 0, 0} +} + +func (x *AggregateReply_Aggregations_Aggregation_Integer) GetCount() int64 { + if x != nil && x.Count != nil { + return *x.Count + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Integer) GetType() string { + if x != nil && x.Type != nil { + return *x.Type + } + return "" +} + +func (x *AggregateReply_Aggregations_Aggregation_Integer) GetMean() float64 { + if x != nil && x.Mean != nil { + return *x.Mean + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Integer) GetMedian() float64 { + if x != nil && x.Median != nil { + return *x.Median + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Integer) GetMode() int64 { + if x != nil && x.Mode != nil { + return *x.Mode + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Integer) GetMaximum() int64 { + if x != nil && x.Maximum != nil { + return *x.Maximum + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Integer) GetMinimum() int64 { + if x != nil && x.Minimum != nil { + return *x.Minimum + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Integer) GetSum() int64 { + if x != nil && x.Sum != nil { + return *x.Sum + } + return 0 +} + +type AggregateReply_Aggregations_Aggregation_Number struct { + state protoimpl.MessageState `protogen:"open.v1"` + Count *int64 `protobuf:"varint,1,opt,name=count,proto3,oneof" json:"count,omitempty"` + Type *string `protobuf:"bytes,2,opt,name=type,proto3,oneof" json:"type,omitempty"` + Mean *float64 `protobuf:"fixed64,3,opt,name=mean,proto3,oneof" json:"mean,omitempty"` + Median *float64 `protobuf:"fixed64,4,opt,name=median,proto3,oneof" json:"median,omitempty"` + Mode *float64 `protobuf:"fixed64,5,opt,name=mode,proto3,oneof" json:"mode,omitempty"` + Maximum *float64 `protobuf:"fixed64,6,opt,name=maximum,proto3,oneof" 
json:"maximum,omitempty"` + Minimum *float64 `protobuf:"fixed64,7,opt,name=minimum,proto3,oneof" json:"minimum,omitempty"` + Sum *float64 `protobuf:"fixed64,8,opt,name=sum,proto3,oneof" json:"sum,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Aggregations_Aggregation_Number) Reset() { + *x = AggregateReply_Aggregations_Aggregation_Number{} + mi := &file_v1_aggregate_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Aggregations_Aggregation_Number) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Aggregations_Aggregation_Number) ProtoMessage() {} + +func (x *AggregateReply_Aggregations_Aggregation_Number) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[16] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Aggregations_Aggregation_Number.ProtoReflect.Descriptor instead. 
+func (*AggregateReply_Aggregations_Aggregation_Number) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 0, 0, 1} +} + +func (x *AggregateReply_Aggregations_Aggregation_Number) GetCount() int64 { + if x != nil && x.Count != nil { + return *x.Count + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Number) GetType() string { + if x != nil && x.Type != nil { + return *x.Type + } + return "" +} + +func (x *AggregateReply_Aggregations_Aggregation_Number) GetMean() float64 { + if x != nil && x.Mean != nil { + return *x.Mean + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Number) GetMedian() float64 { + if x != nil && x.Median != nil { + return *x.Median + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Number) GetMode() float64 { + if x != nil && x.Mode != nil { + return *x.Mode + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Number) GetMaximum() float64 { + if x != nil && x.Maximum != nil { + return *x.Maximum + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Number) GetMinimum() float64 { + if x != nil && x.Minimum != nil { + return *x.Minimum + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Number) GetSum() float64 { + if x != nil && x.Sum != nil { + return *x.Sum + } + return 0 +} + +type AggregateReply_Aggregations_Aggregation_Text struct { + state protoimpl.MessageState `protogen:"open.v1"` + Count *int64 `protobuf:"varint,1,opt,name=count,proto3,oneof" json:"count,omitempty"` + Type *string `protobuf:"bytes,2,opt,name=type,proto3,oneof" json:"type,omitempty"` + TopOccurences *AggregateReply_Aggregations_Aggregation_Text_TopOccurrences `protobuf:"bytes,3,opt,name=top_occurences,json=topOccurences,proto3,oneof" json:"top_occurences,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Aggregations_Aggregation_Text) Reset() { + *x = 
AggregateReply_Aggregations_Aggregation_Text{} + mi := &file_v1_aggregate_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Aggregations_Aggregation_Text) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Aggregations_Aggregation_Text) ProtoMessage() {} + +func (x *AggregateReply_Aggregations_Aggregation_Text) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[17] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Aggregations_Aggregation_Text.ProtoReflect.Descriptor instead. +func (*AggregateReply_Aggregations_Aggregation_Text) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 0, 0, 2} +} + +func (x *AggregateReply_Aggregations_Aggregation_Text) GetCount() int64 { + if x != nil && x.Count != nil { + return *x.Count + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Text) GetType() string { + if x != nil && x.Type != nil { + return *x.Type + } + return "" +} + +func (x *AggregateReply_Aggregations_Aggregation_Text) GetTopOccurences() *AggregateReply_Aggregations_Aggregation_Text_TopOccurrences { + if x != nil { + return x.TopOccurences + } + return nil +} + +type AggregateReply_Aggregations_Aggregation_Boolean struct { + state protoimpl.MessageState `protogen:"open.v1"` + Count *int64 `protobuf:"varint,1,opt,name=count,proto3,oneof" json:"count,omitempty"` + Type *string `protobuf:"bytes,2,opt,name=type,proto3,oneof" json:"type,omitempty"` + TotalTrue *int64 `protobuf:"varint,3,opt,name=total_true,json=totalTrue,proto3,oneof" json:"total_true,omitempty"` + TotalFalse *int64 `protobuf:"varint,4,opt,name=total_false,json=totalFalse,proto3,oneof" json:"total_false,omitempty"` + PercentageTrue *float64 
`protobuf:"fixed64,5,opt,name=percentage_true,json=percentageTrue,proto3,oneof" json:"percentage_true,omitempty"` + PercentageFalse *float64 `protobuf:"fixed64,6,opt,name=percentage_false,json=percentageFalse,proto3,oneof" json:"percentage_false,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Aggregations_Aggregation_Boolean) Reset() { + *x = AggregateReply_Aggregations_Aggregation_Boolean{} + mi := &file_v1_aggregate_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Aggregations_Aggregation_Boolean) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Aggregations_Aggregation_Boolean) ProtoMessage() {} + +func (x *AggregateReply_Aggregations_Aggregation_Boolean) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[18] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Aggregations_Aggregation_Boolean.ProtoReflect.Descriptor instead. 
+func (*AggregateReply_Aggregations_Aggregation_Boolean) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 0, 0, 3} +} + +func (x *AggregateReply_Aggregations_Aggregation_Boolean) GetCount() int64 { + if x != nil && x.Count != nil { + return *x.Count + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Boolean) GetType() string { + if x != nil && x.Type != nil { + return *x.Type + } + return "" +} + +func (x *AggregateReply_Aggregations_Aggregation_Boolean) GetTotalTrue() int64 { + if x != nil && x.TotalTrue != nil { + return *x.TotalTrue + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Boolean) GetTotalFalse() int64 { + if x != nil && x.TotalFalse != nil { + return *x.TotalFalse + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Boolean) GetPercentageTrue() float64 { + if x != nil && x.PercentageTrue != nil { + return *x.PercentageTrue + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Boolean) GetPercentageFalse() float64 { + if x != nil && x.PercentageFalse != nil { + return *x.PercentageFalse + } + return 0 +} + +type AggregateReply_Aggregations_Aggregation_Date struct { + state protoimpl.MessageState `protogen:"open.v1"` + Count *int64 `protobuf:"varint,1,opt,name=count,proto3,oneof" json:"count,omitempty"` + Type *string `protobuf:"bytes,2,opt,name=type,proto3,oneof" json:"type,omitempty"` + Median *string `protobuf:"bytes,3,opt,name=median,proto3,oneof" json:"median,omitempty"` + Mode *string `protobuf:"bytes,4,opt,name=mode,proto3,oneof" json:"mode,omitempty"` + Maximum *string `protobuf:"bytes,5,opt,name=maximum,proto3,oneof" json:"maximum,omitempty"` + Minimum *string `protobuf:"bytes,6,opt,name=minimum,proto3,oneof" json:"minimum,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Aggregations_Aggregation_Date) Reset() { + *x = AggregateReply_Aggregations_Aggregation_Date{} + mi := 
&file_v1_aggregate_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Aggregations_Aggregation_Date) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Aggregations_Aggregation_Date) ProtoMessage() {} + +func (x *AggregateReply_Aggregations_Aggregation_Date) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[19] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Aggregations_Aggregation_Date.ProtoReflect.Descriptor instead. +func (*AggregateReply_Aggregations_Aggregation_Date) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 0, 0, 4} +} + +func (x *AggregateReply_Aggregations_Aggregation_Date) GetCount() int64 { + if x != nil && x.Count != nil { + return *x.Count + } + return 0 +} + +func (x *AggregateReply_Aggregations_Aggregation_Date) GetType() string { + if x != nil && x.Type != nil { + return *x.Type + } + return "" +} + +func (x *AggregateReply_Aggregations_Aggregation_Date) GetMedian() string { + if x != nil && x.Median != nil { + return *x.Median + } + return "" +} + +func (x *AggregateReply_Aggregations_Aggregation_Date) GetMode() string { + if x != nil && x.Mode != nil { + return *x.Mode + } + return "" +} + +func (x *AggregateReply_Aggregations_Aggregation_Date) GetMaximum() string { + if x != nil && x.Maximum != nil { + return *x.Maximum + } + return "" +} + +func (x *AggregateReply_Aggregations_Aggregation_Date) GetMinimum() string { + if x != nil && x.Minimum != nil { + return *x.Minimum + } + return "" +} + +type AggregateReply_Aggregations_Aggregation_Reference struct { + state protoimpl.MessageState `protogen:"open.v1"` + Type *string `protobuf:"bytes,1,opt,name=type,proto3,oneof" json:"type,omitempty"` 
+ // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + PointingTo []string `protobuf:"bytes,2,rep,name=pointing_to,json=pointingTo,proto3" json:"pointing_to,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Aggregations_Aggregation_Reference) Reset() { + *x = AggregateReply_Aggregations_Aggregation_Reference{} + mi := &file_v1_aggregate_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Aggregations_Aggregation_Reference) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Aggregations_Aggregation_Reference) ProtoMessage() {} + +func (x *AggregateReply_Aggregations_Aggregation_Reference) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[20] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Aggregations_Aggregation_Reference.ProtoReflect.Descriptor instead. 
+func (*AggregateReply_Aggregations_Aggregation_Reference) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 0, 0, 5} +} + +func (x *AggregateReply_Aggregations_Aggregation_Reference) GetType() string { + if x != nil && x.Type != nil { + return *x.Type + } + return "" +} + +func (x *AggregateReply_Aggregations_Aggregation_Reference) GetPointingTo() []string { + if x != nil { + return x.PointingTo + } + return nil +} + +type AggregateReply_Aggregations_Aggregation_Text_TopOccurrences struct { + state protoimpl.MessageState `protogen:"open.v1"` + Items []*AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence `protobuf:"bytes,1,rep,name=items,proto3" json:"items,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Aggregations_Aggregation_Text_TopOccurrences) Reset() { + *x = AggregateReply_Aggregations_Aggregation_Text_TopOccurrences{} + mi := &file_v1_aggregate_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Aggregations_Aggregation_Text_TopOccurrences) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Aggregations_Aggregation_Text_TopOccurrences) ProtoMessage() {} + +func (x *AggregateReply_Aggregations_Aggregation_Text_TopOccurrences) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[21] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Aggregations_Aggregation_Text_TopOccurrences.ProtoReflect.Descriptor instead. 
+func (*AggregateReply_Aggregations_Aggregation_Text_TopOccurrences) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 0, 0, 2, 0} +} + +func (x *AggregateReply_Aggregations_Aggregation_Text_TopOccurrences) GetItems() []*AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence { + if x != nil { + return x.Items + } + return nil +} + +type AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence struct { + state protoimpl.MessageState `protogen:"open.v1"` + Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + Occurs int64 `protobuf:"varint,2,opt,name=occurs,proto3" json:"occurs,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence) Reset() { + *x = AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence{} + mi := &file_v1_aggregate_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence) ProtoMessage() {} + +func (x *AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[22] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence.ProtoReflect.Descriptor instead. 
+func (*AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 0, 0, 2, 0, 0} +} + +func (x *AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence) GetValue() string { + if x != nil { + return x.Value + } + return "" +} + +func (x *AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence) GetOccurs() int64 { + if x != nil { + return x.Occurs + } + return 0 +} + +type AggregateReply_Group_GroupedBy struct { + state protoimpl.MessageState `protogen:"open.v1"` + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + Path []string `protobuf:"bytes,1,rep,name=path,proto3" json:"path,omitempty"` + // Types that are valid to be assigned to Value: + // + // *AggregateReply_Group_GroupedBy_Text + // *AggregateReply_Group_GroupedBy_Int + // *AggregateReply_Group_GroupedBy_Boolean + // *AggregateReply_Group_GroupedBy_Number + // *AggregateReply_Group_GroupedBy_Texts + // *AggregateReply_Group_GroupedBy_Ints + // *AggregateReply_Group_GroupedBy_Booleans + // *AggregateReply_Group_GroupedBy_Numbers + // *AggregateReply_Group_GroupedBy_Geo + Value isAggregateReply_Group_GroupedBy_Value `protobuf_oneof:"value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *AggregateReply_Group_GroupedBy) Reset() { + *x = AggregateReply_Group_GroupedBy{} + mi := &file_v1_aggregate_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AggregateReply_Group_GroupedBy) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AggregateReply_Group_GroupedBy) ProtoMessage() {} + +func (x *AggregateReply_Group_GroupedBy) ProtoReflect() protoreflect.Message { + mi := &file_v1_aggregate_proto_msgTypes[23] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + 
return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AggregateReply_Group_GroupedBy.ProtoReflect.Descriptor instead. +func (*AggregateReply_Group_GroupedBy) Descriptor() ([]byte, []int) { + return file_v1_aggregate_proto_rawDescGZIP(), []int{1, 2, 0} +} + +func (x *AggregateReply_Group_GroupedBy) GetPath() []string { + if x != nil { + return x.Path + } + return nil +} + +func (x *AggregateReply_Group_GroupedBy) GetValue() isAggregateReply_Group_GroupedBy_Value { + if x != nil { + return x.Value + } + return nil +} + +func (x *AggregateReply_Group_GroupedBy) GetText() string { + if x != nil { + if x, ok := x.Value.(*AggregateReply_Group_GroupedBy_Text); ok { + return x.Text + } + } + return "" +} + +func (x *AggregateReply_Group_GroupedBy) GetInt() int64 { + if x != nil { + if x, ok := x.Value.(*AggregateReply_Group_GroupedBy_Int); ok { + return x.Int + } + } + return 0 +} + +func (x *AggregateReply_Group_GroupedBy) GetBoolean() bool { + if x != nil { + if x, ok := x.Value.(*AggregateReply_Group_GroupedBy_Boolean); ok { + return x.Boolean + } + } + return false +} + +func (x *AggregateReply_Group_GroupedBy) GetNumber() float64 { + if x != nil { + if x, ok := x.Value.(*AggregateReply_Group_GroupedBy_Number); ok { + return x.Number + } + } + return 0 +} + +func (x *AggregateReply_Group_GroupedBy) GetTexts() *TextArray { + if x != nil { + if x, ok := x.Value.(*AggregateReply_Group_GroupedBy_Texts); ok { + return x.Texts + } + } + return nil +} + +func (x *AggregateReply_Group_GroupedBy) GetInts() *IntArray { + if x != nil { + if x, ok := x.Value.(*AggregateReply_Group_GroupedBy_Ints); ok { + return x.Ints + } + } + return nil +} + +func (x *AggregateReply_Group_GroupedBy) GetBooleans() *BooleanArray { + if x != nil { + if x, ok := x.Value.(*AggregateReply_Group_GroupedBy_Booleans); ok { + return x.Booleans + } + } + return nil +} + +func (x *AggregateReply_Group_GroupedBy) GetNumbers() *NumberArray { + if x != nil { + if x, ok := 
x.Value.(*AggregateReply_Group_GroupedBy_Numbers); ok { + return x.Numbers + } + } + return nil +} + +func (x *AggregateReply_Group_GroupedBy) GetGeo() *GeoCoordinatesFilter { + if x != nil { + if x, ok := x.Value.(*AggregateReply_Group_GroupedBy_Geo); ok { + return x.Geo + } + } + return nil +} + +type isAggregateReply_Group_GroupedBy_Value interface { + isAggregateReply_Group_GroupedBy_Value() +} + +type AggregateReply_Group_GroupedBy_Text struct { + Text string `protobuf:"bytes,2,opt,name=text,proto3,oneof"` +} + +type AggregateReply_Group_GroupedBy_Int struct { + Int int64 `protobuf:"varint,3,opt,name=int,proto3,oneof"` +} + +type AggregateReply_Group_GroupedBy_Boolean struct { + Boolean bool `protobuf:"varint,4,opt,name=boolean,proto3,oneof"` +} + +type AggregateReply_Group_GroupedBy_Number struct { + Number float64 `protobuf:"fixed64,5,opt,name=number,proto3,oneof"` +} + +type AggregateReply_Group_GroupedBy_Texts struct { + Texts *TextArray `protobuf:"bytes,6,opt,name=texts,proto3,oneof"` +} + +type AggregateReply_Group_GroupedBy_Ints struct { + Ints *IntArray `protobuf:"bytes,7,opt,name=ints,proto3,oneof"` +} + +type AggregateReply_Group_GroupedBy_Booleans struct { + Booleans *BooleanArray `protobuf:"bytes,8,opt,name=booleans,proto3,oneof"` +} + +type AggregateReply_Group_GroupedBy_Numbers struct { + Numbers *NumberArray `protobuf:"bytes,9,opt,name=numbers,proto3,oneof"` +} + +type AggregateReply_Group_GroupedBy_Geo struct { + Geo *GeoCoordinatesFilter `protobuf:"bytes,10,opt,name=geo,proto3,oneof"` +} + +func (*AggregateReply_Group_GroupedBy_Text) isAggregateReply_Group_GroupedBy_Value() {} + +func (*AggregateReply_Group_GroupedBy_Int) isAggregateReply_Group_GroupedBy_Value() {} + +func (*AggregateReply_Group_GroupedBy_Boolean) isAggregateReply_Group_GroupedBy_Value() {} + +func (*AggregateReply_Group_GroupedBy_Number) isAggregateReply_Group_GroupedBy_Value() {} + +func (*AggregateReply_Group_GroupedBy_Texts) isAggregateReply_Group_GroupedBy_Value() {} + 
+func (*AggregateReply_Group_GroupedBy_Ints) isAggregateReply_Group_GroupedBy_Value() {} + +func (*AggregateReply_Group_GroupedBy_Booleans) isAggregateReply_Group_GroupedBy_Value() {} + +func (*AggregateReply_Group_GroupedBy_Numbers) isAggregateReply_Group_GroupedBy_Value() {} + +func (*AggregateReply_Group_GroupedBy_Geo) isAggregateReply_Group_GroupedBy_Value() {} + +var File_v1_aggregate_proto protoreflect.FileDescriptor + +const file_v1_aggregate_proto_rawDesc = "" + + "\n" + + "\x12v1/aggregate.proto\x12\vweaviate.v1\x1a\rv1/base.proto\x1a\x14v1/base_search.proto\"\x9d\x14\n" + + "\x10AggregateRequest\x12\x1e\n" + + "\n" + + "collection\x18\x01 \x01(\tR\n" + + "collection\x12\x16\n" + + "\x06tenant\x18\n" + + " \x01(\tR\x06tenant\x12#\n" + + "\robjects_count\x18\x14 \x01(\bR\fobjectsCount\x12M\n" + + "\faggregations\x18\x15 \x03(\v2).weaviate.v1.AggregateRequest.AggregationR\faggregations\x12&\n" + + "\fobject_limit\x18\x1e \x01(\rH\x01R\vobjectLimit\x88\x01\x01\x12E\n" + + "\bgroup_by\x18\x1f \x01(\v2%.weaviate.v1.AggregateRequest.GroupByH\x02R\agroupBy\x88\x01\x01\x12\x19\n" + + "\x05limit\x18 \x01(\rH\x03R\x05limit\x88\x01\x01\x123\n" + + "\afilters\x18( \x01(\v2\x14.weaviate.v1.FiltersH\x04R\afilters\x88\x01\x01\x12-\n" + + "\x06hybrid\x18) \x01(\v2\x13.weaviate.v1.HybridH\x00R\x06hybrid\x12:\n" + + "\vnear_vector\x18* \x01(\v2\x17.weaviate.v1.NearVectorH\x00R\n" + + "nearVector\x12:\n" + + "\vnear_object\x18+ \x01(\v2\x17.weaviate.v1.NearObjectH\x00R\n" + + "nearObject\x12:\n" + + "\tnear_text\x18, \x01(\v2\x1b.weaviate.v1.NearTextSearchH\x00R\bnearText\x12=\n" + + "\n" + + "near_image\x18- \x01(\v2\x1c.weaviate.v1.NearImageSearchH\x00R\tnearImage\x12=\n" + + "\n" + + "near_audio\x18. 
\x01(\v2\x1c.weaviate.v1.NearAudioSearchH\x00R\tnearAudio\x12=\n" + + "\n" + + "near_video\x18/ \x01(\v2\x1c.weaviate.v1.NearVideoSearchH\x00R\tnearVideo\x12=\n" + + "\n" + + "near_depth\x180 \x01(\v2\x1c.weaviate.v1.NearDepthSearchH\x00R\tnearDepth\x12C\n" + + "\fnear_thermal\x181 \x01(\v2\x1e.weaviate.v1.NearThermalSearchH\x00R\vnearThermal\x127\n" + + "\bnear_imu\x182 \x01(\v2\x1a.weaviate.v1.NearIMUSearchH\x00R\anearImu\x1a\xbb\v\n" + + "\vAggregation\x12\x1a\n" + + "\bproperty\x18\x01 \x01(\tR\bproperty\x12E\n" + + "\x03int\x18\x02 \x01(\v21.weaviate.v1.AggregateRequest.Aggregation.IntegerH\x00R\x03int\x12J\n" + + "\x06number\x18\x03 \x01(\v20.weaviate.v1.AggregateRequest.Aggregation.NumberH\x00R\x06number\x12D\n" + + "\x04text\x18\x04 \x01(\v2..weaviate.v1.AggregateRequest.Aggregation.TextH\x00R\x04text\x12M\n" + + "\aboolean\x18\x05 \x01(\v21.weaviate.v1.AggregateRequest.Aggregation.BooleanH\x00R\aboolean\x12D\n" + + "\x04date\x18\x06 \x01(\v2..weaviate.v1.AggregateRequest.Aggregation.DateH\x00R\x04date\x12S\n" + + "\treference\x18\a \x01(\v23.weaviate.v1.AggregateRequest.Aggregation.ReferenceH\x00R\treference\x1a\xb9\x01\n" + + "\aInteger\x12\x14\n" + + "\x05count\x18\x01 \x01(\bR\x05count\x12\x12\n" + + "\x04type\x18\x02 \x01(\bR\x04type\x12\x10\n" + + "\x03sum\x18\x03 \x01(\bR\x03sum\x12\x12\n" + + "\x04mean\x18\x04 \x01(\bR\x04mean\x12\x12\n" + + "\x04mode\x18\x05 \x01(\bR\x04mode\x12\x16\n" + + "\x06median\x18\x06 \x01(\bR\x06median\x12\x18\n" + + "\amaximum\x18\a \x01(\bR\amaximum\x12\x18\n" + + "\aminimum\x18\b \x01(\bR\aminimum\x1a\xb8\x01\n" + + "\x06Number\x12\x14\n" + + "\x05count\x18\x01 \x01(\bR\x05count\x12\x12\n" + + "\x04type\x18\x02 \x01(\bR\x04type\x12\x10\n" + + "\x03sum\x18\x03 \x01(\bR\x03sum\x12\x12\n" + + "\x04mean\x18\x04 \x01(\bR\x04mean\x12\x12\n" + + "\x04mode\x18\x05 \x01(\bR\x04mode\x12\x16\n" + + "\x06median\x18\x06 \x01(\bR\x06median\x12\x18\n" + + "\amaximum\x18\a \x01(\bR\amaximum\x12\x18\n" + + "\aminimum\x18\b 
\x01(\bR\aminimum\x1a\xa7\x01\n" + + "\x04Text\x12\x14\n" + + "\x05count\x18\x01 \x01(\bR\x05count\x12\x12\n" + + "\x04type\x18\x02 \x01(\bR\x04type\x12%\n" + + "\x0etop_occurences\x18\x03 \x01(\bR\rtopOccurences\x125\n" + + "\x14top_occurences_limit\x18\x04 \x01(\rH\x00R\x12topOccurencesLimit\x88\x01\x01B\x17\n" + + "\x15_top_occurences_limit\x1a\xc7\x01\n" + + "\aBoolean\x12\x14\n" + + "\x05count\x18\x01 \x01(\bR\x05count\x12\x12\n" + + "\x04type\x18\x02 \x01(\bR\x04type\x12\x1d\n" + + "\n" + + "total_true\x18\x03 \x01(\bR\ttotalTrue\x12\x1f\n" + + "\vtotal_false\x18\x04 \x01(\bR\n" + + "totalFalse\x12'\n" + + "\x0fpercentage_true\x18\x05 \x01(\bR\x0epercentageTrue\x12)\n" + + "\x10percentage_false\x18\x06 \x01(\bR\x0fpercentageFalse\x1a\x90\x01\n" + + "\x04Date\x12\x14\n" + + "\x05count\x18\x01 \x01(\bR\x05count\x12\x12\n" + + "\x04type\x18\x02 \x01(\bR\x04type\x12\x16\n" + + "\x06median\x18\x03 \x01(\bR\x06median\x12\x12\n" + + "\x04mode\x18\x04 \x01(\bR\x04mode\x12\x18\n" + + "\amaximum\x18\x05 \x01(\bR\amaximum\x12\x18\n" + + "\aminimum\x18\x06 \x01(\bR\aminimum\x1a@\n" + + "\tReference\x12\x12\n" + + "\x04type\x18\x01 \x01(\bR\x04type\x12\x1f\n" + + "\vpointing_to\x18\x02 \x01(\bR\n" + + "pointingToB\r\n" + + "\vaggregation\x1aE\n" + + "\aGroupBy\x12\x1e\n" + + "\n" + + "collection\x18\x01 \x01(\tR\n" + + "collection\x12\x1a\n" + + "\bproperty\x18\x02 \x01(\tR\bpropertyB\b\n" + + "\x06searchB\x0f\n" + + "\r_object_limitB\v\n" + + "\t_group_byB\b\n" + + "\x06_limitB\n" + + "\n" + + "\b_filters\"\x80\x1b\n" + + "\x0eAggregateReply\x12\x12\n" + + "\x04took\x18\x01 \x01(\x02R\x04took\x12I\n" + + "\rsingle_result\x18\x02 \x01(\v2\".weaviate.v1.AggregateReply.SingleH\x00R\fsingleResult\x12N\n" + + "\x0fgrouped_results\x18\x03 \x01(\v2#.weaviate.v1.AggregateReply.GroupedH\x00R\x0egroupedResults\x1a\xab\x12\n" + + "\fAggregations\x12X\n" + + "\faggregations\x18\x01 \x03(\v24.weaviate.v1.AggregateReply.Aggregations.AggregationR\faggregations\x1a\xc0\x11\n" + + 
"\vAggregation\x12\x1a\n" + + "\bproperty\x18\x01 \x01(\tR\bproperty\x12P\n" + + "\x03int\x18\x02 \x01(\v2<.weaviate.v1.AggregateReply.Aggregations.Aggregation.IntegerH\x00R\x03int\x12U\n" + + "\x06number\x18\x03 \x01(\v2;.weaviate.v1.AggregateReply.Aggregations.Aggregation.NumberH\x00R\x06number\x12O\n" + + "\x04text\x18\x04 \x01(\v29.weaviate.v1.AggregateReply.Aggregations.Aggregation.TextH\x00R\x04text\x12X\n" + + "\aboolean\x18\x05 \x01(\v2<.weaviate.v1.AggregateReply.Aggregations.Aggregation.BooleanH\x00R\aboolean\x12O\n" + + "\x04date\x18\x06 \x01(\v29.weaviate.v1.AggregateReply.Aggregations.Aggregation.DateH\x00R\x04date\x12^\n" + + "\treference\x18\a \x01(\v2>.weaviate.v1.AggregateReply.Aggregations.Aggregation.ReferenceH\x00R\treference\x1a\xb1\x02\n" + + "\aInteger\x12\x19\n" + + "\x05count\x18\x01 \x01(\x03H\x00R\x05count\x88\x01\x01\x12\x17\n" + + "\x04type\x18\x02 \x01(\tH\x01R\x04type\x88\x01\x01\x12\x17\n" + + "\x04mean\x18\x03 \x01(\x01H\x02R\x04mean\x88\x01\x01\x12\x1b\n" + + "\x06median\x18\x04 \x01(\x01H\x03R\x06median\x88\x01\x01\x12\x17\n" + + "\x04mode\x18\x05 \x01(\x03H\x04R\x04mode\x88\x01\x01\x12\x1d\n" + + "\amaximum\x18\x06 \x01(\x03H\x05R\amaximum\x88\x01\x01\x12\x1d\n" + + "\aminimum\x18\a \x01(\x03H\x06R\aminimum\x88\x01\x01\x12\x15\n" + + "\x03sum\x18\b \x01(\x03H\aR\x03sum\x88\x01\x01B\b\n" + + "\x06_countB\a\n" + + "\x05_typeB\a\n" + + "\x05_meanB\t\n" + + "\a_medianB\a\n" + + "\x05_modeB\n" + + "\n" + + "\b_maximumB\n" + + "\n" + + "\b_minimumB\x06\n" + + "\x04_sum\x1a\xb0\x02\n" + + "\x06Number\x12\x19\n" + + "\x05count\x18\x01 \x01(\x03H\x00R\x05count\x88\x01\x01\x12\x17\n" + + "\x04type\x18\x02 \x01(\tH\x01R\x04type\x88\x01\x01\x12\x17\n" + + "\x04mean\x18\x03 \x01(\x01H\x02R\x04mean\x88\x01\x01\x12\x1b\n" + + "\x06median\x18\x04 \x01(\x01H\x03R\x06median\x88\x01\x01\x12\x17\n" + + "\x04mode\x18\x05 \x01(\x01H\x04R\x04mode\x88\x01\x01\x12\x1d\n" + + "\amaximum\x18\x06 \x01(\x01H\x05R\amaximum\x88\x01\x01\x12\x1d\n" + + 
"\aminimum\x18\a \x01(\x01H\x06R\aminimum\x88\x01\x01\x12\x15\n" + + "\x03sum\x18\b \x01(\x01H\aR\x03sum\x88\x01\x01B\b\n" + + "\x06_countB\a\n" + + "\x05_typeB\a\n" + + "\x05_meanB\t\n" + + "\a_medianB\a\n" + + "\x05_modeB\n" + + "\n" + + "\b_maximumB\n" + + "\n" + + "\b_minimumB\x06\n" + + "\x04_sum\x1a\x96\x03\n" + + "\x04Text\x12\x19\n" + + "\x05count\x18\x01 \x01(\x03H\x00R\x05count\x88\x01\x01\x12\x17\n" + + "\x04type\x18\x02 \x01(\tH\x01R\x04type\x88\x01\x01\x12t\n" + + "\x0etop_occurences\x18\x03 \x01(\v2H.weaviate.v1.AggregateReply.Aggregations.Aggregation.Text.TopOccurrencesH\x02R\rtopOccurences\x88\x01\x01\x1a\xbd\x01\n" + + "\x0eTopOccurrences\x12l\n" + + "\x05items\x18\x01 \x03(\v2V.weaviate.v1.AggregateReply.Aggregations.Aggregation.Text.TopOccurrences.TopOccurrenceR\x05items\x1a=\n" + + "\rTopOccurrence\x12\x14\n" + + "\x05value\x18\x01 \x01(\tR\x05value\x12\x16\n" + + "\x06occurs\x18\x02 \x01(\x03R\x06occursB\b\n" + + "\x06_countB\a\n" + + "\x05_typeB\x11\n" + + "\x0f_top_occurences\x1a\xc0\x02\n" + + "\aBoolean\x12\x19\n" + + "\x05count\x18\x01 \x01(\x03H\x00R\x05count\x88\x01\x01\x12\x17\n" + + "\x04type\x18\x02 \x01(\tH\x01R\x04type\x88\x01\x01\x12\"\n" + + "\n" + + "total_true\x18\x03 \x01(\x03H\x02R\ttotalTrue\x88\x01\x01\x12$\n" + + "\vtotal_false\x18\x04 \x01(\x03H\x03R\n" + + "totalFalse\x88\x01\x01\x12,\n" + + "\x0fpercentage_true\x18\x05 \x01(\x01H\x04R\x0epercentageTrue\x88\x01\x01\x12.\n" + + "\x10percentage_false\x18\x06 \x01(\x01H\x05R\x0fpercentageFalse\x88\x01\x01B\b\n" + + "\x06_countB\a\n" + + "\x05_typeB\r\n" + + "\v_total_trueB\x0e\n" + + "\f_total_falseB\x12\n" + + "\x10_percentage_trueB\x13\n" + + "\x11_percentage_false\x1a\xed\x01\n" + + "\x04Date\x12\x19\n" + + "\x05count\x18\x01 \x01(\x03H\x00R\x05count\x88\x01\x01\x12\x17\n" + + "\x04type\x18\x02 \x01(\tH\x01R\x04type\x88\x01\x01\x12\x1b\n" + + "\x06median\x18\x03 \x01(\tH\x02R\x06median\x88\x01\x01\x12\x17\n" + + "\x04mode\x18\x04 
\x01(\tH\x03R\x04mode\x88\x01\x01\x12\x1d\n" + + "\amaximum\x18\x05 \x01(\tH\x04R\amaximum\x88\x01\x01\x12\x1d\n" + + "\aminimum\x18\x06 \x01(\tH\x05R\aminimum\x88\x01\x01B\b\n" + + "\x06_countB\a\n" + + "\x05_typeB\t\n" + + "\a_medianB\a\n" + + "\x05_modeB\n" + + "\n" + + "\b_maximumB\n" + + "\n" + + "\b_minimum\x1aN\n" + + "\tReference\x12\x17\n" + + "\x04type\x18\x01 \x01(\tH\x00R\x04type\x88\x01\x01\x12\x1f\n" + + "\vpointing_to\x18\x02 \x03(\tR\n" + + "pointingToB\a\n" + + "\x05_typeB\r\n" + + "\vaggregation\x1a\xa8\x01\n" + + "\x06Single\x12(\n" + + "\robjects_count\x18\x01 \x01(\x03H\x00R\fobjectsCount\x88\x01\x01\x12Q\n" + + "\faggregations\x18\x02 \x01(\v2(.weaviate.v1.AggregateReply.AggregationsH\x01R\faggregations\x88\x01\x01B\x10\n" + + "\x0e_objects_countB\x0f\n" + + "\r_aggregations\x1a\x95\x05\n" + + "\x05Group\x12(\n" + + "\robjects_count\x18\x01 \x01(\x03H\x00R\fobjectsCount\x88\x01\x01\x12Q\n" + + "\faggregations\x18\x02 \x01(\v2(.weaviate.v1.AggregateReply.AggregationsH\x01R\faggregations\x88\x01\x01\x12O\n" + + "\n" + + "grouped_by\x18\x03 \x01(\v2+.weaviate.v1.AggregateReply.Group.GroupedByH\x02R\tgroupedBy\x88\x01\x01\x1a\x8b\x03\n" + + "\tGroupedBy\x12\x12\n" + + "\x04path\x18\x01 \x03(\tR\x04path\x12\x14\n" + + "\x04text\x18\x02 \x01(\tH\x00R\x04text\x12\x12\n" + + "\x03int\x18\x03 \x01(\x03H\x00R\x03int\x12\x1a\n" + + "\aboolean\x18\x04 \x01(\bH\x00R\aboolean\x12\x18\n" + + "\x06number\x18\x05 \x01(\x01H\x00R\x06number\x12.\n" + + "\x05texts\x18\x06 \x01(\v2\x16.weaviate.v1.TextArrayH\x00R\x05texts\x12+\n" + + "\x04ints\x18\a \x01(\v2\x15.weaviate.v1.IntArrayH\x00R\x04ints\x127\n" + + "\bbooleans\x18\b \x01(\v2\x19.weaviate.v1.BooleanArrayH\x00R\bbooleans\x124\n" + + "\anumbers\x18\t \x01(\v2\x18.weaviate.v1.NumberArrayH\x00R\anumbers\x125\n" + + "\x03geo\x18\n" + + " \x01(\v2!.weaviate.v1.GeoCoordinatesFilterH\x00R\x03geoB\a\n" + + "\x05valueB\x10\n" + + "\x0e_objects_countB\x0f\n" + + "\r_aggregationsB\r\n" + + "\v_grouped_by\x1aD\n" + + 
"\aGrouped\x129\n" + + "\x06groups\x18\x01 \x03(\v2!.weaviate.v1.AggregateReply.GroupR\x06groupsB\b\n" + + "\x06resultBs\n" + + "#io.weaviate.client.grpc.protocol.v1B\x16WeaviateProtoAggregateZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var ( + file_v1_aggregate_proto_rawDescOnce sync.Once + file_v1_aggregate_proto_rawDescData []byte +) + +func file_v1_aggregate_proto_rawDescGZIP() []byte { + file_v1_aggregate_proto_rawDescOnce.Do(func() { + file_v1_aggregate_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_v1_aggregate_proto_rawDesc), len(file_v1_aggregate_proto_rawDesc))) + }) + return file_v1_aggregate_proto_rawDescData +} + +var file_v1_aggregate_proto_msgTypes = make([]protoimpl.MessageInfo, 24) +var file_v1_aggregate_proto_goTypes = []any{ + (*AggregateRequest)(nil), // 0: weaviate.v1.AggregateRequest + (*AggregateReply)(nil), // 1: weaviate.v1.AggregateReply + (*AggregateRequest_Aggregation)(nil), // 2: weaviate.v1.AggregateRequest.Aggregation + (*AggregateRequest_GroupBy)(nil), // 3: weaviate.v1.AggregateRequest.GroupBy + (*AggregateRequest_Aggregation_Integer)(nil), // 4: weaviate.v1.AggregateRequest.Aggregation.Integer + (*AggregateRequest_Aggregation_Number)(nil), // 5: weaviate.v1.AggregateRequest.Aggregation.Number + (*AggregateRequest_Aggregation_Text)(nil), // 6: weaviate.v1.AggregateRequest.Aggregation.Text + (*AggregateRequest_Aggregation_Boolean)(nil), // 7: weaviate.v1.AggregateRequest.Aggregation.Boolean + (*AggregateRequest_Aggregation_Date)(nil), // 8: weaviate.v1.AggregateRequest.Aggregation.Date + (*AggregateRequest_Aggregation_Reference)(nil), // 9: weaviate.v1.AggregateRequest.Aggregation.Reference + (*AggregateReply_Aggregations)(nil), // 10: weaviate.v1.AggregateReply.Aggregations + (*AggregateReply_Single)(nil), // 11: weaviate.v1.AggregateReply.Single + (*AggregateReply_Group)(nil), // 12: weaviate.v1.AggregateReply.Group + (*AggregateReply_Grouped)(nil), // 13: 
weaviate.v1.AggregateReply.Grouped + (*AggregateReply_Aggregations_Aggregation)(nil), // 14: weaviate.v1.AggregateReply.Aggregations.Aggregation + (*AggregateReply_Aggregations_Aggregation_Integer)(nil), // 15: weaviate.v1.AggregateReply.Aggregations.Aggregation.Integer + (*AggregateReply_Aggregations_Aggregation_Number)(nil), // 16: weaviate.v1.AggregateReply.Aggregations.Aggregation.Number + (*AggregateReply_Aggregations_Aggregation_Text)(nil), // 17: weaviate.v1.AggregateReply.Aggregations.Aggregation.Text + (*AggregateReply_Aggregations_Aggregation_Boolean)(nil), // 18: weaviate.v1.AggregateReply.Aggregations.Aggregation.Boolean + (*AggregateReply_Aggregations_Aggregation_Date)(nil), // 19: weaviate.v1.AggregateReply.Aggregations.Aggregation.Date + (*AggregateReply_Aggregations_Aggregation_Reference)(nil), // 20: weaviate.v1.AggregateReply.Aggregations.Aggregation.Reference + (*AggregateReply_Aggregations_Aggregation_Text_TopOccurrences)(nil), // 21: weaviate.v1.AggregateReply.Aggregations.Aggregation.Text.TopOccurrences + (*AggregateReply_Aggregations_Aggregation_Text_TopOccurrences_TopOccurrence)(nil), // 22: weaviate.v1.AggregateReply.Aggregations.Aggregation.Text.TopOccurrences.TopOccurrence + (*AggregateReply_Group_GroupedBy)(nil), // 23: weaviate.v1.AggregateReply.Group.GroupedBy + (*Filters)(nil), // 24: weaviate.v1.Filters + (*Hybrid)(nil), // 25: weaviate.v1.Hybrid + (*NearVector)(nil), // 26: weaviate.v1.NearVector + (*NearObject)(nil), // 27: weaviate.v1.NearObject + (*NearTextSearch)(nil), // 28: weaviate.v1.NearTextSearch + (*NearImageSearch)(nil), // 29: weaviate.v1.NearImageSearch + (*NearAudioSearch)(nil), // 30: weaviate.v1.NearAudioSearch + (*NearVideoSearch)(nil), // 31: weaviate.v1.NearVideoSearch + (*NearDepthSearch)(nil), // 32: weaviate.v1.NearDepthSearch + (*NearThermalSearch)(nil), // 33: weaviate.v1.NearThermalSearch + (*NearIMUSearch)(nil), // 34: weaviate.v1.NearIMUSearch + (*TextArray)(nil), // 35: weaviate.v1.TextArray + 
(*IntArray)(nil), // 36: weaviate.v1.IntArray + (*BooleanArray)(nil), // 37: weaviate.v1.BooleanArray + (*NumberArray)(nil), // 38: weaviate.v1.NumberArray + (*GeoCoordinatesFilter)(nil), // 39: weaviate.v1.GeoCoordinatesFilter +} +var file_v1_aggregate_proto_depIdxs = []int32{ + 2, // 0: weaviate.v1.AggregateRequest.aggregations:type_name -> weaviate.v1.AggregateRequest.Aggregation + 3, // 1: weaviate.v1.AggregateRequest.group_by:type_name -> weaviate.v1.AggregateRequest.GroupBy + 24, // 2: weaviate.v1.AggregateRequest.filters:type_name -> weaviate.v1.Filters + 25, // 3: weaviate.v1.AggregateRequest.hybrid:type_name -> weaviate.v1.Hybrid + 26, // 4: weaviate.v1.AggregateRequest.near_vector:type_name -> weaviate.v1.NearVector + 27, // 5: weaviate.v1.AggregateRequest.near_object:type_name -> weaviate.v1.NearObject + 28, // 6: weaviate.v1.AggregateRequest.near_text:type_name -> weaviate.v1.NearTextSearch + 29, // 7: weaviate.v1.AggregateRequest.near_image:type_name -> weaviate.v1.NearImageSearch + 30, // 8: weaviate.v1.AggregateRequest.near_audio:type_name -> weaviate.v1.NearAudioSearch + 31, // 9: weaviate.v1.AggregateRequest.near_video:type_name -> weaviate.v1.NearVideoSearch + 32, // 10: weaviate.v1.AggregateRequest.near_depth:type_name -> weaviate.v1.NearDepthSearch + 33, // 11: weaviate.v1.AggregateRequest.near_thermal:type_name -> weaviate.v1.NearThermalSearch + 34, // 12: weaviate.v1.AggregateRequest.near_imu:type_name -> weaviate.v1.NearIMUSearch + 11, // 13: weaviate.v1.AggregateReply.single_result:type_name -> weaviate.v1.AggregateReply.Single + 13, // 14: weaviate.v1.AggregateReply.grouped_results:type_name -> weaviate.v1.AggregateReply.Grouped + 4, // 15: weaviate.v1.AggregateRequest.Aggregation.int:type_name -> weaviate.v1.AggregateRequest.Aggregation.Integer + 5, // 16: weaviate.v1.AggregateRequest.Aggregation.number:type_name -> weaviate.v1.AggregateRequest.Aggregation.Number + 6, // 17: weaviate.v1.AggregateRequest.Aggregation.text:type_name -> 
weaviate.v1.AggregateRequest.Aggregation.Text + 7, // 18: weaviate.v1.AggregateRequest.Aggregation.boolean:type_name -> weaviate.v1.AggregateRequest.Aggregation.Boolean + 8, // 19: weaviate.v1.AggregateRequest.Aggregation.date:type_name -> weaviate.v1.AggregateRequest.Aggregation.Date + 9, // 20: weaviate.v1.AggregateRequest.Aggregation.reference:type_name -> weaviate.v1.AggregateRequest.Aggregation.Reference + 14, // 21: weaviate.v1.AggregateReply.Aggregations.aggregations:type_name -> weaviate.v1.AggregateReply.Aggregations.Aggregation + 10, // 22: weaviate.v1.AggregateReply.Single.aggregations:type_name -> weaviate.v1.AggregateReply.Aggregations + 10, // 23: weaviate.v1.AggregateReply.Group.aggregations:type_name -> weaviate.v1.AggregateReply.Aggregations + 23, // 24: weaviate.v1.AggregateReply.Group.grouped_by:type_name -> weaviate.v1.AggregateReply.Group.GroupedBy + 12, // 25: weaviate.v1.AggregateReply.Grouped.groups:type_name -> weaviate.v1.AggregateReply.Group + 15, // 26: weaviate.v1.AggregateReply.Aggregations.Aggregation.int:type_name -> weaviate.v1.AggregateReply.Aggregations.Aggregation.Integer + 16, // 27: weaviate.v1.AggregateReply.Aggregations.Aggregation.number:type_name -> weaviate.v1.AggregateReply.Aggregations.Aggregation.Number + 17, // 28: weaviate.v1.AggregateReply.Aggregations.Aggregation.text:type_name -> weaviate.v1.AggregateReply.Aggregations.Aggregation.Text + 18, // 29: weaviate.v1.AggregateReply.Aggregations.Aggregation.boolean:type_name -> weaviate.v1.AggregateReply.Aggregations.Aggregation.Boolean + 19, // 30: weaviate.v1.AggregateReply.Aggregations.Aggregation.date:type_name -> weaviate.v1.AggregateReply.Aggregations.Aggregation.Date + 20, // 31: weaviate.v1.AggregateReply.Aggregations.Aggregation.reference:type_name -> weaviate.v1.AggregateReply.Aggregations.Aggregation.Reference + 21, // 32: weaviate.v1.AggregateReply.Aggregations.Aggregation.Text.top_occurences:type_name -> 
weaviate.v1.AggregateReply.Aggregations.Aggregation.Text.TopOccurrences + 22, // 33: weaviate.v1.AggregateReply.Aggregations.Aggregation.Text.TopOccurrences.items:type_name -> weaviate.v1.AggregateReply.Aggregations.Aggregation.Text.TopOccurrences.TopOccurrence + 35, // 34: weaviate.v1.AggregateReply.Group.GroupedBy.texts:type_name -> weaviate.v1.TextArray + 36, // 35: weaviate.v1.AggregateReply.Group.GroupedBy.ints:type_name -> weaviate.v1.IntArray + 37, // 36: weaviate.v1.AggregateReply.Group.GroupedBy.booleans:type_name -> weaviate.v1.BooleanArray + 38, // 37: weaviate.v1.AggregateReply.Group.GroupedBy.numbers:type_name -> weaviate.v1.NumberArray + 39, // 38: weaviate.v1.AggregateReply.Group.GroupedBy.geo:type_name -> weaviate.v1.GeoCoordinatesFilter + 39, // [39:39] is the sub-list for method output_type + 39, // [39:39] is the sub-list for method input_type + 39, // [39:39] is the sub-list for extension type_name + 39, // [39:39] is the sub-list for extension extendee + 0, // [0:39] is the sub-list for field type_name +} + +func init() { file_v1_aggregate_proto_init() } +func file_v1_aggregate_proto_init() { + if File_v1_aggregate_proto != nil { + return + } + file_v1_base_proto_init() + file_v1_base_search_proto_init() + file_v1_aggregate_proto_msgTypes[0].OneofWrappers = []any{ + (*AggregateRequest_Hybrid)(nil), + (*AggregateRequest_NearVector)(nil), + (*AggregateRequest_NearObject)(nil), + (*AggregateRequest_NearText)(nil), + (*AggregateRequest_NearImage)(nil), + (*AggregateRequest_NearAudio)(nil), + (*AggregateRequest_NearVideo)(nil), + (*AggregateRequest_NearDepth)(nil), + (*AggregateRequest_NearThermal)(nil), + (*AggregateRequest_NearImu)(nil), + } + file_v1_aggregate_proto_msgTypes[1].OneofWrappers = []any{ + (*AggregateReply_SingleResult)(nil), + (*AggregateReply_GroupedResults)(nil), + } + file_v1_aggregate_proto_msgTypes[2].OneofWrappers = []any{ + (*AggregateRequest_Aggregation_Int)(nil), + (*AggregateRequest_Aggregation_Number_)(nil), + 
(*AggregateRequest_Aggregation_Text_)(nil), + (*AggregateRequest_Aggregation_Boolean_)(nil), + (*AggregateRequest_Aggregation_Date_)(nil), + (*AggregateRequest_Aggregation_Reference_)(nil), + } + file_v1_aggregate_proto_msgTypes[6].OneofWrappers = []any{} + file_v1_aggregate_proto_msgTypes[11].OneofWrappers = []any{} + file_v1_aggregate_proto_msgTypes[12].OneofWrappers = []any{} + file_v1_aggregate_proto_msgTypes[14].OneofWrappers = []any{ + (*AggregateReply_Aggregations_Aggregation_Int)(nil), + (*AggregateReply_Aggregations_Aggregation_Number_)(nil), + (*AggregateReply_Aggregations_Aggregation_Text_)(nil), + (*AggregateReply_Aggregations_Aggregation_Boolean_)(nil), + (*AggregateReply_Aggregations_Aggregation_Date_)(nil), + (*AggregateReply_Aggregations_Aggregation_Reference_)(nil), + } + file_v1_aggregate_proto_msgTypes[15].OneofWrappers = []any{} + file_v1_aggregate_proto_msgTypes[16].OneofWrappers = []any{} + file_v1_aggregate_proto_msgTypes[17].OneofWrappers = []any{} + file_v1_aggregate_proto_msgTypes[18].OneofWrappers = []any{} + file_v1_aggregate_proto_msgTypes[19].OneofWrappers = []any{} + file_v1_aggregate_proto_msgTypes[20].OneofWrappers = []any{} + file_v1_aggregate_proto_msgTypes[23].OneofWrappers = []any{ + (*AggregateReply_Group_GroupedBy_Text)(nil), + (*AggregateReply_Group_GroupedBy_Int)(nil), + (*AggregateReply_Group_GroupedBy_Boolean)(nil), + (*AggregateReply_Group_GroupedBy_Number)(nil), + (*AggregateReply_Group_GroupedBy_Texts)(nil), + (*AggregateReply_Group_GroupedBy_Ints)(nil), + (*AggregateReply_Group_GroupedBy_Booleans)(nil), + (*AggregateReply_Group_GroupedBy_Numbers)(nil), + (*AggregateReply_Group_GroupedBy_Geo)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v1_aggregate_proto_rawDesc), len(file_v1_aggregate_proto_rawDesc)), + NumEnums: 0, + NumMessages: 24, + NumExtensions: 0, + NumServices: 0, + 
}, + GoTypes: file_v1_aggregate_proto_goTypes, + DependencyIndexes: file_v1_aggregate_proto_depIdxs, + MessageInfos: file_v1_aggregate_proto_msgTypes, + }.Build() + File_v1_aggregate_proto = out.File + file_v1_aggregate_proto_goTypes = nil + file_v1_aggregate_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/base.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/base.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..4ba2780491242e5de222d0a0a2e0a8349dcc55be --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/base.pb.go @@ -0,0 +1,1665 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package protocol + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + structpb "google.golang.org/protobuf/types/known/structpb" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type ConsistencyLevel int32 + +const ( + ConsistencyLevel_CONSISTENCY_LEVEL_UNSPECIFIED ConsistencyLevel = 0 + ConsistencyLevel_CONSISTENCY_LEVEL_ONE ConsistencyLevel = 1 + ConsistencyLevel_CONSISTENCY_LEVEL_QUORUM ConsistencyLevel = 2 + ConsistencyLevel_CONSISTENCY_LEVEL_ALL ConsistencyLevel = 3 +) + +// Enum value maps for ConsistencyLevel. 
+var ( + ConsistencyLevel_name = map[int32]string{ + 0: "CONSISTENCY_LEVEL_UNSPECIFIED", + 1: "CONSISTENCY_LEVEL_ONE", + 2: "CONSISTENCY_LEVEL_QUORUM", + 3: "CONSISTENCY_LEVEL_ALL", + } + ConsistencyLevel_value = map[string]int32{ + "CONSISTENCY_LEVEL_UNSPECIFIED": 0, + "CONSISTENCY_LEVEL_ONE": 1, + "CONSISTENCY_LEVEL_QUORUM": 2, + "CONSISTENCY_LEVEL_ALL": 3, + } +) + +func (x ConsistencyLevel) Enum() *ConsistencyLevel { + p := new(ConsistencyLevel) + *p = x + return p +} + +func (x ConsistencyLevel) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ConsistencyLevel) Descriptor() protoreflect.EnumDescriptor { + return file_v1_base_proto_enumTypes[0].Descriptor() +} + +func (ConsistencyLevel) Type() protoreflect.EnumType { + return &file_v1_base_proto_enumTypes[0] +} + +func (x ConsistencyLevel) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ConsistencyLevel.Descriptor instead. +func (ConsistencyLevel) EnumDescriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{0} +} + +type Filters_Operator int32 + +const ( + Filters_OPERATOR_UNSPECIFIED Filters_Operator = 0 + Filters_OPERATOR_EQUAL Filters_Operator = 1 + Filters_OPERATOR_NOT_EQUAL Filters_Operator = 2 + Filters_OPERATOR_GREATER_THAN Filters_Operator = 3 + Filters_OPERATOR_GREATER_THAN_EQUAL Filters_Operator = 4 + Filters_OPERATOR_LESS_THAN Filters_Operator = 5 + Filters_OPERATOR_LESS_THAN_EQUAL Filters_Operator = 6 + Filters_OPERATOR_AND Filters_Operator = 7 + Filters_OPERATOR_OR Filters_Operator = 8 + Filters_OPERATOR_WITHIN_GEO_RANGE Filters_Operator = 9 + Filters_OPERATOR_LIKE Filters_Operator = 10 + Filters_OPERATOR_IS_NULL Filters_Operator = 11 + Filters_OPERATOR_CONTAINS_ANY Filters_Operator = 12 + Filters_OPERATOR_CONTAINS_ALL Filters_Operator = 13 + Filters_OPERATOR_CONTAINS_NONE Filters_Operator = 14 + Filters_OPERATOR_NOT Filters_Operator = 15 +) + +// Enum value maps for 
Filters_Operator. +var ( + Filters_Operator_name = map[int32]string{ + 0: "OPERATOR_UNSPECIFIED", + 1: "OPERATOR_EQUAL", + 2: "OPERATOR_NOT_EQUAL", + 3: "OPERATOR_GREATER_THAN", + 4: "OPERATOR_GREATER_THAN_EQUAL", + 5: "OPERATOR_LESS_THAN", + 6: "OPERATOR_LESS_THAN_EQUAL", + 7: "OPERATOR_AND", + 8: "OPERATOR_OR", + 9: "OPERATOR_WITHIN_GEO_RANGE", + 10: "OPERATOR_LIKE", + 11: "OPERATOR_IS_NULL", + 12: "OPERATOR_CONTAINS_ANY", + 13: "OPERATOR_CONTAINS_ALL", + 14: "OPERATOR_CONTAINS_NONE", + 15: "OPERATOR_NOT", + } + Filters_Operator_value = map[string]int32{ + "OPERATOR_UNSPECIFIED": 0, + "OPERATOR_EQUAL": 1, + "OPERATOR_NOT_EQUAL": 2, + "OPERATOR_GREATER_THAN": 3, + "OPERATOR_GREATER_THAN_EQUAL": 4, + "OPERATOR_LESS_THAN": 5, + "OPERATOR_LESS_THAN_EQUAL": 6, + "OPERATOR_AND": 7, + "OPERATOR_OR": 8, + "OPERATOR_WITHIN_GEO_RANGE": 9, + "OPERATOR_LIKE": 10, + "OPERATOR_IS_NULL": 11, + "OPERATOR_CONTAINS_ANY": 12, + "OPERATOR_CONTAINS_ALL": 13, + "OPERATOR_CONTAINS_NONE": 14, + "OPERATOR_NOT": 15, + } +) + +func (x Filters_Operator) Enum() *Filters_Operator { + p := new(Filters_Operator) + *p = x + return p +} + +func (x Filters_Operator) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Filters_Operator) Descriptor() protoreflect.EnumDescriptor { + return file_v1_base_proto_enumTypes[1].Descriptor() +} + +func (Filters_Operator) Type() protoreflect.EnumType { + return &file_v1_base_proto_enumTypes[1] +} + +func (x Filters_Operator) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Filters_Operator.Descriptor instead. 
+func (Filters_Operator) EnumDescriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{11, 0} +} + +type Vectors_VectorType int32 + +const ( + Vectors_VECTOR_TYPE_UNSPECIFIED Vectors_VectorType = 0 + Vectors_VECTOR_TYPE_SINGLE_FP32 Vectors_VectorType = 1 + Vectors_VECTOR_TYPE_MULTI_FP32 Vectors_VectorType = 2 +) + +// Enum value maps for Vectors_VectorType. +var ( + Vectors_VectorType_name = map[int32]string{ + 0: "VECTOR_TYPE_UNSPECIFIED", + 1: "VECTOR_TYPE_SINGLE_FP32", + 2: "VECTOR_TYPE_MULTI_FP32", + } + Vectors_VectorType_value = map[string]int32{ + "VECTOR_TYPE_UNSPECIFIED": 0, + "VECTOR_TYPE_SINGLE_FP32": 1, + "VECTOR_TYPE_MULTI_FP32": 2, + } +) + +func (x Vectors_VectorType) Enum() *Vectors_VectorType { + p := new(Vectors_VectorType) + *p = x + return p +} + +func (x Vectors_VectorType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Vectors_VectorType) Descriptor() protoreflect.EnumDescriptor { + return file_v1_base_proto_enumTypes[2].Descriptor() +} + +func (Vectors_VectorType) Type() protoreflect.EnumType { + return &file_v1_base_proto_enumTypes[2] +} + +func (x Vectors_VectorType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Vectors_VectorType.Descriptor instead. +func (Vectors_VectorType) EnumDescriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{17, 0} +} + +type NumberArrayProperties struct { + state protoimpl.MessageState `protogen:"open.v1"` + // will be removed in the future, use vector_bytes + // go client 5.4.1 depends on this field. Only remove after go client is deprecated + // + // Deprecated: Marked as deprecated in v1/base.proto. 
+ Values []float64 `protobuf:"fixed64,1,rep,packed,name=values,proto3" json:"values,omitempty"` + PropName string `protobuf:"bytes,2,opt,name=prop_name,json=propName,proto3" json:"prop_name,omitempty"` + ValuesBytes []byte `protobuf:"bytes,3,opt,name=values_bytes,json=valuesBytes,proto3" json:"values_bytes,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NumberArrayProperties) Reset() { + *x = NumberArrayProperties{} + mi := &file_v1_base_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NumberArrayProperties) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NumberArrayProperties) ProtoMessage() {} + +func (x *NumberArrayProperties) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NumberArrayProperties.ProtoReflect.Descriptor instead. +func (*NumberArrayProperties) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{0} +} + +// Deprecated: Marked as deprecated in v1/base.proto. 
+func (x *NumberArrayProperties) GetValues() []float64 { + if x != nil { + return x.Values + } + return nil +} + +func (x *NumberArrayProperties) GetPropName() string { + if x != nil { + return x.PropName + } + return "" +} + +func (x *NumberArrayProperties) GetValuesBytes() []byte { + if x != nil { + return x.ValuesBytes + } + return nil +} + +type IntArrayProperties struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []int64 `protobuf:"varint,1,rep,packed,name=values,proto3" json:"values,omitempty"` + PropName string `protobuf:"bytes,2,opt,name=prop_name,json=propName,proto3" json:"prop_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *IntArrayProperties) Reset() { + *x = IntArrayProperties{} + mi := &file_v1_base_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *IntArrayProperties) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*IntArrayProperties) ProtoMessage() {} + +func (x *IntArrayProperties) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use IntArrayProperties.ProtoReflect.Descriptor instead. 
+func (*IntArrayProperties) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{1} +} + +func (x *IntArrayProperties) GetValues() []int64 { + if x != nil { + return x.Values + } + return nil +} + +func (x *IntArrayProperties) GetPropName() string { + if x != nil { + return x.PropName + } + return "" +} + +type TextArrayProperties struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + PropName string `protobuf:"bytes,2,opt,name=prop_name,json=propName,proto3" json:"prop_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *TextArrayProperties) Reset() { + *x = TextArrayProperties{} + mi := &file_v1_base_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *TextArrayProperties) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TextArrayProperties) ProtoMessage() {} + +func (x *TextArrayProperties) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TextArrayProperties.ProtoReflect.Descriptor instead. 
+func (*TextArrayProperties) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{2} +} + +func (x *TextArrayProperties) GetValues() []string { + if x != nil { + return x.Values + } + return nil +} + +func (x *TextArrayProperties) GetPropName() string { + if x != nil { + return x.PropName + } + return "" +} + +type BooleanArrayProperties struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []bool `protobuf:"varint,1,rep,packed,name=values,proto3" json:"values,omitempty"` + PropName string `protobuf:"bytes,2,opt,name=prop_name,json=propName,proto3" json:"prop_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BooleanArrayProperties) Reset() { + *x = BooleanArrayProperties{} + mi := &file_v1_base_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BooleanArrayProperties) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BooleanArrayProperties) ProtoMessage() {} + +func (x *BooleanArrayProperties) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BooleanArrayProperties.ProtoReflect.Descriptor instead. 
+func (*BooleanArrayProperties) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{3} +} + +func (x *BooleanArrayProperties) GetValues() []bool { + if x != nil { + return x.Values + } + return nil +} + +func (x *BooleanArrayProperties) GetPropName() string { + if x != nil { + return x.PropName + } + return "" +} + +type ObjectPropertiesValue struct { + state protoimpl.MessageState `protogen:"open.v1"` + NonRefProperties *structpb.Struct `protobuf:"bytes,1,opt,name=non_ref_properties,json=nonRefProperties,proto3" json:"non_ref_properties,omitempty"` + NumberArrayProperties []*NumberArrayProperties `protobuf:"bytes,2,rep,name=number_array_properties,json=numberArrayProperties,proto3" json:"number_array_properties,omitempty"` + IntArrayProperties []*IntArrayProperties `protobuf:"bytes,3,rep,name=int_array_properties,json=intArrayProperties,proto3" json:"int_array_properties,omitempty"` + TextArrayProperties []*TextArrayProperties `protobuf:"bytes,4,rep,name=text_array_properties,json=textArrayProperties,proto3" json:"text_array_properties,omitempty"` + BooleanArrayProperties []*BooleanArrayProperties `protobuf:"bytes,5,rep,name=boolean_array_properties,json=booleanArrayProperties,proto3" json:"boolean_array_properties,omitempty"` + ObjectProperties []*ObjectProperties `protobuf:"bytes,6,rep,name=object_properties,json=objectProperties,proto3" json:"object_properties,omitempty"` + ObjectArrayProperties []*ObjectArrayProperties `protobuf:"bytes,7,rep,name=object_array_properties,json=objectArrayProperties,proto3" json:"object_array_properties,omitempty"` + EmptyListProps []string `protobuf:"bytes,10,rep,name=empty_list_props,json=emptyListProps,proto3" json:"empty_list_props,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ObjectPropertiesValue) Reset() { + *x = ObjectPropertiesValue{} + mi := &file_v1_base_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + 
ms.StoreMessageInfo(mi) +} + +func (x *ObjectPropertiesValue) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ObjectPropertiesValue) ProtoMessage() {} + +func (x *ObjectPropertiesValue) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ObjectPropertiesValue.ProtoReflect.Descriptor instead. +func (*ObjectPropertiesValue) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{4} +} + +func (x *ObjectPropertiesValue) GetNonRefProperties() *structpb.Struct { + if x != nil { + return x.NonRefProperties + } + return nil +} + +func (x *ObjectPropertiesValue) GetNumberArrayProperties() []*NumberArrayProperties { + if x != nil { + return x.NumberArrayProperties + } + return nil +} + +func (x *ObjectPropertiesValue) GetIntArrayProperties() []*IntArrayProperties { + if x != nil { + return x.IntArrayProperties + } + return nil +} + +func (x *ObjectPropertiesValue) GetTextArrayProperties() []*TextArrayProperties { + if x != nil { + return x.TextArrayProperties + } + return nil +} + +func (x *ObjectPropertiesValue) GetBooleanArrayProperties() []*BooleanArrayProperties { + if x != nil { + return x.BooleanArrayProperties + } + return nil +} + +func (x *ObjectPropertiesValue) GetObjectProperties() []*ObjectProperties { + if x != nil { + return x.ObjectProperties + } + return nil +} + +func (x *ObjectPropertiesValue) GetObjectArrayProperties() []*ObjectArrayProperties { + if x != nil { + return x.ObjectArrayProperties + } + return nil +} + +func (x *ObjectPropertiesValue) GetEmptyListProps() []string { + if x != nil { + return x.EmptyListProps + } + return nil +} + +type ObjectArrayProperties struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []*ObjectPropertiesValue 
`protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + PropName string `protobuf:"bytes,2,opt,name=prop_name,json=propName,proto3" json:"prop_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ObjectArrayProperties) Reset() { + *x = ObjectArrayProperties{} + mi := &file_v1_base_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ObjectArrayProperties) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ObjectArrayProperties) ProtoMessage() {} + +func (x *ObjectArrayProperties) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ObjectArrayProperties.ProtoReflect.Descriptor instead. +func (*ObjectArrayProperties) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{5} +} + +func (x *ObjectArrayProperties) GetValues() []*ObjectPropertiesValue { + if x != nil { + return x.Values + } + return nil +} + +func (x *ObjectArrayProperties) GetPropName() string { + if x != nil { + return x.PropName + } + return "" +} + +type ObjectProperties struct { + state protoimpl.MessageState `protogen:"open.v1"` + Value *ObjectPropertiesValue `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + PropName string `protobuf:"bytes,2,opt,name=prop_name,json=propName,proto3" json:"prop_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ObjectProperties) Reset() { + *x = ObjectProperties{} + mi := &file_v1_base_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ObjectProperties) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func 
(*ObjectProperties) ProtoMessage() {} + +func (x *ObjectProperties) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ObjectProperties.ProtoReflect.Descriptor instead. +func (*ObjectProperties) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{6} +} + +func (x *ObjectProperties) GetValue() *ObjectPropertiesValue { + if x != nil { + return x.Value + } + return nil +} + +func (x *ObjectProperties) GetPropName() string { + if x != nil { + return x.PropName + } + return "" +} + +type TextArray struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *TextArray) Reset() { + *x = TextArray{} + mi := &file_v1_base_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *TextArray) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TextArray) ProtoMessage() {} + +func (x *TextArray) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TextArray.ProtoReflect.Descriptor instead. 
+func (*TextArray) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{7} +} + +func (x *TextArray) GetValues() []string { + if x != nil { + return x.Values + } + return nil +} + +type IntArray struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []int64 `protobuf:"varint,1,rep,packed,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *IntArray) Reset() { + *x = IntArray{} + mi := &file_v1_base_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *IntArray) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*IntArray) ProtoMessage() {} + +func (x *IntArray) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use IntArray.ProtoReflect.Descriptor instead. 
+func (*IntArray) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{8} +} + +func (x *IntArray) GetValues() []int64 { + if x != nil { + return x.Values + } + return nil +} + +type NumberArray struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []float64 `protobuf:"fixed64,1,rep,packed,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NumberArray) Reset() { + *x = NumberArray{} + mi := &file_v1_base_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NumberArray) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NumberArray) ProtoMessage() {} + +func (x *NumberArray) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NumberArray.ProtoReflect.Descriptor instead. 
+func (*NumberArray) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{9} +} + +func (x *NumberArray) GetValues() []float64 { + if x != nil { + return x.Values + } + return nil +} + +type BooleanArray struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []bool `protobuf:"varint,1,rep,packed,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BooleanArray) Reset() { + *x = BooleanArray{} + mi := &file_v1_base_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BooleanArray) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BooleanArray) ProtoMessage() {} + +func (x *BooleanArray) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BooleanArray.ProtoReflect.Descriptor instead. +func (*BooleanArray) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{10} +} + +func (x *BooleanArray) GetValues() []bool { + if x != nil { + return x.Values + } + return nil +} + +type Filters struct { + state protoimpl.MessageState `protogen:"open.v1"` + Operator Filters_Operator `protobuf:"varint,1,opt,name=operator,proto3,enum=weaviate.v1.Filters_Operator" json:"operator,omitempty"` + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + // + // Deprecated: Marked as deprecated in v1/base.proto. 
+ On []string `protobuf:"bytes,2,rep,name=on,proto3" json:"on,omitempty"` // will be removed in the future, use path + Filters []*Filters `protobuf:"bytes,3,rep,name=filters,proto3" json:"filters,omitempty"` + // Types that are valid to be assigned to TestValue: + // + // *Filters_ValueText + // *Filters_ValueInt + // *Filters_ValueBoolean + // *Filters_ValueNumber + // *Filters_ValueTextArray + // *Filters_ValueIntArray + // *Filters_ValueBooleanArray + // *Filters_ValueNumberArray + // *Filters_ValueGeo + TestValue isFilters_TestValue `protobuf_oneof:"test_value"` + Target *FilterTarget `protobuf:"bytes,20,opt,name=target,proto3" json:"target,omitempty"` // leave space for more filter values + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Filters) Reset() { + *x = Filters{} + mi := &file_v1_base_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Filters) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Filters) ProtoMessage() {} + +func (x *Filters) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Filters.ProtoReflect.Descriptor instead. +func (*Filters) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{11} +} + +func (x *Filters) GetOperator() Filters_Operator { + if x != nil { + return x.Operator + } + return Filters_OPERATOR_UNSPECIFIED +} + +// Deprecated: Marked as deprecated in v1/base.proto. 
+func (x *Filters) GetOn() []string { + if x != nil { + return x.On + } + return nil +} + +func (x *Filters) GetFilters() []*Filters { + if x != nil { + return x.Filters + } + return nil +} + +func (x *Filters) GetTestValue() isFilters_TestValue { + if x != nil { + return x.TestValue + } + return nil +} + +func (x *Filters) GetValueText() string { + if x != nil { + if x, ok := x.TestValue.(*Filters_ValueText); ok { + return x.ValueText + } + } + return "" +} + +func (x *Filters) GetValueInt() int64 { + if x != nil { + if x, ok := x.TestValue.(*Filters_ValueInt); ok { + return x.ValueInt + } + } + return 0 +} + +func (x *Filters) GetValueBoolean() bool { + if x != nil { + if x, ok := x.TestValue.(*Filters_ValueBoolean); ok { + return x.ValueBoolean + } + } + return false +} + +func (x *Filters) GetValueNumber() float64 { + if x != nil { + if x, ok := x.TestValue.(*Filters_ValueNumber); ok { + return x.ValueNumber + } + } + return 0 +} + +func (x *Filters) GetValueTextArray() *TextArray { + if x != nil { + if x, ok := x.TestValue.(*Filters_ValueTextArray); ok { + return x.ValueTextArray + } + } + return nil +} + +func (x *Filters) GetValueIntArray() *IntArray { + if x != nil { + if x, ok := x.TestValue.(*Filters_ValueIntArray); ok { + return x.ValueIntArray + } + } + return nil +} + +func (x *Filters) GetValueBooleanArray() *BooleanArray { + if x != nil { + if x, ok := x.TestValue.(*Filters_ValueBooleanArray); ok { + return x.ValueBooleanArray + } + } + return nil +} + +func (x *Filters) GetValueNumberArray() *NumberArray { + if x != nil { + if x, ok := x.TestValue.(*Filters_ValueNumberArray); ok { + return x.ValueNumberArray + } + } + return nil +} + +func (x *Filters) GetValueGeo() *GeoCoordinatesFilter { + if x != nil { + if x, ok := x.TestValue.(*Filters_ValueGeo); ok { + return x.ValueGeo + } + } + return nil +} + +func (x *Filters) GetTarget() *FilterTarget { + if x != nil { + return x.Target + } + return nil +} + +type isFilters_TestValue interface { + 
isFilters_TestValue() +} + +type Filters_ValueText struct { + ValueText string `protobuf:"bytes,4,opt,name=value_text,json=valueText,proto3,oneof"` +} + +type Filters_ValueInt struct { + ValueInt int64 `protobuf:"varint,5,opt,name=value_int,json=valueInt,proto3,oneof"` +} + +type Filters_ValueBoolean struct { + ValueBoolean bool `protobuf:"varint,6,opt,name=value_boolean,json=valueBoolean,proto3,oneof"` +} + +type Filters_ValueNumber struct { + ValueNumber float64 `protobuf:"fixed64,7,opt,name=value_number,json=valueNumber,proto3,oneof"` +} + +type Filters_ValueTextArray struct { + ValueTextArray *TextArray `protobuf:"bytes,9,opt,name=value_text_array,json=valueTextArray,proto3,oneof"` +} + +type Filters_ValueIntArray struct { + ValueIntArray *IntArray `protobuf:"bytes,10,opt,name=value_int_array,json=valueIntArray,proto3,oneof"` +} + +type Filters_ValueBooleanArray struct { + ValueBooleanArray *BooleanArray `protobuf:"bytes,11,opt,name=value_boolean_array,json=valueBooleanArray,proto3,oneof"` +} + +type Filters_ValueNumberArray struct { + ValueNumberArray *NumberArray `protobuf:"bytes,12,opt,name=value_number_array,json=valueNumberArray,proto3,oneof"` +} + +type Filters_ValueGeo struct { + ValueGeo *GeoCoordinatesFilter `protobuf:"bytes,13,opt,name=value_geo,json=valueGeo,proto3,oneof"` +} + +func (*Filters_ValueText) isFilters_TestValue() {} + +func (*Filters_ValueInt) isFilters_TestValue() {} + +func (*Filters_ValueBoolean) isFilters_TestValue() {} + +func (*Filters_ValueNumber) isFilters_TestValue() {} + +func (*Filters_ValueTextArray) isFilters_TestValue() {} + +func (*Filters_ValueIntArray) isFilters_TestValue() {} + +func (*Filters_ValueBooleanArray) isFilters_TestValue() {} + +func (*Filters_ValueNumberArray) isFilters_TestValue() {} + +func (*Filters_ValueGeo) isFilters_TestValue() {} + +type FilterReferenceSingleTarget struct { + state protoimpl.MessageState `protogen:"open.v1"` + On string `protobuf:"bytes,1,opt,name=on,proto3" json:"on,omitempty"` + 
Target *FilterTarget `protobuf:"bytes,2,opt,name=target,proto3" json:"target,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *FilterReferenceSingleTarget) Reset() { + *x = FilterReferenceSingleTarget{} + mi := &file_v1_base_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *FilterReferenceSingleTarget) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FilterReferenceSingleTarget) ProtoMessage() {} + +func (x *FilterReferenceSingleTarget) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FilterReferenceSingleTarget.ProtoReflect.Descriptor instead. +func (*FilterReferenceSingleTarget) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{12} +} + +func (x *FilterReferenceSingleTarget) GetOn() string { + if x != nil { + return x.On + } + return "" +} + +func (x *FilterReferenceSingleTarget) GetTarget() *FilterTarget { + if x != nil { + return x.Target + } + return nil +} + +type FilterReferenceMultiTarget struct { + state protoimpl.MessageState `protogen:"open.v1"` + On string `protobuf:"bytes,1,opt,name=on,proto3" json:"on,omitempty"` + Target *FilterTarget `protobuf:"bytes,2,opt,name=target,proto3" json:"target,omitempty"` + TargetCollection string `protobuf:"bytes,3,opt,name=target_collection,json=targetCollection,proto3" json:"target_collection,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *FilterReferenceMultiTarget) Reset() { + *x = FilterReferenceMultiTarget{} + mi := &file_v1_base_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x 
*FilterReferenceMultiTarget) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FilterReferenceMultiTarget) ProtoMessage() {} + +func (x *FilterReferenceMultiTarget) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[13] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FilterReferenceMultiTarget.ProtoReflect.Descriptor instead. +func (*FilterReferenceMultiTarget) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{13} +} + +func (x *FilterReferenceMultiTarget) GetOn() string { + if x != nil { + return x.On + } + return "" +} + +func (x *FilterReferenceMultiTarget) GetTarget() *FilterTarget { + if x != nil { + return x.Target + } + return nil +} + +func (x *FilterReferenceMultiTarget) GetTargetCollection() string { + if x != nil { + return x.TargetCollection + } + return "" +} + +type FilterReferenceCount struct { + state protoimpl.MessageState `protogen:"open.v1"` + On string `protobuf:"bytes,1,opt,name=on,proto3" json:"on,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *FilterReferenceCount) Reset() { + *x = FilterReferenceCount{} + mi := &file_v1_base_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *FilterReferenceCount) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FilterReferenceCount) ProtoMessage() {} + +func (x *FilterReferenceCount) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[14] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FilterReferenceCount.ProtoReflect.Descriptor instead. 
+func (*FilterReferenceCount) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{14} +} + +func (x *FilterReferenceCount) GetOn() string { + if x != nil { + return x.On + } + return "" +} + +type FilterTarget struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Target: + // + // *FilterTarget_Property + // *FilterTarget_SingleTarget + // *FilterTarget_MultiTarget + // *FilterTarget_Count + Target isFilterTarget_Target `protobuf_oneof:"target"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *FilterTarget) Reset() { + *x = FilterTarget{} + mi := &file_v1_base_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *FilterTarget) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FilterTarget) ProtoMessage() {} + +func (x *FilterTarget) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[15] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FilterTarget.ProtoReflect.Descriptor instead. 
+func (*FilterTarget) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{15} +} + +func (x *FilterTarget) GetTarget() isFilterTarget_Target { + if x != nil { + return x.Target + } + return nil +} + +func (x *FilterTarget) GetProperty() string { + if x != nil { + if x, ok := x.Target.(*FilterTarget_Property); ok { + return x.Property + } + } + return "" +} + +func (x *FilterTarget) GetSingleTarget() *FilterReferenceSingleTarget { + if x != nil { + if x, ok := x.Target.(*FilterTarget_SingleTarget); ok { + return x.SingleTarget + } + } + return nil +} + +func (x *FilterTarget) GetMultiTarget() *FilterReferenceMultiTarget { + if x != nil { + if x, ok := x.Target.(*FilterTarget_MultiTarget); ok { + return x.MultiTarget + } + } + return nil +} + +func (x *FilterTarget) GetCount() *FilterReferenceCount { + if x != nil { + if x, ok := x.Target.(*FilterTarget_Count); ok { + return x.Count + } + } + return nil +} + +type isFilterTarget_Target interface { + isFilterTarget_Target() +} + +type FilterTarget_Property struct { + Property string `protobuf:"bytes,1,opt,name=property,proto3,oneof"` +} + +type FilterTarget_SingleTarget struct { + SingleTarget *FilterReferenceSingleTarget `protobuf:"bytes,2,opt,name=single_target,json=singleTarget,proto3,oneof"` +} + +type FilterTarget_MultiTarget struct { + MultiTarget *FilterReferenceMultiTarget `protobuf:"bytes,3,opt,name=multi_target,json=multiTarget,proto3,oneof"` +} + +type FilterTarget_Count struct { + Count *FilterReferenceCount `protobuf:"bytes,4,opt,name=count,proto3,oneof"` +} + +func (*FilterTarget_Property) isFilterTarget_Target() {} + +func (*FilterTarget_SingleTarget) isFilterTarget_Target() {} + +func (*FilterTarget_MultiTarget) isFilterTarget_Target() {} + +func (*FilterTarget_Count) isFilterTarget_Target() {} + +type GeoCoordinatesFilter struct { + state protoimpl.MessageState `protogen:"open.v1"` + Latitude float32 `protobuf:"fixed32,1,opt,name=latitude,proto3" json:"latitude,omitempty"` 
+ Longitude float32 `protobuf:"fixed32,2,opt,name=longitude,proto3" json:"longitude,omitempty"` + Distance float32 `protobuf:"fixed32,3,opt,name=distance,proto3" json:"distance,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GeoCoordinatesFilter) Reset() { + *x = GeoCoordinatesFilter{} + mi := &file_v1_base_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GeoCoordinatesFilter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GeoCoordinatesFilter) ProtoMessage() {} + +func (x *GeoCoordinatesFilter) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[16] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GeoCoordinatesFilter.ProtoReflect.Descriptor instead. +func (*GeoCoordinatesFilter) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{16} +} + +func (x *GeoCoordinatesFilter) GetLatitude() float32 { + if x != nil { + return x.Latitude + } + return 0 +} + +func (x *GeoCoordinatesFilter) GetLongitude() float32 { + if x != nil { + return x.Longitude + } + return 0 +} + +func (x *GeoCoordinatesFilter) GetDistance() float32 { + if x != nil { + return x.Distance + } + return 0 +} + +type Vectors struct { + state protoimpl.MessageState `protogen:"open.v1"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Deprecated: Marked as deprecated in v1/base.proto. 
+ Index uint64 `protobuf:"varint,2,opt,name=index,proto3" json:"index,omitempty"` // for multi-vec + VectorBytes []byte `protobuf:"bytes,3,opt,name=vector_bytes,json=vectorBytes,proto3" json:"vector_bytes,omitempty"` + Type Vectors_VectorType `protobuf:"varint,4,opt,name=type,proto3,enum=weaviate.v1.Vectors_VectorType" json:"type,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Vectors) Reset() { + *x = Vectors{} + mi := &file_v1_base_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Vectors) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Vectors) ProtoMessage() {} + +func (x *Vectors) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_proto_msgTypes[17] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Vectors.ProtoReflect.Descriptor instead. +func (*Vectors) Descriptor() ([]byte, []int) { + return file_v1_base_proto_rawDescGZIP(), []int{17} +} + +func (x *Vectors) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +// Deprecated: Marked as deprecated in v1/base.proto. 
+func (x *Vectors) GetIndex() uint64 { + if x != nil { + return x.Index + } + return 0 +} + +func (x *Vectors) GetVectorBytes() []byte { + if x != nil { + return x.VectorBytes + } + return nil +} + +func (x *Vectors) GetType() Vectors_VectorType { + if x != nil { + return x.Type + } + return Vectors_VECTOR_TYPE_UNSPECIFIED +} + +var File_v1_base_proto protoreflect.FileDescriptor + +const file_v1_base_proto_rawDesc = "" + + "\n" + + "\rv1/base.proto\x12\vweaviate.v1\x1a\x1cgoogle/protobuf/struct.proto\"s\n" + + "\x15NumberArrayProperties\x12\x1a\n" + + "\x06values\x18\x01 \x03(\x01B\x02\x18\x01R\x06values\x12\x1b\n" + + "\tprop_name\x18\x02 \x01(\tR\bpropName\x12!\n" + + "\fvalues_bytes\x18\x03 \x01(\fR\vvaluesBytes\"I\n" + + "\x12IntArrayProperties\x12\x16\n" + + "\x06values\x18\x01 \x03(\x03R\x06values\x12\x1b\n" + + "\tprop_name\x18\x02 \x01(\tR\bpropName\"J\n" + + "\x13TextArrayProperties\x12\x16\n" + + "\x06values\x18\x01 \x03(\tR\x06values\x12\x1b\n" + + "\tprop_name\x18\x02 \x01(\tR\bpropName\"M\n" + + "\x16BooleanArrayProperties\x12\x16\n" + + "\x06values\x18\x01 \x03(\bR\x06values\x12\x1b\n" + + "\tprop_name\x18\x02 \x01(\tR\bpropName\"\x94\x05\n" + + "\x15ObjectPropertiesValue\x12E\n" + + "\x12non_ref_properties\x18\x01 \x01(\v2\x17.google.protobuf.StructR\x10nonRefProperties\x12Z\n" + + "\x17number_array_properties\x18\x02 \x03(\v2\".weaviate.v1.NumberArrayPropertiesR\x15numberArrayProperties\x12Q\n" + + "\x14int_array_properties\x18\x03 \x03(\v2\x1f.weaviate.v1.IntArrayPropertiesR\x12intArrayProperties\x12T\n" + + "\x15text_array_properties\x18\x04 \x03(\v2 .weaviate.v1.TextArrayPropertiesR\x13textArrayProperties\x12]\n" + + "\x18boolean_array_properties\x18\x05 \x03(\v2#.weaviate.v1.BooleanArrayPropertiesR\x16booleanArrayProperties\x12J\n" + + "\x11object_properties\x18\x06 \x03(\v2\x1d.weaviate.v1.ObjectPropertiesR\x10objectProperties\x12Z\n" + + "\x17object_array_properties\x18\a 
\x03(\v2\".weaviate.v1.ObjectArrayPropertiesR\x15objectArrayProperties\x12(\n" + + "\x10empty_list_props\x18\n" + + " \x03(\tR\x0eemptyListProps\"p\n" + + "\x15ObjectArrayProperties\x12:\n" + + "\x06values\x18\x01 \x03(\v2\".weaviate.v1.ObjectPropertiesValueR\x06values\x12\x1b\n" + + "\tprop_name\x18\x02 \x01(\tR\bpropName\"i\n" + + "\x10ObjectProperties\x128\n" + + "\x05value\x18\x01 \x01(\v2\".weaviate.v1.ObjectPropertiesValueR\x05value\x12\x1b\n" + + "\tprop_name\x18\x02 \x01(\tR\bpropName\"#\n" + + "\tTextArray\x12\x16\n" + + "\x06values\x18\x01 \x03(\tR\x06values\"\"\n" + + "\bIntArray\x12\x16\n" + + "\x06values\x18\x01 \x03(\x03R\x06values\"%\n" + + "\vNumberArray\x12\x16\n" + + "\x06values\x18\x01 \x03(\x01R\x06values\"&\n" + + "\fBooleanArray\x12\x16\n" + + "\x06values\x18\x01 \x03(\bR\x06values\"\xc7\b\n" + + "\aFilters\x129\n" + + "\boperator\x18\x01 \x01(\x0e2\x1d.weaviate.v1.Filters.OperatorR\boperator\x12\x12\n" + + "\x02on\x18\x02 \x03(\tB\x02\x18\x01R\x02on\x12.\n" + + "\afilters\x18\x03 \x03(\v2\x14.weaviate.v1.FiltersR\afilters\x12\x1f\n" + + "\n" + + "value_text\x18\x04 \x01(\tH\x00R\tvalueText\x12\x1d\n" + + "\tvalue_int\x18\x05 \x01(\x03H\x00R\bvalueInt\x12%\n" + + "\rvalue_boolean\x18\x06 \x01(\bH\x00R\fvalueBoolean\x12#\n" + + "\fvalue_number\x18\a \x01(\x01H\x00R\vvalueNumber\x12B\n" + + "\x10value_text_array\x18\t \x01(\v2\x16.weaviate.v1.TextArrayH\x00R\x0evalueTextArray\x12?\n" + + "\x0fvalue_int_array\x18\n" + + " \x01(\v2\x15.weaviate.v1.IntArrayH\x00R\rvalueIntArray\x12K\n" + + "\x13value_boolean_array\x18\v \x01(\v2\x19.weaviate.v1.BooleanArrayH\x00R\x11valueBooleanArray\x12H\n" + + "\x12value_number_array\x18\f \x01(\v2\x18.weaviate.v1.NumberArrayH\x00R\x10valueNumberArray\x12@\n" + + "\tvalue_geo\x18\r \x01(\v2!.weaviate.v1.GeoCoordinatesFilterH\x00R\bvalueGeo\x121\n" + + "\x06target\x18\x14 \x01(\v2\x19.weaviate.v1.FilterTargetR\x06target\"\x91\x03\n" + + "\bOperator\x12\x18\n" + + "\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x12\n" + + 
"\x0eOPERATOR_EQUAL\x10\x01\x12\x16\n" + + "\x12OPERATOR_NOT_EQUAL\x10\x02\x12\x19\n" + + "\x15OPERATOR_GREATER_THAN\x10\x03\x12\x1f\n" + + "\x1bOPERATOR_GREATER_THAN_EQUAL\x10\x04\x12\x16\n" + + "\x12OPERATOR_LESS_THAN\x10\x05\x12\x1c\n" + + "\x18OPERATOR_LESS_THAN_EQUAL\x10\x06\x12\x10\n" + + "\fOPERATOR_AND\x10\a\x12\x0f\n" + + "\vOPERATOR_OR\x10\b\x12\x1d\n" + + "\x19OPERATOR_WITHIN_GEO_RANGE\x10\t\x12\x11\n" + + "\rOPERATOR_LIKE\x10\n" + + "\x12\x14\n" + + "\x10OPERATOR_IS_NULL\x10\v\x12\x19\n" + + "\x15OPERATOR_CONTAINS_ANY\x10\f\x12\x19\n" + + "\x15OPERATOR_CONTAINS_ALL\x10\r\x12\x1a\n" + + "\x16OPERATOR_CONTAINS_NONE\x10\x0e\x12\x10\n" + + "\fOPERATOR_NOT\x10\x0fB\f\n" + + "\n" + + "test_value\"`\n" + + "\x1bFilterReferenceSingleTarget\x12\x0e\n" + + "\x02on\x18\x01 \x01(\tR\x02on\x121\n" + + "\x06target\x18\x02 \x01(\v2\x19.weaviate.v1.FilterTargetR\x06target\"\x8c\x01\n" + + "\x1aFilterReferenceMultiTarget\x12\x0e\n" + + "\x02on\x18\x01 \x01(\tR\x02on\x121\n" + + "\x06target\x18\x02 \x01(\v2\x19.weaviate.v1.FilterTargetR\x06target\x12+\n" + + "\x11target_collection\x18\x03 \x01(\tR\x10targetCollection\"&\n" + + "\x14FilterReferenceCount\x12\x0e\n" + + "\x02on\x18\x01 \x01(\tR\x02on\"\x90\x02\n" + + "\fFilterTarget\x12\x1c\n" + + "\bproperty\x18\x01 \x01(\tH\x00R\bproperty\x12O\n" + + "\rsingle_target\x18\x02 \x01(\v2(.weaviate.v1.FilterReferenceSingleTargetH\x00R\fsingleTarget\x12L\n" + + "\fmulti_target\x18\x03 \x01(\v2'.weaviate.v1.FilterReferenceMultiTargetH\x00R\vmultiTarget\x129\n" + + "\x05count\x18\x04 \x01(\v2!.weaviate.v1.FilterReferenceCountH\x00R\x05countB\b\n" + + "\x06target\"l\n" + + "\x14GeoCoordinatesFilter\x12\x1a\n" + + "\blatitude\x18\x01 \x01(\x02R\blatitude\x12\x1c\n" + + "\tlongitude\x18\x02 \x01(\x02R\tlongitude\x12\x1a\n" + + "\bdistance\x18\x03 \x01(\x02R\bdistance\"\xf3\x01\n" + + "\aVectors\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12\x18\n" + + "\x05index\x18\x02 \x01(\x04B\x02\x18\x01R\x05index\x12!\n" + + 
"\fvector_bytes\x18\x03 \x01(\fR\vvectorBytes\x123\n" + + "\x04type\x18\x04 \x01(\x0e2\x1f.weaviate.v1.Vectors.VectorTypeR\x04type\"b\n" + + "\n" + + "VectorType\x12\x1b\n" + + "\x17VECTOR_TYPE_UNSPECIFIED\x10\x00\x12\x1b\n" + + "\x17VECTOR_TYPE_SINGLE_FP32\x10\x01\x12\x1a\n" + + "\x16VECTOR_TYPE_MULTI_FP32\x10\x02*\x89\x01\n" + + "\x10ConsistencyLevel\x12!\n" + + "\x1dCONSISTENCY_LEVEL_UNSPECIFIED\x10\x00\x12\x19\n" + + "\x15CONSISTENCY_LEVEL_ONE\x10\x01\x12\x1c\n" + + "\x18CONSISTENCY_LEVEL_QUORUM\x10\x02\x12\x19\n" + + "\x15CONSISTENCY_LEVEL_ALL\x10\x03Bn\n" + + "#io.weaviate.client.grpc.protocol.v1B\x11WeaviateProtoBaseZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var ( + file_v1_base_proto_rawDescOnce sync.Once + file_v1_base_proto_rawDescData []byte +) + +func file_v1_base_proto_rawDescGZIP() []byte { + file_v1_base_proto_rawDescOnce.Do(func() { + file_v1_base_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_v1_base_proto_rawDesc), len(file_v1_base_proto_rawDesc))) + }) + return file_v1_base_proto_rawDescData +} + +var file_v1_base_proto_enumTypes = make([]protoimpl.EnumInfo, 3) +var file_v1_base_proto_msgTypes = make([]protoimpl.MessageInfo, 18) +var file_v1_base_proto_goTypes = []any{ + (ConsistencyLevel)(0), // 0: weaviate.v1.ConsistencyLevel + (Filters_Operator)(0), // 1: weaviate.v1.Filters.Operator + (Vectors_VectorType)(0), // 2: weaviate.v1.Vectors.VectorType + (*NumberArrayProperties)(nil), // 3: weaviate.v1.NumberArrayProperties + (*IntArrayProperties)(nil), // 4: weaviate.v1.IntArrayProperties + (*TextArrayProperties)(nil), // 5: weaviate.v1.TextArrayProperties + (*BooleanArrayProperties)(nil), // 6: weaviate.v1.BooleanArrayProperties + (*ObjectPropertiesValue)(nil), // 7: weaviate.v1.ObjectPropertiesValue + (*ObjectArrayProperties)(nil), // 8: weaviate.v1.ObjectArrayProperties + (*ObjectProperties)(nil), // 9: weaviate.v1.ObjectProperties + (*TextArray)(nil), // 10: weaviate.v1.TextArray + 
(*IntArray)(nil), // 11: weaviate.v1.IntArray + (*NumberArray)(nil), // 12: weaviate.v1.NumberArray + (*BooleanArray)(nil), // 13: weaviate.v1.BooleanArray + (*Filters)(nil), // 14: weaviate.v1.Filters + (*FilterReferenceSingleTarget)(nil), // 15: weaviate.v1.FilterReferenceSingleTarget + (*FilterReferenceMultiTarget)(nil), // 16: weaviate.v1.FilterReferenceMultiTarget + (*FilterReferenceCount)(nil), // 17: weaviate.v1.FilterReferenceCount + (*FilterTarget)(nil), // 18: weaviate.v1.FilterTarget + (*GeoCoordinatesFilter)(nil), // 19: weaviate.v1.GeoCoordinatesFilter + (*Vectors)(nil), // 20: weaviate.v1.Vectors + (*structpb.Struct)(nil), // 21: google.protobuf.Struct +} +var file_v1_base_proto_depIdxs = []int32{ + 21, // 0: weaviate.v1.ObjectPropertiesValue.non_ref_properties:type_name -> google.protobuf.Struct + 3, // 1: weaviate.v1.ObjectPropertiesValue.number_array_properties:type_name -> weaviate.v1.NumberArrayProperties + 4, // 2: weaviate.v1.ObjectPropertiesValue.int_array_properties:type_name -> weaviate.v1.IntArrayProperties + 5, // 3: weaviate.v1.ObjectPropertiesValue.text_array_properties:type_name -> weaviate.v1.TextArrayProperties + 6, // 4: weaviate.v1.ObjectPropertiesValue.boolean_array_properties:type_name -> weaviate.v1.BooleanArrayProperties + 9, // 5: weaviate.v1.ObjectPropertiesValue.object_properties:type_name -> weaviate.v1.ObjectProperties + 8, // 6: weaviate.v1.ObjectPropertiesValue.object_array_properties:type_name -> weaviate.v1.ObjectArrayProperties + 7, // 7: weaviate.v1.ObjectArrayProperties.values:type_name -> weaviate.v1.ObjectPropertiesValue + 7, // 8: weaviate.v1.ObjectProperties.value:type_name -> weaviate.v1.ObjectPropertiesValue + 1, // 9: weaviate.v1.Filters.operator:type_name -> weaviate.v1.Filters.Operator + 14, // 10: weaviate.v1.Filters.filters:type_name -> weaviate.v1.Filters + 10, // 11: weaviate.v1.Filters.value_text_array:type_name -> weaviate.v1.TextArray + 11, // 12: weaviate.v1.Filters.value_int_array:type_name -> 
weaviate.v1.IntArray + 13, // 13: weaviate.v1.Filters.value_boolean_array:type_name -> weaviate.v1.BooleanArray + 12, // 14: weaviate.v1.Filters.value_number_array:type_name -> weaviate.v1.NumberArray + 19, // 15: weaviate.v1.Filters.value_geo:type_name -> weaviate.v1.GeoCoordinatesFilter + 18, // 16: weaviate.v1.Filters.target:type_name -> weaviate.v1.FilterTarget + 18, // 17: weaviate.v1.FilterReferenceSingleTarget.target:type_name -> weaviate.v1.FilterTarget + 18, // 18: weaviate.v1.FilterReferenceMultiTarget.target:type_name -> weaviate.v1.FilterTarget + 15, // 19: weaviate.v1.FilterTarget.single_target:type_name -> weaviate.v1.FilterReferenceSingleTarget + 16, // 20: weaviate.v1.FilterTarget.multi_target:type_name -> weaviate.v1.FilterReferenceMultiTarget + 17, // 21: weaviate.v1.FilterTarget.count:type_name -> weaviate.v1.FilterReferenceCount + 2, // 22: weaviate.v1.Vectors.type:type_name -> weaviate.v1.Vectors.VectorType + 23, // [23:23] is the sub-list for method output_type + 23, // [23:23] is the sub-list for method input_type + 23, // [23:23] is the sub-list for extension type_name + 23, // [23:23] is the sub-list for extension extendee + 0, // [0:23] is the sub-list for field type_name +} + +func init() { file_v1_base_proto_init() } +func file_v1_base_proto_init() { + if File_v1_base_proto != nil { + return + } + file_v1_base_proto_msgTypes[11].OneofWrappers = []any{ + (*Filters_ValueText)(nil), + (*Filters_ValueInt)(nil), + (*Filters_ValueBoolean)(nil), + (*Filters_ValueNumber)(nil), + (*Filters_ValueTextArray)(nil), + (*Filters_ValueIntArray)(nil), + (*Filters_ValueBooleanArray)(nil), + (*Filters_ValueNumberArray)(nil), + (*Filters_ValueGeo)(nil), + } + file_v1_base_proto_msgTypes[15].OneofWrappers = []any{ + (*FilterTarget_Property)(nil), + (*FilterTarget_SingleTarget)(nil), + (*FilterTarget_MultiTarget)(nil), + (*FilterTarget_Count)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: 
reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v1_base_proto_rawDesc), len(file_v1_base_proto_rawDesc)), + NumEnums: 3, + NumMessages: 18, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_v1_base_proto_goTypes, + DependencyIndexes: file_v1_base_proto_depIdxs, + EnumInfos: file_v1_base_proto_enumTypes, + MessageInfos: file_v1_base_proto_msgTypes, + }.Build() + File_v1_base_proto = out.File + file_v1_base_proto_goTypes = nil + file_v1_base_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/base_search.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/base_search.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..6699839fedaa2f39d82e2cd53fee4fb1c49b2d4c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/base_search.pb.go @@ -0,0 +1,1728 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package protocol + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type CombinationMethod int32 + +const ( + CombinationMethod_COMBINATION_METHOD_UNSPECIFIED CombinationMethod = 0 + CombinationMethod_COMBINATION_METHOD_TYPE_SUM CombinationMethod = 1 + CombinationMethod_COMBINATION_METHOD_TYPE_MIN CombinationMethod = 2 + CombinationMethod_COMBINATION_METHOD_TYPE_AVERAGE CombinationMethod = 3 + CombinationMethod_COMBINATION_METHOD_TYPE_RELATIVE_SCORE CombinationMethod = 4 + CombinationMethod_COMBINATION_METHOD_TYPE_MANUAL CombinationMethod = 5 +) + +// Enum value maps for CombinationMethod. 
+var ( + CombinationMethod_name = map[int32]string{ + 0: "COMBINATION_METHOD_UNSPECIFIED", + 1: "COMBINATION_METHOD_TYPE_SUM", + 2: "COMBINATION_METHOD_TYPE_MIN", + 3: "COMBINATION_METHOD_TYPE_AVERAGE", + 4: "COMBINATION_METHOD_TYPE_RELATIVE_SCORE", + 5: "COMBINATION_METHOD_TYPE_MANUAL", + } + CombinationMethod_value = map[string]int32{ + "COMBINATION_METHOD_UNSPECIFIED": 0, + "COMBINATION_METHOD_TYPE_SUM": 1, + "COMBINATION_METHOD_TYPE_MIN": 2, + "COMBINATION_METHOD_TYPE_AVERAGE": 3, + "COMBINATION_METHOD_TYPE_RELATIVE_SCORE": 4, + "COMBINATION_METHOD_TYPE_MANUAL": 5, + } +) + +func (x CombinationMethod) Enum() *CombinationMethod { + p := new(CombinationMethod) + *p = x + return p +} + +func (x CombinationMethod) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (CombinationMethod) Descriptor() protoreflect.EnumDescriptor { + return file_v1_base_search_proto_enumTypes[0].Descriptor() +} + +func (CombinationMethod) Type() protoreflect.EnumType { + return &file_v1_base_search_proto_enumTypes[0] +} + +func (x CombinationMethod) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use CombinationMethod.Descriptor instead. +func (CombinationMethod) EnumDescriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{0} +} + +type SearchOperatorOptions_Operator int32 + +const ( + SearchOperatorOptions_OPERATOR_UNSPECIFIED SearchOperatorOptions_Operator = 0 + SearchOperatorOptions_OPERATOR_OR SearchOperatorOptions_Operator = 1 + SearchOperatorOptions_OPERATOR_AND SearchOperatorOptions_Operator = 2 +) + +// Enum value maps for SearchOperatorOptions_Operator. 
+var ( + SearchOperatorOptions_Operator_name = map[int32]string{ + 0: "OPERATOR_UNSPECIFIED", + 1: "OPERATOR_OR", + 2: "OPERATOR_AND", + } + SearchOperatorOptions_Operator_value = map[string]int32{ + "OPERATOR_UNSPECIFIED": 0, + "OPERATOR_OR": 1, + "OPERATOR_AND": 2, + } +) + +func (x SearchOperatorOptions_Operator) Enum() *SearchOperatorOptions_Operator { + p := new(SearchOperatorOptions_Operator) + *p = x + return p +} + +func (x SearchOperatorOptions_Operator) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (SearchOperatorOptions_Operator) Descriptor() protoreflect.EnumDescriptor { + return file_v1_base_search_proto_enumTypes[1].Descriptor() +} + +func (SearchOperatorOptions_Operator) Type() protoreflect.EnumType { + return &file_v1_base_search_proto_enumTypes[1] +} + +func (x SearchOperatorOptions_Operator) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use SearchOperatorOptions_Operator.Descriptor instead. +func (SearchOperatorOptions_Operator) EnumDescriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{3, 0} +} + +type Hybrid_FusionType int32 + +const ( + Hybrid_FUSION_TYPE_UNSPECIFIED Hybrid_FusionType = 0 + Hybrid_FUSION_TYPE_RANKED Hybrid_FusionType = 1 + Hybrid_FUSION_TYPE_RELATIVE_SCORE Hybrid_FusionType = 2 +) + +// Enum value maps for Hybrid_FusionType. 
+var ( + Hybrid_FusionType_name = map[int32]string{ + 0: "FUSION_TYPE_UNSPECIFIED", + 1: "FUSION_TYPE_RANKED", + 2: "FUSION_TYPE_RELATIVE_SCORE", + } + Hybrid_FusionType_value = map[string]int32{ + "FUSION_TYPE_UNSPECIFIED": 0, + "FUSION_TYPE_RANKED": 1, + "FUSION_TYPE_RELATIVE_SCORE": 2, + } +) + +func (x Hybrid_FusionType) Enum() *Hybrid_FusionType { + p := new(Hybrid_FusionType) + *p = x + return p +} + +func (x Hybrid_FusionType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Hybrid_FusionType) Descriptor() protoreflect.EnumDescriptor { + return file_v1_base_search_proto_enumTypes[2].Descriptor() +} + +func (Hybrid_FusionType) Type() protoreflect.EnumType { + return &file_v1_base_search_proto_enumTypes[2] +} + +func (x Hybrid_FusionType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Hybrid_FusionType.Descriptor instead. +func (Hybrid_FusionType) EnumDescriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{4, 0} +} + +type WeightsForTarget struct { + state protoimpl.MessageState `protogen:"open.v1"` + Target string `protobuf:"bytes,1,opt,name=target,proto3" json:"target,omitempty"` + Weight float32 `protobuf:"fixed32,2,opt,name=weight,proto3" json:"weight,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *WeightsForTarget) Reset() { + *x = WeightsForTarget{} + mi := &file_v1_base_search_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *WeightsForTarget) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*WeightsForTarget) ProtoMessage() {} + +func (x *WeightsForTarget) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + 
return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use WeightsForTarget.ProtoReflect.Descriptor instead. +func (*WeightsForTarget) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{0} +} + +func (x *WeightsForTarget) GetTarget() string { + if x != nil { + return x.Target + } + return "" +} + +func (x *WeightsForTarget) GetWeight() float32 { + if x != nil { + return x.Weight + } + return 0 +} + +type Targets struct { + state protoimpl.MessageState `protogen:"open.v1"` + TargetVectors []string `protobuf:"bytes,1,rep,name=target_vectors,json=targetVectors,proto3" json:"target_vectors,omitempty"` + Combination CombinationMethod `protobuf:"varint,2,opt,name=combination,proto3,enum=weaviate.v1.CombinationMethod" json:"combination,omitempty"` + WeightsForTargets []*WeightsForTarget `protobuf:"bytes,4,rep,name=weights_for_targets,json=weightsForTargets,proto3" json:"weights_for_targets,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Targets) Reset() { + *x = Targets{} + mi := &file_v1_base_search_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Targets) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Targets) ProtoMessage() {} + +func (x *Targets) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Targets.ProtoReflect.Descriptor instead. 
+func (*Targets) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{1} +} + +func (x *Targets) GetTargetVectors() []string { + if x != nil { + return x.TargetVectors + } + return nil +} + +func (x *Targets) GetCombination() CombinationMethod { + if x != nil { + return x.Combination + } + return CombinationMethod_COMBINATION_METHOD_UNSPECIFIED +} + +func (x *Targets) GetWeightsForTargets() []*WeightsForTarget { + if x != nil { + return x.WeightsForTargets + } + return nil +} + +type VectorForTarget struct { + state protoimpl.MessageState `protogen:"open.v1"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Deprecated: Marked as deprecated in v1/base_search.proto. + VectorBytes []byte `protobuf:"bytes,2,opt,name=vector_bytes,json=vectorBytes,proto3" json:"vector_bytes,omitempty"` // deprecated in 1.29.0 - use vectors + Vectors []*Vectors `protobuf:"bytes,3,rep,name=vectors,proto3" json:"vectors,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *VectorForTarget) Reset() { + *x = VectorForTarget{} + mi := &file_v1_base_search_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *VectorForTarget) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VectorForTarget) ProtoMessage() {} + +func (x *VectorForTarget) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VectorForTarget.ProtoReflect.Descriptor instead. 
+func (*VectorForTarget) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{2} +} + +func (x *VectorForTarget) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. +func (x *VectorForTarget) GetVectorBytes() []byte { + if x != nil { + return x.VectorBytes + } + return nil +} + +func (x *VectorForTarget) GetVectors() []*Vectors { + if x != nil { + return x.Vectors + } + return nil +} + +type SearchOperatorOptions struct { + state protoimpl.MessageState `protogen:"open.v1"` + Operator SearchOperatorOptions_Operator `protobuf:"varint,1,opt,name=operator,proto3,enum=weaviate.v1.SearchOperatorOptions_Operator" json:"operator,omitempty"` + MinimumOrTokensMatch *int32 `protobuf:"varint,2,opt,name=minimum_or_tokens_match,json=minimumOrTokensMatch,proto3,oneof" json:"minimum_or_tokens_match,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SearchOperatorOptions) Reset() { + *x = SearchOperatorOptions{} + mi := &file_v1_base_search_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SearchOperatorOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SearchOperatorOptions) ProtoMessage() {} + +func (x *SearchOperatorOptions) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SearchOperatorOptions.ProtoReflect.Descriptor instead. 
+func (*SearchOperatorOptions) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{3} +} + +func (x *SearchOperatorOptions) GetOperator() SearchOperatorOptions_Operator { + if x != nil { + return x.Operator + } + return SearchOperatorOptions_OPERATOR_UNSPECIFIED +} + +func (x *SearchOperatorOptions) GetMinimumOrTokensMatch() int32 { + if x != nil && x.MinimumOrTokensMatch != nil { + return *x.MinimumOrTokensMatch + } + return 0 +} + +type Hybrid struct { + state protoimpl.MessageState `protogen:"open.v1"` + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + Properties []string `protobuf:"bytes,2,rep,name=properties,proto3" json:"properties,omitempty"` + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + // + // Deprecated: Marked as deprecated in v1/base_search.proto. + Vector []float32 `protobuf:"fixed32,3,rep,packed,name=vector,proto3" json:"vector,omitempty"` // will be removed in the future, use vectors + Alpha float32 `protobuf:"fixed32,4,opt,name=alpha,proto3" json:"alpha,omitempty"` + FusionType Hybrid_FusionType `protobuf:"varint,5,opt,name=fusion_type,json=fusionType,proto3,enum=weaviate.v1.Hybrid_FusionType" json:"fusion_type,omitempty"` + // Deprecated: Marked as deprecated in v1/base_search.proto. + VectorBytes []byte `protobuf:"bytes,6,opt,name=vector_bytes,json=vectorBytes,proto3" json:"vector_bytes,omitempty"` // deprecated in 1.29.0 - use vectors + // Deprecated: Marked as deprecated in v1/base_search.proto. + TargetVectors []string `protobuf:"bytes,7,rep,name=target_vectors,json=targetVectors,proto3" json:"target_vectors,omitempty"` // deprecated in 1.26 - use targets + NearText *NearTextSearch `protobuf:"bytes,8,opt,name=near_text,json=nearText,proto3" json:"near_text,omitempty"` // targets in msg is ignored and should not be set for hybrid + NearVector *NearVector `protobuf:"bytes,9,opt,name=near_vector,json=nearVector,proto3" json:"near_vector,omitempty"` // same as above. 
Use the target vector in the hybrid message + Targets *Targets `protobuf:"bytes,10,opt,name=targets,proto3" json:"targets,omitempty"` + Bm25SearchOperator *SearchOperatorOptions `protobuf:"bytes,11,opt,name=bm25_search_operator,json=bm25SearchOperator,proto3,oneof" json:"bm25_search_operator,omitempty"` + // only vector distance, but keep it extendable + // + // Types that are valid to be assigned to Threshold: + // + // *Hybrid_VectorDistance + Threshold isHybrid_Threshold `protobuf_oneof:"threshold"` + Vectors []*Vectors `protobuf:"bytes,21,rep,name=vectors,proto3" json:"vectors,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Hybrid) Reset() { + *x = Hybrid{} + mi := &file_v1_base_search_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Hybrid) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Hybrid) ProtoMessage() {} + +func (x *Hybrid) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Hybrid.ProtoReflect.Descriptor instead. +func (*Hybrid) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{4} +} + +func (x *Hybrid) GetQuery() string { + if x != nil { + return x.Query + } + return "" +} + +func (x *Hybrid) GetProperties() []string { + if x != nil { + return x.Properties + } + return nil +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. 
+func (x *Hybrid) GetVector() []float32 { + if x != nil { + return x.Vector + } + return nil +} + +func (x *Hybrid) GetAlpha() float32 { + if x != nil { + return x.Alpha + } + return 0 +} + +func (x *Hybrid) GetFusionType() Hybrid_FusionType { + if x != nil { + return x.FusionType + } + return Hybrid_FUSION_TYPE_UNSPECIFIED +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. +func (x *Hybrid) GetVectorBytes() []byte { + if x != nil { + return x.VectorBytes + } + return nil +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. +func (x *Hybrid) GetTargetVectors() []string { + if x != nil { + return x.TargetVectors + } + return nil +} + +func (x *Hybrid) GetNearText() *NearTextSearch { + if x != nil { + return x.NearText + } + return nil +} + +func (x *Hybrid) GetNearVector() *NearVector { + if x != nil { + return x.NearVector + } + return nil +} + +func (x *Hybrid) GetTargets() *Targets { + if x != nil { + return x.Targets + } + return nil +} + +func (x *Hybrid) GetBm25SearchOperator() *SearchOperatorOptions { + if x != nil { + return x.Bm25SearchOperator + } + return nil +} + +func (x *Hybrid) GetThreshold() isHybrid_Threshold { + if x != nil { + return x.Threshold + } + return nil +} + +func (x *Hybrid) GetVectorDistance() float32 { + if x != nil { + if x, ok := x.Threshold.(*Hybrid_VectorDistance); ok { + return x.VectorDistance + } + } + return 0 +} + +func (x *Hybrid) GetVectors() []*Vectors { + if x != nil { + return x.Vectors + } + return nil +} + +type isHybrid_Threshold interface { + isHybrid_Threshold() +} + +type Hybrid_VectorDistance struct { + VectorDistance float32 `protobuf:"fixed32,20,opt,name=vector_distance,json=vectorDistance,proto3,oneof"` +} + +func (*Hybrid_VectorDistance) isHybrid_Threshold() {} + +type NearVector struct { + state protoimpl.MessageState `protogen:"open.v1"` + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + // + // Deprecated: Marked as deprecated in v1/base_search.proto. 
+ Vector []float32 `protobuf:"fixed32,1,rep,packed,name=vector,proto3" json:"vector,omitempty"` // will be removed in the future, use vectors + Certainty *float64 `protobuf:"fixed64,2,opt,name=certainty,proto3,oneof" json:"certainty,omitempty"` + Distance *float64 `protobuf:"fixed64,3,opt,name=distance,proto3,oneof" json:"distance,omitempty"` + // Deprecated: Marked as deprecated in v1/base_search.proto. + VectorBytes []byte `protobuf:"bytes,4,opt,name=vector_bytes,json=vectorBytes,proto3" json:"vector_bytes,omitempty"` // deprecated in 1.29.0 - use vectors + // Deprecated: Marked as deprecated in v1/base_search.proto. + TargetVectors []string `protobuf:"bytes,5,rep,name=target_vectors,json=targetVectors,proto3" json:"target_vectors,omitempty"` // deprecated in 1.26 - use targets + Targets *Targets `protobuf:"bytes,6,opt,name=targets,proto3" json:"targets,omitempty"` + // Deprecated: Marked as deprecated in v1/base_search.proto. + VectorPerTarget map[string][]byte `protobuf:"bytes,7,rep,name=vector_per_target,json=vectorPerTarget,proto3" json:"vector_per_target,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // deprecated in 1.26.2 - use vector_for_targets + VectorForTargets []*VectorForTarget `protobuf:"bytes,8,rep,name=vector_for_targets,json=vectorForTargets,proto3" json:"vector_for_targets,omitempty"` + Vectors []*Vectors `protobuf:"bytes,9,rep,name=vectors,proto3" json:"vectors,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NearVector) Reset() { + *x = NearVector{} + mi := &file_v1_base_search_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NearVector) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NearVector) ProtoMessage() {} + +func (x *NearVector) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[5] + if x != nil { + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NearVector.ProtoReflect.Descriptor instead. +func (*NearVector) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{5} +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. +func (x *NearVector) GetVector() []float32 { + if x != nil { + return x.Vector + } + return nil +} + +func (x *NearVector) GetCertainty() float64 { + if x != nil && x.Certainty != nil { + return *x.Certainty + } + return 0 +} + +func (x *NearVector) GetDistance() float64 { + if x != nil && x.Distance != nil { + return *x.Distance + } + return 0 +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. +func (x *NearVector) GetVectorBytes() []byte { + if x != nil { + return x.VectorBytes + } + return nil +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. +func (x *NearVector) GetTargetVectors() []string { + if x != nil { + return x.TargetVectors + } + return nil +} + +func (x *NearVector) GetTargets() *Targets { + if x != nil { + return x.Targets + } + return nil +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. 
+func (x *NearVector) GetVectorPerTarget() map[string][]byte { + if x != nil { + return x.VectorPerTarget + } + return nil +} + +func (x *NearVector) GetVectorForTargets() []*VectorForTarget { + if x != nil { + return x.VectorForTargets + } + return nil +} + +func (x *NearVector) GetVectors() []*Vectors { + if x != nil { + return x.Vectors + } + return nil +} + +type NearObject struct { + state protoimpl.MessageState `protogen:"open.v1"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Certainty *float64 `protobuf:"fixed64,2,opt,name=certainty,proto3,oneof" json:"certainty,omitempty"` + Distance *float64 `protobuf:"fixed64,3,opt,name=distance,proto3,oneof" json:"distance,omitempty"` + // Deprecated: Marked as deprecated in v1/base_search.proto. + TargetVectors []string `protobuf:"bytes,4,rep,name=target_vectors,json=targetVectors,proto3" json:"target_vectors,omitempty"` // deprecated in 1.26 - use targets + Targets *Targets `protobuf:"bytes,5,opt,name=targets,proto3" json:"targets,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NearObject) Reset() { + *x = NearObject{} + mi := &file_v1_base_search_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NearObject) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NearObject) ProtoMessage() {} + +func (x *NearObject) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NearObject.ProtoReflect.Descriptor instead. 
+func (*NearObject) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{6} +} + +func (x *NearObject) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *NearObject) GetCertainty() float64 { + if x != nil && x.Certainty != nil { + return *x.Certainty + } + return 0 +} + +func (x *NearObject) GetDistance() float64 { + if x != nil && x.Distance != nil { + return *x.Distance + } + return 0 +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. +func (x *NearObject) GetTargetVectors() []string { + if x != nil { + return x.TargetVectors + } + return nil +} + +func (x *NearObject) GetTargets() *Targets { + if x != nil { + return x.Targets + } + return nil +} + +type NearTextSearch struct { + state protoimpl.MessageState `protogen:"open.v1"` + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + Query []string `protobuf:"bytes,1,rep,name=query,proto3" json:"query,omitempty"` + Certainty *float64 `protobuf:"fixed64,2,opt,name=certainty,proto3,oneof" json:"certainty,omitempty"` + Distance *float64 `protobuf:"fixed64,3,opt,name=distance,proto3,oneof" json:"distance,omitempty"` + MoveTo *NearTextSearch_Move `protobuf:"bytes,4,opt,name=move_to,json=moveTo,proto3,oneof" json:"move_to,omitempty"` + MoveAway *NearTextSearch_Move `protobuf:"bytes,5,opt,name=move_away,json=moveAway,proto3,oneof" json:"move_away,omitempty"` + // Deprecated: Marked as deprecated in v1/base_search.proto. 
+ TargetVectors []string `protobuf:"bytes,6,rep,name=target_vectors,json=targetVectors,proto3" json:"target_vectors,omitempty"` // deprecated in 1.26 - use targets + Targets *Targets `protobuf:"bytes,7,opt,name=targets,proto3" json:"targets,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NearTextSearch) Reset() { + *x = NearTextSearch{} + mi := &file_v1_base_search_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NearTextSearch) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NearTextSearch) ProtoMessage() {} + +func (x *NearTextSearch) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NearTextSearch.ProtoReflect.Descriptor instead. +func (*NearTextSearch) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{7} +} + +func (x *NearTextSearch) GetQuery() []string { + if x != nil { + return x.Query + } + return nil +} + +func (x *NearTextSearch) GetCertainty() float64 { + if x != nil && x.Certainty != nil { + return *x.Certainty + } + return 0 +} + +func (x *NearTextSearch) GetDistance() float64 { + if x != nil && x.Distance != nil { + return *x.Distance + } + return 0 +} + +func (x *NearTextSearch) GetMoveTo() *NearTextSearch_Move { + if x != nil { + return x.MoveTo + } + return nil +} + +func (x *NearTextSearch) GetMoveAway() *NearTextSearch_Move { + if x != nil { + return x.MoveAway + } + return nil +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. 
+func (x *NearTextSearch) GetTargetVectors() []string { + if x != nil { + return x.TargetVectors + } + return nil +} + +func (x *NearTextSearch) GetTargets() *Targets { + if x != nil { + return x.Targets + } + return nil +} + +type NearImageSearch struct { + state protoimpl.MessageState `protogen:"open.v1"` + Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` + Certainty *float64 `protobuf:"fixed64,2,opt,name=certainty,proto3,oneof" json:"certainty,omitempty"` + Distance *float64 `protobuf:"fixed64,3,opt,name=distance,proto3,oneof" json:"distance,omitempty"` + // Deprecated: Marked as deprecated in v1/base_search.proto. + TargetVectors []string `protobuf:"bytes,4,rep,name=target_vectors,json=targetVectors,proto3" json:"target_vectors,omitempty"` // deprecated in 1.26 - use targets + Targets *Targets `protobuf:"bytes,5,opt,name=targets,proto3" json:"targets,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NearImageSearch) Reset() { + *x = NearImageSearch{} + mi := &file_v1_base_search_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NearImageSearch) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NearImageSearch) ProtoMessage() {} + +func (x *NearImageSearch) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NearImageSearch.ProtoReflect.Descriptor instead. 
+func (*NearImageSearch) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{8} +} + +func (x *NearImageSearch) GetImage() string { + if x != nil { + return x.Image + } + return "" +} + +func (x *NearImageSearch) GetCertainty() float64 { + if x != nil && x.Certainty != nil { + return *x.Certainty + } + return 0 +} + +func (x *NearImageSearch) GetDistance() float64 { + if x != nil && x.Distance != nil { + return *x.Distance + } + return 0 +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. +func (x *NearImageSearch) GetTargetVectors() []string { + if x != nil { + return x.TargetVectors + } + return nil +} + +func (x *NearImageSearch) GetTargets() *Targets { + if x != nil { + return x.Targets + } + return nil +} + +type NearAudioSearch struct { + state protoimpl.MessageState `protogen:"open.v1"` + Audio string `protobuf:"bytes,1,opt,name=audio,proto3" json:"audio,omitempty"` + Certainty *float64 `protobuf:"fixed64,2,opt,name=certainty,proto3,oneof" json:"certainty,omitempty"` + Distance *float64 `protobuf:"fixed64,3,opt,name=distance,proto3,oneof" json:"distance,omitempty"` + // Deprecated: Marked as deprecated in v1/base_search.proto. 
+ TargetVectors []string `protobuf:"bytes,4,rep,name=target_vectors,json=targetVectors,proto3" json:"target_vectors,omitempty"` // deprecated in 1.26 - use targets + Targets *Targets `protobuf:"bytes,5,opt,name=targets,proto3" json:"targets,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NearAudioSearch) Reset() { + *x = NearAudioSearch{} + mi := &file_v1_base_search_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NearAudioSearch) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NearAudioSearch) ProtoMessage() {} + +func (x *NearAudioSearch) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NearAudioSearch.ProtoReflect.Descriptor instead. +func (*NearAudioSearch) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{9} +} + +func (x *NearAudioSearch) GetAudio() string { + if x != nil { + return x.Audio + } + return "" +} + +func (x *NearAudioSearch) GetCertainty() float64 { + if x != nil && x.Certainty != nil { + return *x.Certainty + } + return 0 +} + +func (x *NearAudioSearch) GetDistance() float64 { + if x != nil && x.Distance != nil { + return *x.Distance + } + return 0 +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. 
+func (x *NearAudioSearch) GetTargetVectors() []string { + if x != nil { + return x.TargetVectors + } + return nil +} + +func (x *NearAudioSearch) GetTargets() *Targets { + if x != nil { + return x.Targets + } + return nil +} + +type NearVideoSearch struct { + state protoimpl.MessageState `protogen:"open.v1"` + Video string `protobuf:"bytes,1,opt,name=video,proto3" json:"video,omitempty"` + Certainty *float64 `protobuf:"fixed64,2,opt,name=certainty,proto3,oneof" json:"certainty,omitempty"` + Distance *float64 `protobuf:"fixed64,3,opt,name=distance,proto3,oneof" json:"distance,omitempty"` + // Deprecated: Marked as deprecated in v1/base_search.proto. + TargetVectors []string `protobuf:"bytes,4,rep,name=target_vectors,json=targetVectors,proto3" json:"target_vectors,omitempty"` // deprecated in 1.26 - use targets + Targets *Targets `protobuf:"bytes,5,opt,name=targets,proto3" json:"targets,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NearVideoSearch) Reset() { + *x = NearVideoSearch{} + mi := &file_v1_base_search_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NearVideoSearch) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NearVideoSearch) ProtoMessage() {} + +func (x *NearVideoSearch) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NearVideoSearch.ProtoReflect.Descriptor instead. 
+func (*NearVideoSearch) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{10} +} + +func (x *NearVideoSearch) GetVideo() string { + if x != nil { + return x.Video + } + return "" +} + +func (x *NearVideoSearch) GetCertainty() float64 { + if x != nil && x.Certainty != nil { + return *x.Certainty + } + return 0 +} + +func (x *NearVideoSearch) GetDistance() float64 { + if x != nil && x.Distance != nil { + return *x.Distance + } + return 0 +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. +func (x *NearVideoSearch) GetTargetVectors() []string { + if x != nil { + return x.TargetVectors + } + return nil +} + +func (x *NearVideoSearch) GetTargets() *Targets { + if x != nil { + return x.Targets + } + return nil +} + +type NearDepthSearch struct { + state protoimpl.MessageState `protogen:"open.v1"` + Depth string `protobuf:"bytes,1,opt,name=depth,proto3" json:"depth,omitempty"` + Certainty *float64 `protobuf:"fixed64,2,opt,name=certainty,proto3,oneof" json:"certainty,omitempty"` + Distance *float64 `protobuf:"fixed64,3,opt,name=distance,proto3,oneof" json:"distance,omitempty"` + // Deprecated: Marked as deprecated in v1/base_search.proto. 
+ TargetVectors []string `protobuf:"bytes,4,rep,name=target_vectors,json=targetVectors,proto3" json:"target_vectors,omitempty"` // deprecated in 1.26 - use targets + Targets *Targets `protobuf:"bytes,5,opt,name=targets,proto3" json:"targets,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NearDepthSearch) Reset() { + *x = NearDepthSearch{} + mi := &file_v1_base_search_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NearDepthSearch) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NearDepthSearch) ProtoMessage() {} + +func (x *NearDepthSearch) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NearDepthSearch.ProtoReflect.Descriptor instead. +func (*NearDepthSearch) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{11} +} + +func (x *NearDepthSearch) GetDepth() string { + if x != nil { + return x.Depth + } + return "" +} + +func (x *NearDepthSearch) GetCertainty() float64 { + if x != nil && x.Certainty != nil { + return *x.Certainty + } + return 0 +} + +func (x *NearDepthSearch) GetDistance() float64 { + if x != nil && x.Distance != nil { + return *x.Distance + } + return 0 +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. 
+func (x *NearDepthSearch) GetTargetVectors() []string { + if x != nil { + return x.TargetVectors + } + return nil +} + +func (x *NearDepthSearch) GetTargets() *Targets { + if x != nil { + return x.Targets + } + return nil +} + +type NearThermalSearch struct { + state protoimpl.MessageState `protogen:"open.v1"` + Thermal string `protobuf:"bytes,1,opt,name=thermal,proto3" json:"thermal,omitempty"` + Certainty *float64 `protobuf:"fixed64,2,opt,name=certainty,proto3,oneof" json:"certainty,omitempty"` + Distance *float64 `protobuf:"fixed64,3,opt,name=distance,proto3,oneof" json:"distance,omitempty"` + // Deprecated: Marked as deprecated in v1/base_search.proto. + TargetVectors []string `protobuf:"bytes,4,rep,name=target_vectors,json=targetVectors,proto3" json:"target_vectors,omitempty"` // deprecated in 1.26 - use targets + Targets *Targets `protobuf:"bytes,5,opt,name=targets,proto3" json:"targets,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NearThermalSearch) Reset() { + *x = NearThermalSearch{} + mi := &file_v1_base_search_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NearThermalSearch) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NearThermalSearch) ProtoMessage() {} + +func (x *NearThermalSearch) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NearThermalSearch.ProtoReflect.Descriptor instead. 
+func (*NearThermalSearch) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{12} +} + +func (x *NearThermalSearch) GetThermal() string { + if x != nil { + return x.Thermal + } + return "" +} + +func (x *NearThermalSearch) GetCertainty() float64 { + if x != nil && x.Certainty != nil { + return *x.Certainty + } + return 0 +} + +func (x *NearThermalSearch) GetDistance() float64 { + if x != nil && x.Distance != nil { + return *x.Distance + } + return 0 +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. +func (x *NearThermalSearch) GetTargetVectors() []string { + if x != nil { + return x.TargetVectors + } + return nil +} + +func (x *NearThermalSearch) GetTargets() *Targets { + if x != nil { + return x.Targets + } + return nil +} + +type NearIMUSearch struct { + state protoimpl.MessageState `protogen:"open.v1"` + Imu string `protobuf:"bytes,1,opt,name=imu,proto3" json:"imu,omitempty"` + Certainty *float64 `protobuf:"fixed64,2,opt,name=certainty,proto3,oneof" json:"certainty,omitempty"` + Distance *float64 `protobuf:"fixed64,3,opt,name=distance,proto3,oneof" json:"distance,omitempty"` + // Deprecated: Marked as deprecated in v1/base_search.proto. 
+ TargetVectors []string `protobuf:"bytes,4,rep,name=target_vectors,json=targetVectors,proto3" json:"target_vectors,omitempty"` // deprecated in 1.26 - use targets + Targets *Targets `protobuf:"bytes,5,opt,name=targets,proto3" json:"targets,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NearIMUSearch) Reset() { + *x = NearIMUSearch{} + mi := &file_v1_base_search_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NearIMUSearch) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NearIMUSearch) ProtoMessage() {} + +func (x *NearIMUSearch) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[13] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NearIMUSearch.ProtoReflect.Descriptor instead. +func (*NearIMUSearch) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{13} +} + +func (x *NearIMUSearch) GetImu() string { + if x != nil { + return x.Imu + } + return "" +} + +func (x *NearIMUSearch) GetCertainty() float64 { + if x != nil && x.Certainty != nil { + return *x.Certainty + } + return 0 +} + +func (x *NearIMUSearch) GetDistance() float64 { + if x != nil && x.Distance != nil { + return *x.Distance + } + return 0 +} + +// Deprecated: Marked as deprecated in v1/base_search.proto. 
+func (x *NearIMUSearch) GetTargetVectors() []string { + if x != nil { + return x.TargetVectors + } + return nil +} + +func (x *NearIMUSearch) GetTargets() *Targets { + if x != nil { + return x.Targets + } + return nil +} + +type BM25 struct { + state protoimpl.MessageState `protogen:"open.v1"` + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + Properties []string `protobuf:"bytes,2,rep,name=properties,proto3" json:"properties,omitempty"` + SearchOperator *SearchOperatorOptions `protobuf:"bytes,3,opt,name=search_operator,json=searchOperator,proto3,oneof" json:"search_operator,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BM25) Reset() { + *x = BM25{} + mi := &file_v1_base_search_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BM25) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BM25) ProtoMessage() {} + +func (x *BM25) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[14] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BM25.ProtoReflect.Descriptor instead. 
+func (*BM25) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{14} +} + +func (x *BM25) GetQuery() string { + if x != nil { + return x.Query + } + return "" +} + +func (x *BM25) GetProperties() []string { + if x != nil { + return x.Properties + } + return nil +} + +func (x *BM25) GetSearchOperator() *SearchOperatorOptions { + if x != nil { + return x.SearchOperator + } + return nil +} + +type NearTextSearch_Move struct { + state protoimpl.MessageState `protogen:"open.v1"` + Force float32 `protobuf:"fixed32,1,opt,name=force,proto3" json:"force,omitempty"` + Concepts []string `protobuf:"bytes,2,rep,name=concepts,proto3" json:"concepts,omitempty"` + Uuids []string `protobuf:"bytes,3,rep,name=uuids,proto3" json:"uuids,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NearTextSearch_Move) Reset() { + *x = NearTextSearch_Move{} + mi := &file_v1_base_search_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NearTextSearch_Move) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NearTextSearch_Move) ProtoMessage() {} + +func (x *NearTextSearch_Move) ProtoReflect() protoreflect.Message { + mi := &file_v1_base_search_proto_msgTypes[16] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NearTextSearch_Move.ProtoReflect.Descriptor instead. 
+func (*NearTextSearch_Move) Descriptor() ([]byte, []int) { + return file_v1_base_search_proto_rawDescGZIP(), []int{7, 0} +} + +func (x *NearTextSearch_Move) GetForce() float32 { + if x != nil { + return x.Force + } + return 0 +} + +func (x *NearTextSearch_Move) GetConcepts() []string { + if x != nil { + return x.Concepts + } + return nil +} + +func (x *NearTextSearch_Move) GetUuids() []string { + if x != nil { + return x.Uuids + } + return nil +} + +var File_v1_base_search_proto protoreflect.FileDescriptor + +const file_v1_base_search_proto_rawDesc = "" + + "\n" + + "\x14v1/base_search.proto\x12\vweaviate.v1\x1a\rv1/base.proto\"B\n" + + "\x10WeightsForTarget\x12\x16\n" + + "\x06target\x18\x01 \x01(\tR\x06target\x12\x16\n" + + "\x06weight\x18\x02 \x01(\x02R\x06weight\"\xc7\x01\n" + + "\aTargets\x12%\n" + + "\x0etarget_vectors\x18\x01 \x03(\tR\rtargetVectors\x12@\n" + + "\vcombination\x18\x02 \x01(\x0e2\x1e.weaviate.v1.CombinationMethodR\vcombination\x12M\n" + + "\x13weights_for_targets\x18\x04 \x03(\v2\x1d.weaviate.v1.WeightsForTargetR\x11weightsForTargetsJ\x04\b\x03\x10\x04\"|\n" + + "\x0fVectorForTarget\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12%\n" + + "\fvector_bytes\x18\x02 \x01(\fB\x02\x18\x01R\vvectorBytes\x12.\n" + + "\avectors\x18\x03 \x03(\v2\x14.weaviate.v1.VectorsR\avectors\"\x81\x02\n" + + "\x15SearchOperatorOptions\x12G\n" + + "\boperator\x18\x01 \x01(\x0e2+.weaviate.v1.SearchOperatorOptions.OperatorR\boperator\x12:\n" + + "\x17minimum_or_tokens_match\x18\x02 \x01(\x05H\x00R\x14minimumOrTokensMatch\x88\x01\x01\"G\n" + + "\bOperator\x12\x18\n" + + "\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x0f\n" + + "\vOPERATOR_OR\x10\x01\x12\x10\n" + + "\fOPERATOR_AND\x10\x02B\x1a\n" + + "\x18_minimum_or_tokens_match\"\xe6\x05\n" + + "\x06Hybrid\x12\x14\n" + + "\x05query\x18\x01 \x01(\tR\x05query\x12\x1e\n" + + "\n" + + "properties\x18\x02 \x03(\tR\n" + + "properties\x12\x1a\n" + + "\x06vector\x18\x03 \x03(\x02B\x02\x18\x01R\x06vector\x12\x14\n" + + 
"\x05alpha\x18\x04 \x01(\x02R\x05alpha\x12?\n" + + "\vfusion_type\x18\x05 \x01(\x0e2\x1e.weaviate.v1.Hybrid.FusionTypeR\n" + + "fusionType\x12%\n" + + "\fvector_bytes\x18\x06 \x01(\fB\x02\x18\x01R\vvectorBytes\x12)\n" + + "\x0etarget_vectors\x18\a \x03(\tB\x02\x18\x01R\rtargetVectors\x128\n" + + "\tnear_text\x18\b \x01(\v2\x1b.weaviate.v1.NearTextSearchR\bnearText\x128\n" + + "\vnear_vector\x18\t \x01(\v2\x17.weaviate.v1.NearVectorR\n" + + "nearVector\x12.\n" + + "\atargets\x18\n" + + " \x01(\v2\x14.weaviate.v1.TargetsR\atargets\x12Y\n" + + "\x14bm25_search_operator\x18\v \x01(\v2\".weaviate.v1.SearchOperatorOptionsH\x01R\x12bm25SearchOperator\x88\x01\x01\x12)\n" + + "\x0fvector_distance\x18\x14 \x01(\x02H\x00R\x0evectorDistance\x12.\n" + + "\avectors\x18\x15 \x03(\v2\x14.weaviate.v1.VectorsR\avectors\"a\n" + + "\n" + + "FusionType\x12\x1b\n" + + "\x17FUSION_TYPE_UNSPECIFIED\x10\x00\x12\x16\n" + + "\x12FUSION_TYPE_RANKED\x10\x01\x12\x1e\n" + + "\x1aFUSION_TYPE_RELATIVE_SCORE\x10\x02B\v\n" + + "\tthresholdB\x17\n" + + "\x15_bm25_search_operator\"\xa7\x04\n" + + "\n" + + "NearVector\x12\x1a\n" + + "\x06vector\x18\x01 \x03(\x02B\x02\x18\x01R\x06vector\x12!\n" + + "\tcertainty\x18\x02 \x01(\x01H\x00R\tcertainty\x88\x01\x01\x12\x1f\n" + + "\bdistance\x18\x03 \x01(\x01H\x01R\bdistance\x88\x01\x01\x12%\n" + + "\fvector_bytes\x18\x04 \x01(\fB\x02\x18\x01R\vvectorBytes\x12)\n" + + "\x0etarget_vectors\x18\x05 \x03(\tB\x02\x18\x01R\rtargetVectors\x12.\n" + + "\atargets\x18\x06 \x01(\v2\x14.weaviate.v1.TargetsR\atargets\x12\\\n" + + "\x11vector_per_target\x18\a \x03(\v2,.weaviate.v1.NearVector.VectorPerTargetEntryB\x02\x18\x01R\x0fvectorPerTarget\x12J\n" + + "\x12vector_for_targets\x18\b \x03(\v2\x1c.weaviate.v1.VectorForTargetR\x10vectorForTargets\x12.\n" + + "\avectors\x18\t \x03(\v2\x14.weaviate.v1.VectorsR\avectors\x1aB\n" + + "\x14VectorPerTargetEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\fR\x05value:\x028\x01B\f\n" + + 
"\n" + + "_certaintyB\v\n" + + "\t_distance\"\xd6\x01\n" + + "\n" + + "NearObject\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\x12!\n" + + "\tcertainty\x18\x02 \x01(\x01H\x00R\tcertainty\x88\x01\x01\x12\x1f\n" + + "\bdistance\x18\x03 \x01(\x01H\x01R\bdistance\x88\x01\x01\x12)\n" + + "\x0etarget_vectors\x18\x04 \x03(\tB\x02\x18\x01R\rtargetVectors\x12.\n" + + "\atargets\x18\x05 \x01(\v2\x14.weaviate.v1.TargetsR\atargetsB\f\n" + + "\n" + + "_certaintyB\v\n" + + "\t_distance\"\xce\x03\n" + + "\x0eNearTextSearch\x12\x14\n" + + "\x05query\x18\x01 \x03(\tR\x05query\x12!\n" + + "\tcertainty\x18\x02 \x01(\x01H\x00R\tcertainty\x88\x01\x01\x12\x1f\n" + + "\bdistance\x18\x03 \x01(\x01H\x01R\bdistance\x88\x01\x01\x12>\n" + + "\amove_to\x18\x04 \x01(\v2 .weaviate.v1.NearTextSearch.MoveH\x02R\x06moveTo\x88\x01\x01\x12B\n" + + "\tmove_away\x18\x05 \x01(\v2 .weaviate.v1.NearTextSearch.MoveH\x03R\bmoveAway\x88\x01\x01\x12)\n" + + "\x0etarget_vectors\x18\x06 \x03(\tB\x02\x18\x01R\rtargetVectors\x12.\n" + + "\atargets\x18\a \x01(\v2\x14.weaviate.v1.TargetsR\atargets\x1aN\n" + + "\x04Move\x12\x14\n" + + "\x05force\x18\x01 \x01(\x02R\x05force\x12\x1a\n" + + "\bconcepts\x18\x02 \x03(\tR\bconcepts\x12\x14\n" + + "\x05uuids\x18\x03 \x03(\tR\x05uuidsB\f\n" + + "\n" + + "_certaintyB\v\n" + + "\t_distanceB\n" + + "\n" + + "\b_move_toB\f\n" + + "\n" + + "_move_away\"\xe1\x01\n" + + "\x0fNearImageSearch\x12\x14\n" + + "\x05image\x18\x01 \x01(\tR\x05image\x12!\n" + + "\tcertainty\x18\x02 \x01(\x01H\x00R\tcertainty\x88\x01\x01\x12\x1f\n" + + "\bdistance\x18\x03 \x01(\x01H\x01R\bdistance\x88\x01\x01\x12)\n" + + "\x0etarget_vectors\x18\x04 \x03(\tB\x02\x18\x01R\rtargetVectors\x12.\n" + + "\atargets\x18\x05 \x01(\v2\x14.weaviate.v1.TargetsR\atargetsB\f\n" + + "\n" + + "_certaintyB\v\n" + + "\t_distance\"\xe1\x01\n" + + "\x0fNearAudioSearch\x12\x14\n" + + "\x05audio\x18\x01 \x01(\tR\x05audio\x12!\n" + + "\tcertainty\x18\x02 \x01(\x01H\x00R\tcertainty\x88\x01\x01\x12\x1f\n" + + "\bdistance\x18\x03 
\x01(\x01H\x01R\bdistance\x88\x01\x01\x12)\n" + + "\x0etarget_vectors\x18\x04 \x03(\tB\x02\x18\x01R\rtargetVectors\x12.\n" + + "\atargets\x18\x05 \x01(\v2\x14.weaviate.v1.TargetsR\atargetsB\f\n" + + "\n" + + "_certaintyB\v\n" + + "\t_distance\"\xe1\x01\n" + + "\x0fNearVideoSearch\x12\x14\n" + + "\x05video\x18\x01 \x01(\tR\x05video\x12!\n" + + "\tcertainty\x18\x02 \x01(\x01H\x00R\tcertainty\x88\x01\x01\x12\x1f\n" + + "\bdistance\x18\x03 \x01(\x01H\x01R\bdistance\x88\x01\x01\x12)\n" + + "\x0etarget_vectors\x18\x04 \x03(\tB\x02\x18\x01R\rtargetVectors\x12.\n" + + "\atargets\x18\x05 \x01(\v2\x14.weaviate.v1.TargetsR\atargetsB\f\n" + + "\n" + + "_certaintyB\v\n" + + "\t_distance\"\xe1\x01\n" + + "\x0fNearDepthSearch\x12\x14\n" + + "\x05depth\x18\x01 \x01(\tR\x05depth\x12!\n" + + "\tcertainty\x18\x02 \x01(\x01H\x00R\tcertainty\x88\x01\x01\x12\x1f\n" + + "\bdistance\x18\x03 \x01(\x01H\x01R\bdistance\x88\x01\x01\x12)\n" + + "\x0etarget_vectors\x18\x04 \x03(\tB\x02\x18\x01R\rtargetVectors\x12.\n" + + "\atargets\x18\x05 \x01(\v2\x14.weaviate.v1.TargetsR\atargetsB\f\n" + + "\n" + + "_certaintyB\v\n" + + "\t_distance\"\xe7\x01\n" + + "\x11NearThermalSearch\x12\x18\n" + + "\athermal\x18\x01 \x01(\tR\athermal\x12!\n" + + "\tcertainty\x18\x02 \x01(\x01H\x00R\tcertainty\x88\x01\x01\x12\x1f\n" + + "\bdistance\x18\x03 \x01(\x01H\x01R\bdistance\x88\x01\x01\x12)\n" + + "\x0etarget_vectors\x18\x04 \x03(\tB\x02\x18\x01R\rtargetVectors\x12.\n" + + "\atargets\x18\x05 \x01(\v2\x14.weaviate.v1.TargetsR\atargetsB\f\n" + + "\n" + + "_certaintyB\v\n" + + "\t_distance\"\xdb\x01\n" + + "\rNearIMUSearch\x12\x10\n" + + "\x03imu\x18\x01 \x01(\tR\x03imu\x12!\n" + + "\tcertainty\x18\x02 \x01(\x01H\x00R\tcertainty\x88\x01\x01\x12\x1f\n" + + "\bdistance\x18\x03 \x01(\x01H\x01R\bdistance\x88\x01\x01\x12)\n" + + "\x0etarget_vectors\x18\x04 \x03(\tB\x02\x18\x01R\rtargetVectors\x12.\n" + + "\atargets\x18\x05 \x01(\v2\x14.weaviate.v1.TargetsR\atargetsB\f\n" + + "\n" + + "_certaintyB\v\n" + + 
"\t_distance\"\xa2\x01\n" + + "\x04BM25\x12\x14\n" + + "\x05query\x18\x01 \x01(\tR\x05query\x12\x1e\n" + + "\n" + + "properties\x18\x02 \x03(\tR\n" + + "properties\x12P\n" + + "\x0fsearch_operator\x18\x03 \x01(\v2\".weaviate.v1.SearchOperatorOptionsH\x00R\x0esearchOperator\x88\x01\x01B\x12\n" + + "\x10_search_operator*\xee\x01\n" + + "\x11CombinationMethod\x12\"\n" + + "\x1eCOMBINATION_METHOD_UNSPECIFIED\x10\x00\x12\x1f\n" + + "\x1bCOMBINATION_METHOD_TYPE_SUM\x10\x01\x12\x1f\n" + + "\x1bCOMBINATION_METHOD_TYPE_MIN\x10\x02\x12#\n" + + "\x1fCOMBINATION_METHOD_TYPE_AVERAGE\x10\x03\x12*\n" + + "&COMBINATION_METHOD_TYPE_RELATIVE_SCORE\x10\x04\x12\"\n" + + "\x1eCOMBINATION_METHOD_TYPE_MANUAL\x10\x05Bt\n" + + "#io.weaviate.client.grpc.protocol.v1B\x17WeaviateProtoBaseSearchZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var ( + file_v1_base_search_proto_rawDescOnce sync.Once + file_v1_base_search_proto_rawDescData []byte +) + +func file_v1_base_search_proto_rawDescGZIP() []byte { + file_v1_base_search_proto_rawDescOnce.Do(func() { + file_v1_base_search_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_v1_base_search_proto_rawDesc), len(file_v1_base_search_proto_rawDesc))) + }) + return file_v1_base_search_proto_rawDescData +} + +var file_v1_base_search_proto_enumTypes = make([]protoimpl.EnumInfo, 3) +var file_v1_base_search_proto_msgTypes = make([]protoimpl.MessageInfo, 17) +var file_v1_base_search_proto_goTypes = []any{ + (CombinationMethod)(0), // 0: weaviate.v1.CombinationMethod + (SearchOperatorOptions_Operator)(0), // 1: weaviate.v1.SearchOperatorOptions.Operator + (Hybrid_FusionType)(0), // 2: weaviate.v1.Hybrid.FusionType + (*WeightsForTarget)(nil), // 3: weaviate.v1.WeightsForTarget + (*Targets)(nil), // 4: weaviate.v1.Targets + (*VectorForTarget)(nil), // 5: weaviate.v1.VectorForTarget + (*SearchOperatorOptions)(nil), // 6: weaviate.v1.SearchOperatorOptions + (*Hybrid)(nil), // 7: weaviate.v1.Hybrid + 
(*NearVector)(nil), // 8: weaviate.v1.NearVector + (*NearObject)(nil), // 9: weaviate.v1.NearObject + (*NearTextSearch)(nil), // 10: weaviate.v1.NearTextSearch + (*NearImageSearch)(nil), // 11: weaviate.v1.NearImageSearch + (*NearAudioSearch)(nil), // 12: weaviate.v1.NearAudioSearch + (*NearVideoSearch)(nil), // 13: weaviate.v1.NearVideoSearch + (*NearDepthSearch)(nil), // 14: weaviate.v1.NearDepthSearch + (*NearThermalSearch)(nil), // 15: weaviate.v1.NearThermalSearch + (*NearIMUSearch)(nil), // 16: weaviate.v1.NearIMUSearch + (*BM25)(nil), // 17: weaviate.v1.BM25 + nil, // 18: weaviate.v1.NearVector.VectorPerTargetEntry + (*NearTextSearch_Move)(nil), // 19: weaviate.v1.NearTextSearch.Move + (*Vectors)(nil), // 20: weaviate.v1.Vectors +} +var file_v1_base_search_proto_depIdxs = []int32{ + 0, // 0: weaviate.v1.Targets.combination:type_name -> weaviate.v1.CombinationMethod + 3, // 1: weaviate.v1.Targets.weights_for_targets:type_name -> weaviate.v1.WeightsForTarget + 20, // 2: weaviate.v1.VectorForTarget.vectors:type_name -> weaviate.v1.Vectors + 1, // 3: weaviate.v1.SearchOperatorOptions.operator:type_name -> weaviate.v1.SearchOperatorOptions.Operator + 2, // 4: weaviate.v1.Hybrid.fusion_type:type_name -> weaviate.v1.Hybrid.FusionType + 10, // 5: weaviate.v1.Hybrid.near_text:type_name -> weaviate.v1.NearTextSearch + 8, // 6: weaviate.v1.Hybrid.near_vector:type_name -> weaviate.v1.NearVector + 4, // 7: weaviate.v1.Hybrid.targets:type_name -> weaviate.v1.Targets + 6, // 8: weaviate.v1.Hybrid.bm25_search_operator:type_name -> weaviate.v1.SearchOperatorOptions + 20, // 9: weaviate.v1.Hybrid.vectors:type_name -> weaviate.v1.Vectors + 4, // 10: weaviate.v1.NearVector.targets:type_name -> weaviate.v1.Targets + 18, // 11: weaviate.v1.NearVector.vector_per_target:type_name -> weaviate.v1.NearVector.VectorPerTargetEntry + 5, // 12: weaviate.v1.NearVector.vector_for_targets:type_name -> weaviate.v1.VectorForTarget + 20, // 13: weaviate.v1.NearVector.vectors:type_name -> 
weaviate.v1.Vectors + 4, // 14: weaviate.v1.NearObject.targets:type_name -> weaviate.v1.Targets + 19, // 15: weaviate.v1.NearTextSearch.move_to:type_name -> weaviate.v1.NearTextSearch.Move + 19, // 16: weaviate.v1.NearTextSearch.move_away:type_name -> weaviate.v1.NearTextSearch.Move + 4, // 17: weaviate.v1.NearTextSearch.targets:type_name -> weaviate.v1.Targets + 4, // 18: weaviate.v1.NearImageSearch.targets:type_name -> weaviate.v1.Targets + 4, // 19: weaviate.v1.NearAudioSearch.targets:type_name -> weaviate.v1.Targets + 4, // 20: weaviate.v1.NearVideoSearch.targets:type_name -> weaviate.v1.Targets + 4, // 21: weaviate.v1.NearDepthSearch.targets:type_name -> weaviate.v1.Targets + 4, // 22: weaviate.v1.NearThermalSearch.targets:type_name -> weaviate.v1.Targets + 4, // 23: weaviate.v1.NearIMUSearch.targets:type_name -> weaviate.v1.Targets + 6, // 24: weaviate.v1.BM25.search_operator:type_name -> weaviate.v1.SearchOperatorOptions + 25, // [25:25] is the sub-list for method output_type + 25, // [25:25] is the sub-list for method input_type + 25, // [25:25] is the sub-list for extension type_name + 25, // [25:25] is the sub-list for extension extendee + 0, // [0:25] is the sub-list for field type_name +} + +func init() { file_v1_base_search_proto_init() } +func file_v1_base_search_proto_init() { + if File_v1_base_search_proto != nil { + return + } + file_v1_base_proto_init() + file_v1_base_search_proto_msgTypes[3].OneofWrappers = []any{} + file_v1_base_search_proto_msgTypes[4].OneofWrappers = []any{ + (*Hybrid_VectorDistance)(nil), + } + file_v1_base_search_proto_msgTypes[5].OneofWrappers = []any{} + file_v1_base_search_proto_msgTypes[6].OneofWrappers = []any{} + file_v1_base_search_proto_msgTypes[7].OneofWrappers = []any{} + file_v1_base_search_proto_msgTypes[8].OneofWrappers = []any{} + file_v1_base_search_proto_msgTypes[9].OneofWrappers = []any{} + file_v1_base_search_proto_msgTypes[10].OneofWrappers = []any{} + file_v1_base_search_proto_msgTypes[11].OneofWrappers = 
[]any{} + file_v1_base_search_proto_msgTypes[12].OneofWrappers = []any{} + file_v1_base_search_proto_msgTypes[13].OneofWrappers = []any{} + file_v1_base_search_proto_msgTypes[14].OneofWrappers = []any{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v1_base_search_proto_rawDesc), len(file_v1_base_search_proto_rawDesc)), + NumEnums: 3, + NumMessages: 17, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_v1_base_search_proto_goTypes, + DependencyIndexes: file_v1_base_search_proto_depIdxs, + EnumInfos: file_v1_base_search_proto_enumTypes, + MessageInfos: file_v1_base_search_proto_msgTypes, + }.Build() + File_v1_base_search_proto = out.File + file_v1_base_search_proto_goTypes = nil + file_v1_base_search_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/batch.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/batch.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..0ad09f59bc73d0f484064e724408f061d304dc8e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/batch.pb.go @@ -0,0 +1,1682 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package protocol + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + structpb "google.golang.org/protobuf/types/known/structpb" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type BatchObjectsRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Objects []*BatchObject `protobuf:"bytes,1,rep,name=objects,proto3" json:"objects,omitempty"` + ConsistencyLevel *ConsistencyLevel `protobuf:"varint,2,opt,name=consistency_level,json=consistencyLevel,proto3,enum=weaviate.v1.ConsistencyLevel,oneof" json:"consistency_level,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchObjectsRequest) Reset() { + *x = BatchObjectsRequest{} + mi := &file_v1_batch_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchObjectsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchObjectsRequest) ProtoMessage() {} + +func (x *BatchObjectsRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchObjectsRequest.ProtoReflect.Descriptor instead. 
+func (*BatchObjectsRequest) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{0} +} + +func (x *BatchObjectsRequest) GetObjects() []*BatchObject { + if x != nil { + return x.Objects + } + return nil +} + +func (x *BatchObjectsRequest) GetConsistencyLevel() ConsistencyLevel { + if x != nil && x.ConsistencyLevel != nil { + return *x.ConsistencyLevel + } + return ConsistencyLevel_CONSISTENCY_LEVEL_UNSPECIFIED +} + +type BatchReferencesRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + References []*BatchReference `protobuf:"bytes,1,rep,name=references,proto3" json:"references,omitempty"` + ConsistencyLevel *ConsistencyLevel `protobuf:"varint,2,opt,name=consistency_level,json=consistencyLevel,proto3,enum=weaviate.v1.ConsistencyLevel,oneof" json:"consistency_level,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchReferencesRequest) Reset() { + *x = BatchReferencesRequest{} + mi := &file_v1_batch_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchReferencesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchReferencesRequest) ProtoMessage() {} + +func (x *BatchReferencesRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchReferencesRequest.ProtoReflect.Descriptor instead. 
+func (*BatchReferencesRequest) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{1} +} + +func (x *BatchReferencesRequest) GetReferences() []*BatchReference { + if x != nil { + return x.References + } + return nil +} + +func (x *BatchReferencesRequest) GetConsistencyLevel() ConsistencyLevel { + if x != nil && x.ConsistencyLevel != nil { + return *x.ConsistencyLevel + } + return ConsistencyLevel_CONSISTENCY_LEVEL_UNSPECIFIED +} + +type BatchSendRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + StreamId string `protobuf:"bytes,1,opt,name=stream_id,json=streamId,proto3" json:"stream_id,omitempty"` + // Types that are valid to be assigned to Message: + // + // *BatchSendRequest_Objects_ + // *BatchSendRequest_References_ + // *BatchSendRequest_Stop_ + Message isBatchSendRequest_Message `protobuf_oneof:"message"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchSendRequest) Reset() { + *x = BatchSendRequest{} + mi := &file_v1_batch_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchSendRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchSendRequest) ProtoMessage() {} + +func (x *BatchSendRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchSendRequest.ProtoReflect.Descriptor instead. 
+func (*BatchSendRequest) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{2} +} + +func (x *BatchSendRequest) GetStreamId() string { + if x != nil { + return x.StreamId + } + return "" +} + +func (x *BatchSendRequest) GetMessage() isBatchSendRequest_Message { + if x != nil { + return x.Message + } + return nil +} + +func (x *BatchSendRequest) GetObjects() *BatchSendRequest_Objects { + if x != nil { + if x, ok := x.Message.(*BatchSendRequest_Objects_); ok { + return x.Objects + } + } + return nil +} + +func (x *BatchSendRequest) GetReferences() *BatchSendRequest_References { + if x != nil { + if x, ok := x.Message.(*BatchSendRequest_References_); ok { + return x.References + } + } + return nil +} + +func (x *BatchSendRequest) GetStop() *BatchSendRequest_Stop { + if x != nil { + if x, ok := x.Message.(*BatchSendRequest_Stop_); ok { + return x.Stop + } + } + return nil +} + +type isBatchSendRequest_Message interface { + isBatchSendRequest_Message() +} + +type BatchSendRequest_Objects_ struct { + Objects *BatchSendRequest_Objects `protobuf:"bytes,2,opt,name=objects,proto3,oneof"` +} + +type BatchSendRequest_References_ struct { + References *BatchSendRequest_References `protobuf:"bytes,3,opt,name=references,proto3,oneof"` +} + +type BatchSendRequest_Stop_ struct { + Stop *BatchSendRequest_Stop `protobuf:"bytes,4,opt,name=stop,proto3,oneof"` +} + +func (*BatchSendRequest_Objects_) isBatchSendRequest_Message() {} + +func (*BatchSendRequest_References_) isBatchSendRequest_Message() {} + +func (*BatchSendRequest_Stop_) isBatchSendRequest_Message() {} + +type BatchSendReply struct { + state protoimpl.MessageState `protogen:"open.v1"` + NextBatchSize int32 `protobuf:"varint,1,opt,name=next_batch_size,json=nextBatchSize,proto3" json:"next_batch_size,omitempty"` + BackoffSeconds float32 `protobuf:"fixed32,2,opt,name=backoff_seconds,json=backoffSeconds,proto3" json:"backoff_seconds,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache 
protoimpl.SizeCache +} + +func (x *BatchSendReply) Reset() { + *x = BatchSendReply{} + mi := &file_v1_batch_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchSendReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchSendReply) ProtoMessage() {} + +func (x *BatchSendReply) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchSendReply.ProtoReflect.Descriptor instead. +func (*BatchSendReply) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{3} +} + +func (x *BatchSendReply) GetNextBatchSize() int32 { + if x != nil { + return x.NextBatchSize + } + return 0 +} + +func (x *BatchSendReply) GetBackoffSeconds() float32 { + if x != nil { + return x.BackoffSeconds + } + return 0 +} + +type BatchStreamRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + ConsistencyLevel *ConsistencyLevel `protobuf:"varint,1,opt,name=consistency_level,json=consistencyLevel,proto3,enum=weaviate.v1.ConsistencyLevel,oneof" json:"consistency_level,omitempty"` + ObjectIndex *int32 `protobuf:"varint,2,opt,name=object_index,json=objectIndex,proto3,oneof" json:"object_index,omitempty"` + ReferenceIndex *int32 `protobuf:"varint,3,opt,name=reference_index,json=referenceIndex,proto3,oneof" json:"reference_index,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchStreamRequest) Reset() { + *x = BatchStreamRequest{} + mi := &file_v1_batch_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchStreamRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchStreamRequest) ProtoMessage() {} 
+ +func (x *BatchStreamRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchStreamRequest.ProtoReflect.Descriptor instead. +func (*BatchStreamRequest) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{4} +} + +func (x *BatchStreamRequest) GetConsistencyLevel() ConsistencyLevel { + if x != nil && x.ConsistencyLevel != nil { + return *x.ConsistencyLevel + } + return ConsistencyLevel_CONSISTENCY_LEVEL_UNSPECIFIED +} + +func (x *BatchStreamRequest) GetObjectIndex() int32 { + if x != nil && x.ObjectIndex != nil { + return *x.ObjectIndex + } + return 0 +} + +func (x *BatchStreamRequest) GetReferenceIndex() int32 { + if x != nil && x.ReferenceIndex != nil { + return *x.ReferenceIndex + } + return 0 +} + +type BatchStreamMessage struct { + state protoimpl.MessageState `protogen:"open.v1"` + StreamId string `protobuf:"bytes,1,opt,name=stream_id,json=streamId,proto3" json:"stream_id,omitempty"` + // Types that are valid to be assigned to Message: + // + // *BatchStreamMessage_Error_ + // *BatchStreamMessage_Start_ + // *BatchStreamMessage_Stop_ + // *BatchStreamMessage_Shutdown_ + // *BatchStreamMessage_ShuttingDown_ + Message isBatchStreamMessage_Message `protobuf_oneof:"message"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchStreamMessage) Reset() { + *x = BatchStreamMessage{} + mi := &file_v1_batch_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchStreamMessage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchStreamMessage) ProtoMessage() {} + +func (x *BatchStreamMessage) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[5] + if x != 
nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchStreamMessage.ProtoReflect.Descriptor instead. +func (*BatchStreamMessage) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{5} +} + +func (x *BatchStreamMessage) GetStreamId() string { + if x != nil { + return x.StreamId + } + return "" +} + +func (x *BatchStreamMessage) GetMessage() isBatchStreamMessage_Message { + if x != nil { + return x.Message + } + return nil +} + +func (x *BatchStreamMessage) GetError() *BatchStreamMessage_Error { + if x != nil { + if x, ok := x.Message.(*BatchStreamMessage_Error_); ok { + return x.Error + } + } + return nil +} + +func (x *BatchStreamMessage) GetStart() *BatchStreamMessage_Start { + if x != nil { + if x, ok := x.Message.(*BatchStreamMessage_Start_); ok { + return x.Start + } + } + return nil +} + +func (x *BatchStreamMessage) GetStop() *BatchStreamMessage_Stop { + if x != nil { + if x, ok := x.Message.(*BatchStreamMessage_Stop_); ok { + return x.Stop + } + } + return nil +} + +func (x *BatchStreamMessage) GetShutdown() *BatchStreamMessage_Shutdown { + if x != nil { + if x, ok := x.Message.(*BatchStreamMessage_Shutdown_); ok { + return x.Shutdown + } + } + return nil +} + +func (x *BatchStreamMessage) GetShuttingDown() *BatchStreamMessage_ShuttingDown { + if x != nil { + if x, ok := x.Message.(*BatchStreamMessage_ShuttingDown_); ok { + return x.ShuttingDown + } + } + return nil +} + +type isBatchStreamMessage_Message interface { + isBatchStreamMessage_Message() +} + +type BatchStreamMessage_Error_ struct { + Error *BatchStreamMessage_Error `protobuf:"bytes,2,opt,name=error,proto3,oneof"` +} + +type BatchStreamMessage_Start_ struct { + Start *BatchStreamMessage_Start `protobuf:"bytes,3,opt,name=start,proto3,oneof"` +} + +type BatchStreamMessage_Stop_ struct { + Stop *BatchStreamMessage_Stop 
`protobuf:"bytes,4,opt,name=stop,proto3,oneof"` +} + +type BatchStreamMessage_Shutdown_ struct { + Shutdown *BatchStreamMessage_Shutdown `protobuf:"bytes,5,opt,name=shutdown,proto3,oneof"` +} + +type BatchStreamMessage_ShuttingDown_ struct { + ShuttingDown *BatchStreamMessage_ShuttingDown `protobuf:"bytes,6,opt,name=shutting_down,json=shuttingDown,proto3,oneof"` +} + +func (*BatchStreamMessage_Error_) isBatchStreamMessage_Message() {} + +func (*BatchStreamMessage_Start_) isBatchStreamMessage_Message() {} + +func (*BatchStreamMessage_Stop_) isBatchStreamMessage_Message() {} + +func (*BatchStreamMessage_Shutdown_) isBatchStreamMessage_Message() {} + +func (*BatchStreamMessage_ShuttingDown_) isBatchStreamMessage_Message() {} + +type BatchObject struct { + state protoimpl.MessageState `protogen:"open.v1"` + Uuid string `protobuf:"bytes,1,opt,name=uuid,proto3" json:"uuid,omitempty"` + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + // + // Deprecated: Marked as deprecated in v1/batch.proto. 
+ Vector []float32 `protobuf:"fixed32,2,rep,packed,name=vector,proto3" json:"vector,omitempty"` // deprecated, will be removed + Properties *BatchObject_Properties `protobuf:"bytes,3,opt,name=properties,proto3" json:"properties,omitempty"` + Collection string `protobuf:"bytes,4,opt,name=collection,proto3" json:"collection,omitempty"` + Tenant string `protobuf:"bytes,5,opt,name=tenant,proto3" json:"tenant,omitempty"` + VectorBytes []byte `protobuf:"bytes,6,opt,name=vector_bytes,json=vectorBytes,proto3" json:"vector_bytes,omitempty"` + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + Vectors []*Vectors `protobuf:"bytes,23,rep,name=vectors,proto3" json:"vectors,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchObject) Reset() { + *x = BatchObject{} + mi := &file_v1_batch_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchObject) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchObject) ProtoMessage() {} + +func (x *BatchObject) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchObject.ProtoReflect.Descriptor instead. +func (*BatchObject) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{6} +} + +func (x *BatchObject) GetUuid() string { + if x != nil { + return x.Uuid + } + return "" +} + +// Deprecated: Marked as deprecated in v1/batch.proto. 
+func (x *BatchObject) GetVector() []float32 { + if x != nil { + return x.Vector + } + return nil +} + +func (x *BatchObject) GetProperties() *BatchObject_Properties { + if x != nil { + return x.Properties + } + return nil +} + +func (x *BatchObject) GetCollection() string { + if x != nil { + return x.Collection + } + return "" +} + +func (x *BatchObject) GetTenant() string { + if x != nil { + return x.Tenant + } + return "" +} + +func (x *BatchObject) GetVectorBytes() []byte { + if x != nil { + return x.VectorBytes + } + return nil +} + +func (x *BatchObject) GetVectors() []*Vectors { + if x != nil { + return x.Vectors + } + return nil +} + +type BatchReference struct { + state protoimpl.MessageState `protogen:"open.v1"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + FromCollection string `protobuf:"bytes,2,opt,name=from_collection,json=fromCollection,proto3" json:"from_collection,omitempty"` + FromUuid string `protobuf:"bytes,3,opt,name=from_uuid,json=fromUuid,proto3" json:"from_uuid,omitempty"` + ToCollection *string `protobuf:"bytes,4,opt,name=to_collection,json=toCollection,proto3,oneof" json:"to_collection,omitempty"` + ToUuid string `protobuf:"bytes,5,opt,name=to_uuid,json=toUuid,proto3" json:"to_uuid,omitempty"` + Tenant string `protobuf:"bytes,6,opt,name=tenant,proto3" json:"tenant,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchReference) Reset() { + *x = BatchReference{} + mi := &file_v1_batch_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchReference) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchReference) ProtoMessage() {} + +func (x *BatchReference) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } 
+ return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchReference.ProtoReflect.Descriptor instead. +func (*BatchReference) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{7} +} + +func (x *BatchReference) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *BatchReference) GetFromCollection() string { + if x != nil { + return x.FromCollection + } + return "" +} + +func (x *BatchReference) GetFromUuid() string { + if x != nil { + return x.FromUuid + } + return "" +} + +func (x *BatchReference) GetToCollection() string { + if x != nil && x.ToCollection != nil { + return *x.ToCollection + } + return "" +} + +func (x *BatchReference) GetToUuid() string { + if x != nil { + return x.ToUuid + } + return "" +} + +func (x *BatchReference) GetTenant() string { + if x != nil { + return x.Tenant + } + return "" +} + +type BatchObjectsReply struct { + state protoimpl.MessageState `protogen:"open.v1"` + Took float32 `protobuf:"fixed32,1,opt,name=took,proto3" json:"took,omitempty"` + Errors []*BatchObjectsReply_BatchError `protobuf:"bytes,2,rep,name=errors,proto3" json:"errors,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchObjectsReply) Reset() { + *x = BatchObjectsReply{} + mi := &file_v1_batch_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchObjectsReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchObjectsReply) ProtoMessage() {} + +func (x *BatchObjectsReply) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchObjectsReply.ProtoReflect.Descriptor instead. 
+func (*BatchObjectsReply) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{8} +} + +func (x *BatchObjectsReply) GetTook() float32 { + if x != nil { + return x.Took + } + return 0 +} + +func (x *BatchObjectsReply) GetErrors() []*BatchObjectsReply_BatchError { + if x != nil { + return x.Errors + } + return nil +} + +type BatchReferencesReply struct { + state protoimpl.MessageState `protogen:"open.v1"` + Took float32 `protobuf:"fixed32,1,opt,name=took,proto3" json:"took,omitempty"` + Errors []*BatchReferencesReply_BatchError `protobuf:"bytes,2,rep,name=errors,proto3" json:"errors,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchReferencesReply) Reset() { + *x = BatchReferencesReply{} + mi := &file_v1_batch_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchReferencesReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchReferencesReply) ProtoMessage() {} + +func (x *BatchReferencesReply) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchReferencesReply.ProtoReflect.Descriptor instead. 
+func (*BatchReferencesReply) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{9} +} + +func (x *BatchReferencesReply) GetTook() float32 { + if x != nil { + return x.Took + } + return 0 +} + +func (x *BatchReferencesReply) GetErrors() []*BatchReferencesReply_BatchError { + if x != nil { + return x.Errors + } + return nil +} + +type BatchSendRequest_Stop struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchSendRequest_Stop) Reset() { + *x = BatchSendRequest_Stop{} + mi := &file_v1_batch_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchSendRequest_Stop) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchSendRequest_Stop) ProtoMessage() {} + +func (x *BatchSendRequest_Stop) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchSendRequest_Stop.ProtoReflect.Descriptor instead. 
+func (*BatchSendRequest_Stop) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{2, 0} +} + +type BatchSendRequest_Objects struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []*BatchObject `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchSendRequest_Objects) Reset() { + *x = BatchSendRequest_Objects{} + mi := &file_v1_batch_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchSendRequest_Objects) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchSendRequest_Objects) ProtoMessage() {} + +func (x *BatchSendRequest_Objects) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchSendRequest_Objects.ProtoReflect.Descriptor instead. 
+func (*BatchSendRequest_Objects) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{2, 1} +} + +func (x *BatchSendRequest_Objects) GetValues() []*BatchObject { + if x != nil { + return x.Values + } + return nil +} + +type BatchSendRequest_References struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []*BatchReference `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchSendRequest_References) Reset() { + *x = BatchSendRequest_References{} + mi := &file_v1_batch_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchSendRequest_References) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchSendRequest_References) ProtoMessage() {} + +func (x *BatchSendRequest_References) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchSendRequest_References.ProtoReflect.Descriptor instead. 
+func (*BatchSendRequest_References) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{2, 2} +} + +func (x *BatchSendRequest_References) GetValues() []*BatchReference { + if x != nil { + return x.Values + } + return nil +} + +type BatchStreamMessage_Start struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchStreamMessage_Start) Reset() { + *x = BatchStreamMessage_Start{} + mi := &file_v1_batch_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchStreamMessage_Start) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchStreamMessage_Start) ProtoMessage() {} + +func (x *BatchStreamMessage_Start) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[13] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchStreamMessage_Start.ProtoReflect.Descriptor instead. 
+func (*BatchStreamMessage_Start) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{5, 0} +} + +type BatchStreamMessage_Stop struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchStreamMessage_Stop) Reset() { + *x = BatchStreamMessage_Stop{} + mi := &file_v1_batch_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchStreamMessage_Stop) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchStreamMessage_Stop) ProtoMessage() {} + +func (x *BatchStreamMessage_Stop) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[14] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchStreamMessage_Stop.ProtoReflect.Descriptor instead. 
+func (*BatchStreamMessage_Stop) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{5, 1} +} + +type BatchStreamMessage_Shutdown struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchStreamMessage_Shutdown) Reset() { + *x = BatchStreamMessage_Shutdown{} + mi := &file_v1_batch_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchStreamMessage_Shutdown) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchStreamMessage_Shutdown) ProtoMessage() {} + +func (x *BatchStreamMessage_Shutdown) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[15] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchStreamMessage_Shutdown.ProtoReflect.Descriptor instead. 
+func (*BatchStreamMessage_Shutdown) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{5, 2} +} + +type BatchStreamMessage_ShuttingDown struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchStreamMessage_ShuttingDown) Reset() { + *x = BatchStreamMessage_ShuttingDown{} + mi := &file_v1_batch_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchStreamMessage_ShuttingDown) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchStreamMessage_ShuttingDown) ProtoMessage() {} + +func (x *BatchStreamMessage_ShuttingDown) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[16] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchStreamMessage_ShuttingDown.ProtoReflect.Descriptor instead. 
+func (*BatchStreamMessage_ShuttingDown) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{5, 3} +} + +type BatchStreamMessage_Error struct { + state protoimpl.MessageState `protogen:"open.v1"` + Error string `protobuf:"bytes,1,opt,name=error,proto3" json:"error,omitempty"` + Index int32 `protobuf:"varint,2,opt,name=index,proto3" json:"index,omitempty"` + IsRetriable bool `protobuf:"varint,3,opt,name=is_retriable,json=isRetriable,proto3" json:"is_retriable,omitempty"` + IsObject bool `protobuf:"varint,4,opt,name=is_object,json=isObject,proto3" json:"is_object,omitempty"` + IsReference bool `protobuf:"varint,5,opt,name=is_reference,json=isReference,proto3" json:"is_reference,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchStreamMessage_Error) Reset() { + *x = BatchStreamMessage_Error{} + mi := &file_v1_batch_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchStreamMessage_Error) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchStreamMessage_Error) ProtoMessage() {} + +func (x *BatchStreamMessage_Error) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[17] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchStreamMessage_Error.ProtoReflect.Descriptor instead. 
+func (*BatchStreamMessage_Error) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{5, 4} +} + +func (x *BatchStreamMessage_Error) GetError() string { + if x != nil { + return x.Error + } + return "" +} + +func (x *BatchStreamMessage_Error) GetIndex() int32 { + if x != nil { + return x.Index + } + return 0 +} + +func (x *BatchStreamMessage_Error) GetIsRetriable() bool { + if x != nil { + return x.IsRetriable + } + return false +} + +func (x *BatchStreamMessage_Error) GetIsObject() bool { + if x != nil { + return x.IsObject + } + return false +} + +func (x *BatchStreamMessage_Error) GetIsReference() bool { + if x != nil { + return x.IsReference + } + return false +} + +type BatchObject_Properties struct { + state protoimpl.MessageState `protogen:"open.v1"` + NonRefProperties *structpb.Struct `protobuf:"bytes,1,opt,name=non_ref_properties,json=nonRefProperties,proto3" json:"non_ref_properties,omitempty"` + SingleTargetRefProps []*BatchObject_SingleTargetRefProps `protobuf:"bytes,2,rep,name=single_target_ref_props,json=singleTargetRefProps,proto3" json:"single_target_ref_props,omitempty"` + MultiTargetRefProps []*BatchObject_MultiTargetRefProps `protobuf:"bytes,3,rep,name=multi_target_ref_props,json=multiTargetRefProps,proto3" json:"multi_target_ref_props,omitempty"` + NumberArrayProperties []*NumberArrayProperties `protobuf:"bytes,4,rep,name=number_array_properties,json=numberArrayProperties,proto3" json:"number_array_properties,omitempty"` + IntArrayProperties []*IntArrayProperties `protobuf:"bytes,5,rep,name=int_array_properties,json=intArrayProperties,proto3" json:"int_array_properties,omitempty"` + TextArrayProperties []*TextArrayProperties `protobuf:"bytes,6,rep,name=text_array_properties,json=textArrayProperties,proto3" json:"text_array_properties,omitempty"` + BooleanArrayProperties []*BooleanArrayProperties `protobuf:"bytes,7,rep,name=boolean_array_properties,json=booleanArrayProperties,proto3" 
json:"boolean_array_properties,omitempty"` + ObjectProperties []*ObjectProperties `protobuf:"bytes,8,rep,name=object_properties,json=objectProperties,proto3" json:"object_properties,omitempty"` + ObjectArrayProperties []*ObjectArrayProperties `protobuf:"bytes,9,rep,name=object_array_properties,json=objectArrayProperties,proto3" json:"object_array_properties,omitempty"` + // empty lists do not have a type in many languages and clients do not know which datatype the property has. + // Weaviate can get the datatype from its schema + EmptyListProps []string `protobuf:"bytes,10,rep,name=empty_list_props,json=emptyListProps,proto3" json:"empty_list_props,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchObject_Properties) Reset() { + *x = BatchObject_Properties{} + mi := &file_v1_batch_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchObject_Properties) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchObject_Properties) ProtoMessage() {} + +func (x *BatchObject_Properties) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[18] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchObject_Properties.ProtoReflect.Descriptor instead. 
+func (*BatchObject_Properties) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{6, 0} +} + +func (x *BatchObject_Properties) GetNonRefProperties() *structpb.Struct { + if x != nil { + return x.NonRefProperties + } + return nil +} + +func (x *BatchObject_Properties) GetSingleTargetRefProps() []*BatchObject_SingleTargetRefProps { + if x != nil { + return x.SingleTargetRefProps + } + return nil +} + +func (x *BatchObject_Properties) GetMultiTargetRefProps() []*BatchObject_MultiTargetRefProps { + if x != nil { + return x.MultiTargetRefProps + } + return nil +} + +func (x *BatchObject_Properties) GetNumberArrayProperties() []*NumberArrayProperties { + if x != nil { + return x.NumberArrayProperties + } + return nil +} + +func (x *BatchObject_Properties) GetIntArrayProperties() []*IntArrayProperties { + if x != nil { + return x.IntArrayProperties + } + return nil +} + +func (x *BatchObject_Properties) GetTextArrayProperties() []*TextArrayProperties { + if x != nil { + return x.TextArrayProperties + } + return nil +} + +func (x *BatchObject_Properties) GetBooleanArrayProperties() []*BooleanArrayProperties { + if x != nil { + return x.BooleanArrayProperties + } + return nil +} + +func (x *BatchObject_Properties) GetObjectProperties() []*ObjectProperties { + if x != nil { + return x.ObjectProperties + } + return nil +} + +func (x *BatchObject_Properties) GetObjectArrayProperties() []*ObjectArrayProperties { + if x != nil { + return x.ObjectArrayProperties + } + return nil +} + +func (x *BatchObject_Properties) GetEmptyListProps() []string { + if x != nil { + return x.EmptyListProps + } + return nil +} + +type BatchObject_SingleTargetRefProps struct { + state protoimpl.MessageState `protogen:"open.v1"` + Uuids []string `protobuf:"bytes,1,rep,name=uuids,proto3" json:"uuids,omitempty"` + PropName string `protobuf:"bytes,2,opt,name=prop_name,json=propName,proto3" json:"prop_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache 
protoimpl.SizeCache +} + +func (x *BatchObject_SingleTargetRefProps) Reset() { + *x = BatchObject_SingleTargetRefProps{} + mi := &file_v1_batch_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchObject_SingleTargetRefProps) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchObject_SingleTargetRefProps) ProtoMessage() {} + +func (x *BatchObject_SingleTargetRefProps) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[19] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchObject_SingleTargetRefProps.ProtoReflect.Descriptor instead. +func (*BatchObject_SingleTargetRefProps) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{6, 1} +} + +func (x *BatchObject_SingleTargetRefProps) GetUuids() []string { + if x != nil { + return x.Uuids + } + return nil +} + +func (x *BatchObject_SingleTargetRefProps) GetPropName() string { + if x != nil { + return x.PropName + } + return "" +} + +type BatchObject_MultiTargetRefProps struct { + state protoimpl.MessageState `protogen:"open.v1"` + Uuids []string `protobuf:"bytes,1,rep,name=uuids,proto3" json:"uuids,omitempty"` + PropName string `protobuf:"bytes,2,opt,name=prop_name,json=propName,proto3" json:"prop_name,omitempty"` + TargetCollection string `protobuf:"bytes,3,opt,name=target_collection,json=targetCollection,proto3" json:"target_collection,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchObject_MultiTargetRefProps) Reset() { + *x = BatchObject_MultiTargetRefProps{} + mi := &file_v1_batch_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchObject_MultiTargetRefProps) String() string { + return 
protoimpl.X.MessageStringOf(x) +} + +func (*BatchObject_MultiTargetRefProps) ProtoMessage() {} + +func (x *BatchObject_MultiTargetRefProps) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[20] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchObject_MultiTargetRefProps.ProtoReflect.Descriptor instead. +func (*BatchObject_MultiTargetRefProps) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{6, 2} +} + +func (x *BatchObject_MultiTargetRefProps) GetUuids() []string { + if x != nil { + return x.Uuids + } + return nil +} + +func (x *BatchObject_MultiTargetRefProps) GetPropName() string { + if x != nil { + return x.PropName + } + return "" +} + +func (x *BatchObject_MultiTargetRefProps) GetTargetCollection() string { + if x != nil { + return x.TargetCollection + } + return "" +} + +type BatchObjectsReply_BatchError struct { + state protoimpl.MessageState `protogen:"open.v1"` + Index int32 `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty"` + Error string `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchObjectsReply_BatchError) Reset() { + *x = BatchObjectsReply_BatchError{} + mi := &file_v1_batch_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchObjectsReply_BatchError) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchObjectsReply_BatchError) ProtoMessage() {} + +func (x *BatchObjectsReply_BatchError) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[21] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + 
return mi.MessageOf(x) +} + +// Deprecated: Use BatchObjectsReply_BatchError.ProtoReflect.Descriptor instead. +func (*BatchObjectsReply_BatchError) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{8, 0} +} + +func (x *BatchObjectsReply_BatchError) GetIndex() int32 { + if x != nil { + return x.Index + } + return 0 +} + +func (x *BatchObjectsReply_BatchError) GetError() string { + if x != nil { + return x.Error + } + return "" +} + +type BatchReferencesReply_BatchError struct { + state protoimpl.MessageState `protogen:"open.v1"` + Index int32 `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty"` + Error string `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchReferencesReply_BatchError) Reset() { + *x = BatchReferencesReply_BatchError{} + mi := &file_v1_batch_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchReferencesReply_BatchError) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchReferencesReply_BatchError) ProtoMessage() {} + +func (x *BatchReferencesReply_BatchError) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_proto_msgTypes[22] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchReferencesReply_BatchError.ProtoReflect.Descriptor instead. 
+func (*BatchReferencesReply_BatchError) Descriptor() ([]byte, []int) { + return file_v1_batch_proto_rawDescGZIP(), []int{9, 0} +} + +func (x *BatchReferencesReply_BatchError) GetIndex() int32 { + if x != nil { + return x.Index + } + return 0 +} + +func (x *BatchReferencesReply_BatchError) GetError() string { + if x != nil { + return x.Error + } + return "" +} + +var File_v1_batch_proto protoreflect.FileDescriptor + +const file_v1_batch_proto_rawDesc = "" + + "\n" + + "\x0ev1/batch.proto\x12\vweaviate.v1\x1a\x1cgoogle/protobuf/struct.proto\x1a\rv1/base.proto\"\xb0\x01\n" + + "\x13BatchObjectsRequest\x122\n" + + "\aobjects\x18\x01 \x03(\v2\x18.weaviate.v1.BatchObjectR\aobjects\x12O\n" + + "\x11consistency_level\x18\x02 \x01(\x0e2\x1d.weaviate.v1.ConsistencyLevelH\x00R\x10consistencyLevel\x88\x01\x01B\x14\n" + + "\x12_consistency_level\"\xbc\x01\n" + + "\x16BatchReferencesRequest\x12;\n" + + "\n" + + "references\x18\x01 \x03(\v2\x1b.weaviate.v1.BatchReferenceR\n" + + "references\x12O\n" + + "\x11consistency_level\x18\x02 \x01(\x0e2\x1d.weaviate.v1.ConsistencyLevelH\x00R\x10consistencyLevel\x88\x01\x01B\x14\n" + + "\x12_consistency_level\"\x8b\x03\n" + + "\x10BatchSendRequest\x12\x1b\n" + + "\tstream_id\x18\x01 \x01(\tR\bstreamId\x12A\n" + + "\aobjects\x18\x02 \x01(\v2%.weaviate.v1.BatchSendRequest.ObjectsH\x00R\aobjects\x12J\n" + + "\n" + + "references\x18\x03 \x01(\v2(.weaviate.v1.BatchSendRequest.ReferencesH\x00R\n" + + "references\x128\n" + + "\x04stop\x18\x04 \x01(\v2\".weaviate.v1.BatchSendRequest.StopH\x00R\x04stop\x1a\x06\n" + + "\x04Stop\x1a;\n" + + "\aObjects\x120\n" + + "\x06values\x18\x01 \x03(\v2\x18.weaviate.v1.BatchObjectR\x06values\x1aA\n" + + "\n" + + "References\x123\n" + + "\x06values\x18\x01 \x03(\v2\x1b.weaviate.v1.BatchReferenceR\x06valuesB\t\n" + + "\amessage\"a\n" + + "\x0eBatchSendReply\x12&\n" + + "\x0fnext_batch_size\x18\x01 \x01(\x05R\rnextBatchSize\x12'\n" + + "\x0fbackoff_seconds\x18\x02 \x01(\x02R\x0ebackoffSeconds\"\xf6\x01\n" + + 
"\x12BatchStreamRequest\x12O\n" + + "\x11consistency_level\x18\x01 \x01(\x0e2\x1d.weaviate.v1.ConsistencyLevelH\x00R\x10consistencyLevel\x88\x01\x01\x12&\n" + + "\fobject_index\x18\x02 \x01(\x05H\x01R\vobjectIndex\x88\x01\x01\x12,\n" + + "\x0freference_index\x18\x03 \x01(\x05H\x02R\x0ereferenceIndex\x88\x01\x01B\x14\n" + + "\x12_consistency_levelB\x0f\n" + + "\r_object_indexB\x12\n" + + "\x10_reference_index\"\xd9\x04\n" + + "\x12BatchStreamMessage\x12\x1b\n" + + "\tstream_id\x18\x01 \x01(\tR\bstreamId\x12=\n" + + "\x05error\x18\x02 \x01(\v2%.weaviate.v1.BatchStreamMessage.ErrorH\x00R\x05error\x12=\n" + + "\x05start\x18\x03 \x01(\v2%.weaviate.v1.BatchStreamMessage.StartH\x00R\x05start\x12:\n" + + "\x04stop\x18\x04 \x01(\v2$.weaviate.v1.BatchStreamMessage.StopH\x00R\x04stop\x12F\n" + + "\bshutdown\x18\x05 \x01(\v2(.weaviate.v1.BatchStreamMessage.ShutdownH\x00R\bshutdown\x12S\n" + + "\rshutting_down\x18\x06 \x01(\v2,.weaviate.v1.BatchStreamMessage.ShuttingDownH\x00R\fshuttingDown\x1a\a\n" + + "\x05Start\x1a\x06\n" + + "\x04Stop\x1a\n" + + "\n" + + "\bShutdown\x1a\x0e\n" + + "\fShuttingDown\x1a\x96\x01\n" + + "\x05Error\x12\x14\n" + + "\x05error\x18\x01 \x01(\tR\x05error\x12\x14\n" + + "\x05index\x18\x02 \x01(\x05R\x05index\x12!\n" + + "\fis_retriable\x18\x03 \x01(\bR\visRetriable\x12\x1b\n" + + "\tis_object\x18\x04 \x01(\bR\bisObject\x12!\n" + + "\fis_reference\x18\x05 \x01(\bR\visReferenceB\t\n" + + "\amessage\"\xa4\n" + + "\n" + + "\vBatchObject\x12\x12\n" + + "\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x1a\n" + + "\x06vector\x18\x02 \x03(\x02B\x02\x18\x01R\x06vector\x12C\n" + + "\n" + + "properties\x18\x03 \x01(\v2#.weaviate.v1.BatchObject.PropertiesR\n" + + "properties\x12\x1e\n" + + "\n" + + "collection\x18\x04 \x01(\tR\n" + + "collection\x12\x16\n" + + "\x06tenant\x18\x05 \x01(\tR\x06tenant\x12!\n" + + "\fvector_bytes\x18\x06 \x01(\fR\vvectorBytes\x12.\n" + + "\avectors\x18\x17 \x03(\v2\x14.weaviate.v1.VectorsR\avectors\x1a\xd2\x06\n" + + "\n" + + 
"Properties\x12E\n" + + "\x12non_ref_properties\x18\x01 \x01(\v2\x17.google.protobuf.StructR\x10nonRefProperties\x12d\n" + + "\x17single_target_ref_props\x18\x02 \x03(\v2-.weaviate.v1.BatchObject.SingleTargetRefPropsR\x14singleTargetRefProps\x12a\n" + + "\x16multi_target_ref_props\x18\x03 \x03(\v2,.weaviate.v1.BatchObject.MultiTargetRefPropsR\x13multiTargetRefProps\x12Z\n" + + "\x17number_array_properties\x18\x04 \x03(\v2\".weaviate.v1.NumberArrayPropertiesR\x15numberArrayProperties\x12Q\n" + + "\x14int_array_properties\x18\x05 \x03(\v2\x1f.weaviate.v1.IntArrayPropertiesR\x12intArrayProperties\x12T\n" + + "\x15text_array_properties\x18\x06 \x03(\v2 .weaviate.v1.TextArrayPropertiesR\x13textArrayProperties\x12]\n" + + "\x18boolean_array_properties\x18\a \x03(\v2#.weaviate.v1.BooleanArrayPropertiesR\x16booleanArrayProperties\x12J\n" + + "\x11object_properties\x18\b \x03(\v2\x1d.weaviate.v1.ObjectPropertiesR\x10objectProperties\x12Z\n" + + "\x17object_array_properties\x18\t \x03(\v2\".weaviate.v1.ObjectArrayPropertiesR\x15objectArrayProperties\x12(\n" + + "\x10empty_list_props\x18\n" + + " \x03(\tR\x0eemptyListProps\x1aI\n" + + "\x14SingleTargetRefProps\x12\x14\n" + + "\x05uuids\x18\x01 \x03(\tR\x05uuids\x12\x1b\n" + + "\tprop_name\x18\x02 \x01(\tR\bpropName\x1au\n" + + "\x13MultiTargetRefProps\x12\x14\n" + + "\x05uuids\x18\x01 \x03(\tR\x05uuids\x12\x1b\n" + + "\tprop_name\x18\x02 \x01(\tR\bpropName\x12+\n" + + "\x11target_collection\x18\x03 \x01(\tR\x10targetCollection\"\xd7\x01\n" + + "\x0eBatchReference\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12'\n" + + "\x0ffrom_collection\x18\x02 \x01(\tR\x0efromCollection\x12\x1b\n" + + "\tfrom_uuid\x18\x03 \x01(\tR\bfromUuid\x12(\n" + + "\rto_collection\x18\x04 \x01(\tH\x00R\ftoCollection\x88\x01\x01\x12\x17\n" + + "\ato_uuid\x18\x05 \x01(\tR\x06toUuid\x12\x16\n" + + "\x06tenant\x18\x06 \x01(\tR\x06tenantB\x10\n" + + "\x0e_to_collection\"\xa4\x01\n" + + "\x11BatchObjectsReply\x12\x12\n" + + "\x04took\x18\x01 
\x01(\x02R\x04took\x12A\n" + + "\x06errors\x18\x02 \x03(\v2).weaviate.v1.BatchObjectsReply.BatchErrorR\x06errors\x1a8\n" + + "\n" + + "BatchError\x12\x14\n" + + "\x05index\x18\x01 \x01(\x05R\x05index\x12\x14\n" + + "\x05error\x18\x02 \x01(\tR\x05error\"\xaa\x01\n" + + "\x14BatchReferencesReply\x12\x12\n" + + "\x04took\x18\x01 \x01(\x02R\x04took\x12D\n" + + "\x06errors\x18\x02 \x03(\v2,.weaviate.v1.BatchReferencesReply.BatchErrorR\x06errors\x1a8\n" + + "\n" + + "BatchError\x12\x14\n" + + "\x05index\x18\x01 \x01(\x05R\x05index\x12\x14\n" + + "\x05error\x18\x02 \x01(\tR\x05errorBo\n" + + "#io.weaviate.client.grpc.protocol.v1B\x12WeaviateProtoBatchZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var ( + file_v1_batch_proto_rawDescOnce sync.Once + file_v1_batch_proto_rawDescData []byte +) + +func file_v1_batch_proto_rawDescGZIP() []byte { + file_v1_batch_proto_rawDescOnce.Do(func() { + file_v1_batch_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_v1_batch_proto_rawDesc), len(file_v1_batch_proto_rawDesc))) + }) + return file_v1_batch_proto_rawDescData +} + +var file_v1_batch_proto_msgTypes = make([]protoimpl.MessageInfo, 23) +var file_v1_batch_proto_goTypes = []any{ + (*BatchObjectsRequest)(nil), // 0: weaviate.v1.BatchObjectsRequest + (*BatchReferencesRequest)(nil), // 1: weaviate.v1.BatchReferencesRequest + (*BatchSendRequest)(nil), // 2: weaviate.v1.BatchSendRequest + (*BatchSendReply)(nil), // 3: weaviate.v1.BatchSendReply + (*BatchStreamRequest)(nil), // 4: weaviate.v1.BatchStreamRequest + (*BatchStreamMessage)(nil), // 5: weaviate.v1.BatchStreamMessage + (*BatchObject)(nil), // 6: weaviate.v1.BatchObject + (*BatchReference)(nil), // 7: weaviate.v1.BatchReference + (*BatchObjectsReply)(nil), // 8: weaviate.v1.BatchObjectsReply + (*BatchReferencesReply)(nil), // 9: weaviate.v1.BatchReferencesReply + (*BatchSendRequest_Stop)(nil), // 10: weaviate.v1.BatchSendRequest.Stop + (*BatchSendRequest_Objects)(nil), // 11: 
weaviate.v1.BatchSendRequest.Objects + (*BatchSendRequest_References)(nil), // 12: weaviate.v1.BatchSendRequest.References + (*BatchStreamMessage_Start)(nil), // 13: weaviate.v1.BatchStreamMessage.Start + (*BatchStreamMessage_Stop)(nil), // 14: weaviate.v1.BatchStreamMessage.Stop + (*BatchStreamMessage_Shutdown)(nil), // 15: weaviate.v1.BatchStreamMessage.Shutdown + (*BatchStreamMessage_ShuttingDown)(nil), // 16: weaviate.v1.BatchStreamMessage.ShuttingDown + (*BatchStreamMessage_Error)(nil), // 17: weaviate.v1.BatchStreamMessage.Error + (*BatchObject_Properties)(nil), // 18: weaviate.v1.BatchObject.Properties + (*BatchObject_SingleTargetRefProps)(nil), // 19: weaviate.v1.BatchObject.SingleTargetRefProps + (*BatchObject_MultiTargetRefProps)(nil), // 20: weaviate.v1.BatchObject.MultiTargetRefProps + (*BatchObjectsReply_BatchError)(nil), // 21: weaviate.v1.BatchObjectsReply.BatchError + (*BatchReferencesReply_BatchError)(nil), // 22: weaviate.v1.BatchReferencesReply.BatchError + (ConsistencyLevel)(0), // 23: weaviate.v1.ConsistencyLevel + (*Vectors)(nil), // 24: weaviate.v1.Vectors + (*structpb.Struct)(nil), // 25: google.protobuf.Struct + (*NumberArrayProperties)(nil), // 26: weaviate.v1.NumberArrayProperties + (*IntArrayProperties)(nil), // 27: weaviate.v1.IntArrayProperties + (*TextArrayProperties)(nil), // 28: weaviate.v1.TextArrayProperties + (*BooleanArrayProperties)(nil), // 29: weaviate.v1.BooleanArrayProperties + (*ObjectProperties)(nil), // 30: weaviate.v1.ObjectProperties + (*ObjectArrayProperties)(nil), // 31: weaviate.v1.ObjectArrayProperties +} +var file_v1_batch_proto_depIdxs = []int32{ + 6, // 0: weaviate.v1.BatchObjectsRequest.objects:type_name -> weaviate.v1.BatchObject + 23, // 1: weaviate.v1.BatchObjectsRequest.consistency_level:type_name -> weaviate.v1.ConsistencyLevel + 7, // 2: weaviate.v1.BatchReferencesRequest.references:type_name -> weaviate.v1.BatchReference + 23, // 3: weaviate.v1.BatchReferencesRequest.consistency_level:type_name -> 
weaviate.v1.ConsistencyLevel + 11, // 4: weaviate.v1.BatchSendRequest.objects:type_name -> weaviate.v1.BatchSendRequest.Objects + 12, // 5: weaviate.v1.BatchSendRequest.references:type_name -> weaviate.v1.BatchSendRequest.References + 10, // 6: weaviate.v1.BatchSendRequest.stop:type_name -> weaviate.v1.BatchSendRequest.Stop + 23, // 7: weaviate.v1.BatchStreamRequest.consistency_level:type_name -> weaviate.v1.ConsistencyLevel + 17, // 8: weaviate.v1.BatchStreamMessage.error:type_name -> weaviate.v1.BatchStreamMessage.Error + 13, // 9: weaviate.v1.BatchStreamMessage.start:type_name -> weaviate.v1.BatchStreamMessage.Start + 14, // 10: weaviate.v1.BatchStreamMessage.stop:type_name -> weaviate.v1.BatchStreamMessage.Stop + 15, // 11: weaviate.v1.BatchStreamMessage.shutdown:type_name -> weaviate.v1.BatchStreamMessage.Shutdown + 16, // 12: weaviate.v1.BatchStreamMessage.shutting_down:type_name -> weaviate.v1.BatchStreamMessage.ShuttingDown + 18, // 13: weaviate.v1.BatchObject.properties:type_name -> weaviate.v1.BatchObject.Properties + 24, // 14: weaviate.v1.BatchObject.vectors:type_name -> weaviate.v1.Vectors + 21, // 15: weaviate.v1.BatchObjectsReply.errors:type_name -> weaviate.v1.BatchObjectsReply.BatchError + 22, // 16: weaviate.v1.BatchReferencesReply.errors:type_name -> weaviate.v1.BatchReferencesReply.BatchError + 6, // 17: weaviate.v1.BatchSendRequest.Objects.values:type_name -> weaviate.v1.BatchObject + 7, // 18: weaviate.v1.BatchSendRequest.References.values:type_name -> weaviate.v1.BatchReference + 25, // 19: weaviate.v1.BatchObject.Properties.non_ref_properties:type_name -> google.protobuf.Struct + 19, // 20: weaviate.v1.BatchObject.Properties.single_target_ref_props:type_name -> weaviate.v1.BatchObject.SingleTargetRefProps + 20, // 21: weaviate.v1.BatchObject.Properties.multi_target_ref_props:type_name -> weaviate.v1.BatchObject.MultiTargetRefProps + 26, // 22: weaviate.v1.BatchObject.Properties.number_array_properties:type_name -> 
weaviate.v1.NumberArrayProperties + 27, // 23: weaviate.v1.BatchObject.Properties.int_array_properties:type_name -> weaviate.v1.IntArrayProperties + 28, // 24: weaviate.v1.BatchObject.Properties.text_array_properties:type_name -> weaviate.v1.TextArrayProperties + 29, // 25: weaviate.v1.BatchObject.Properties.boolean_array_properties:type_name -> weaviate.v1.BooleanArrayProperties + 30, // 26: weaviate.v1.BatchObject.Properties.object_properties:type_name -> weaviate.v1.ObjectProperties + 31, // 27: weaviate.v1.BatchObject.Properties.object_array_properties:type_name -> weaviate.v1.ObjectArrayProperties + 28, // [28:28] is the sub-list for method output_type + 28, // [28:28] is the sub-list for method input_type + 28, // [28:28] is the sub-list for extension type_name + 28, // [28:28] is the sub-list for extension extendee + 0, // [0:28] is the sub-list for field type_name +} + +func init() { file_v1_batch_proto_init() } +func file_v1_batch_proto_init() { + if File_v1_batch_proto != nil { + return + } + file_v1_base_proto_init() + file_v1_batch_proto_msgTypes[0].OneofWrappers = []any{} + file_v1_batch_proto_msgTypes[1].OneofWrappers = []any{} + file_v1_batch_proto_msgTypes[2].OneofWrappers = []any{ + (*BatchSendRequest_Objects_)(nil), + (*BatchSendRequest_References_)(nil), + (*BatchSendRequest_Stop_)(nil), + } + file_v1_batch_proto_msgTypes[4].OneofWrappers = []any{} + file_v1_batch_proto_msgTypes[5].OneofWrappers = []any{ + (*BatchStreamMessage_Error_)(nil), + (*BatchStreamMessage_Start_)(nil), + (*BatchStreamMessage_Stop_)(nil), + (*BatchStreamMessage_Shutdown_)(nil), + (*BatchStreamMessage_ShuttingDown_)(nil), + } + file_v1_batch_proto_msgTypes[7].OneofWrappers = []any{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v1_batch_proto_rawDesc), len(file_v1_batch_proto_rawDesc)), + NumEnums: 0, + NumMessages: 23, + NumExtensions: 0, 
+ NumServices: 0, + }, + GoTypes: file_v1_batch_proto_goTypes, + DependencyIndexes: file_v1_batch_proto_depIdxs, + MessageInfos: file_v1_batch_proto_msgTypes, + }.Build() + File_v1_batch_proto = out.File + file_v1_batch_proto_goTypes = nil + file_v1_batch_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/batch_delete.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/batch_delete.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..bef0bb42874bbd45aa253917b064f18e0ff7a466 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/batch_delete.pb.go @@ -0,0 +1,330 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package protocol + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type BatchDeleteRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Collection string `protobuf:"bytes,1,opt,name=collection,proto3" json:"collection,omitempty"` + Filters *Filters `protobuf:"bytes,2,opt,name=filters,proto3" json:"filters,omitempty"` + Verbose bool `protobuf:"varint,3,opt,name=verbose,proto3" json:"verbose,omitempty"` + DryRun bool `protobuf:"varint,4,opt,name=dry_run,json=dryRun,proto3" json:"dry_run,omitempty"` + ConsistencyLevel *ConsistencyLevel `protobuf:"varint,5,opt,name=consistency_level,json=consistencyLevel,proto3,enum=weaviate.v1.ConsistencyLevel,oneof" json:"consistency_level,omitempty"` + Tenant *string `protobuf:"bytes,6,opt,name=tenant,proto3,oneof" json:"tenant,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchDeleteRequest) Reset() { + *x = BatchDeleteRequest{} + mi := &file_v1_batch_delete_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchDeleteRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchDeleteRequest) ProtoMessage() {} + +func (x *BatchDeleteRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_delete_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchDeleteRequest.ProtoReflect.Descriptor instead. 
+func (*BatchDeleteRequest) Descriptor() ([]byte, []int) { + return file_v1_batch_delete_proto_rawDescGZIP(), []int{0} +} + +func (x *BatchDeleteRequest) GetCollection() string { + if x != nil { + return x.Collection + } + return "" +} + +func (x *BatchDeleteRequest) GetFilters() *Filters { + if x != nil { + return x.Filters + } + return nil +} + +func (x *BatchDeleteRequest) GetVerbose() bool { + if x != nil { + return x.Verbose + } + return false +} + +func (x *BatchDeleteRequest) GetDryRun() bool { + if x != nil { + return x.DryRun + } + return false +} + +func (x *BatchDeleteRequest) GetConsistencyLevel() ConsistencyLevel { + if x != nil && x.ConsistencyLevel != nil { + return *x.ConsistencyLevel + } + return ConsistencyLevel_CONSISTENCY_LEVEL_UNSPECIFIED +} + +func (x *BatchDeleteRequest) GetTenant() string { + if x != nil && x.Tenant != nil { + return *x.Tenant + } + return "" +} + +type BatchDeleteReply struct { + state protoimpl.MessageState `protogen:"open.v1"` + Took float32 `protobuf:"fixed32,1,opt,name=took,proto3" json:"took,omitempty"` + Failed int64 `protobuf:"varint,2,opt,name=failed,proto3" json:"failed,omitempty"` + Matches int64 `protobuf:"varint,3,opt,name=matches,proto3" json:"matches,omitempty"` + Successful int64 `protobuf:"varint,4,opt,name=successful,proto3" json:"successful,omitempty"` + Objects []*BatchDeleteObject `protobuf:"bytes,5,rep,name=objects,proto3" json:"objects,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchDeleteReply) Reset() { + *x = BatchDeleteReply{} + mi := &file_v1_batch_delete_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchDeleteReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchDeleteReply) ProtoMessage() {} + +func (x *BatchDeleteReply) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_delete_proto_msgTypes[1] + if x != nil { + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchDeleteReply.ProtoReflect.Descriptor instead. +func (*BatchDeleteReply) Descriptor() ([]byte, []int) { + return file_v1_batch_delete_proto_rawDescGZIP(), []int{1} +} + +func (x *BatchDeleteReply) GetTook() float32 { + if x != nil { + return x.Took + } + return 0 +} + +func (x *BatchDeleteReply) GetFailed() int64 { + if x != nil { + return x.Failed + } + return 0 +} + +func (x *BatchDeleteReply) GetMatches() int64 { + if x != nil { + return x.Matches + } + return 0 +} + +func (x *BatchDeleteReply) GetSuccessful() int64 { + if x != nil { + return x.Successful + } + return 0 +} + +func (x *BatchDeleteReply) GetObjects() []*BatchDeleteObject { + if x != nil { + return x.Objects + } + return nil +} + +type BatchDeleteObject struct { + state protoimpl.MessageState `protogen:"open.v1"` + Uuid []byte `protobuf:"bytes,1,opt,name=uuid,proto3" json:"uuid,omitempty"` + Successful bool `protobuf:"varint,2,opt,name=successful,proto3" json:"successful,omitempty"` + Error *string `protobuf:"bytes,3,opt,name=error,proto3,oneof" json:"error,omitempty"` // empty string means no error + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BatchDeleteObject) Reset() { + *x = BatchDeleteObject{} + mi := &file_v1_batch_delete_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchDeleteObject) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchDeleteObject) ProtoMessage() {} + +func (x *BatchDeleteObject) ProtoReflect() protoreflect.Message { + mi := &file_v1_batch_delete_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: 
Use BatchDeleteObject.ProtoReflect.Descriptor instead. +func (*BatchDeleteObject) Descriptor() ([]byte, []int) { + return file_v1_batch_delete_proto_rawDescGZIP(), []int{2} +} + +func (x *BatchDeleteObject) GetUuid() []byte { + if x != nil { + return x.Uuid + } + return nil +} + +func (x *BatchDeleteObject) GetSuccessful() bool { + if x != nil { + return x.Successful + } + return false +} + +func (x *BatchDeleteObject) GetError() string { + if x != nil && x.Error != nil { + return *x.Error + } + return "" +} + +var File_v1_batch_delete_proto protoreflect.FileDescriptor + +const file_v1_batch_delete_proto_rawDesc = "" + + "\n" + + "\x15v1/batch_delete.proto\x12\vweaviate.v1\x1a\rv1/base.proto\"\xa6\x02\n" + + "\x12BatchDeleteRequest\x12\x1e\n" + + "\n" + + "collection\x18\x01 \x01(\tR\n" + + "collection\x12.\n" + + "\afilters\x18\x02 \x01(\v2\x14.weaviate.v1.FiltersR\afilters\x12\x18\n" + + "\averbose\x18\x03 \x01(\bR\averbose\x12\x17\n" + + "\adry_run\x18\x04 \x01(\bR\x06dryRun\x12O\n" + + "\x11consistency_level\x18\x05 \x01(\x0e2\x1d.weaviate.v1.ConsistencyLevelH\x00R\x10consistencyLevel\x88\x01\x01\x12\x1b\n" + + "\x06tenant\x18\x06 \x01(\tH\x01R\x06tenant\x88\x01\x01B\x14\n" + + "\x12_consistency_levelB\t\n" + + "\a_tenant\"\xb2\x01\n" + + "\x10BatchDeleteReply\x12\x12\n" + + "\x04took\x18\x01 \x01(\x02R\x04took\x12\x16\n" + + "\x06failed\x18\x02 \x01(\x03R\x06failed\x12\x18\n" + + "\amatches\x18\x03 \x01(\x03R\amatches\x12\x1e\n" + + "\n" + + "successful\x18\x04 \x01(\x03R\n" + + "successful\x128\n" + + "\aobjects\x18\x05 \x03(\v2\x1e.weaviate.v1.BatchDeleteObjectR\aobjects\"l\n" + + "\x11BatchDeleteObject\x12\x12\n" + + "\x04uuid\x18\x01 \x01(\fR\x04uuid\x12\x1e\n" + + "\n" + + "successful\x18\x02 \x01(\bR\n" + + "successful\x12\x19\n" + + "\x05error\x18\x03 \x01(\tH\x00R\x05error\x88\x01\x01B\b\n" + + "\x06_errorBu\n" + + "#io.weaviate.client.grpc.protocol.v1B\x18WeaviateProtoBatchDeleteZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var 
( + file_v1_batch_delete_proto_rawDescOnce sync.Once + file_v1_batch_delete_proto_rawDescData []byte +) + +func file_v1_batch_delete_proto_rawDescGZIP() []byte { + file_v1_batch_delete_proto_rawDescOnce.Do(func() { + file_v1_batch_delete_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_v1_batch_delete_proto_rawDesc), len(file_v1_batch_delete_proto_rawDesc))) + }) + return file_v1_batch_delete_proto_rawDescData +} + +var file_v1_batch_delete_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_v1_batch_delete_proto_goTypes = []any{ + (*BatchDeleteRequest)(nil), // 0: weaviate.v1.BatchDeleteRequest + (*BatchDeleteReply)(nil), // 1: weaviate.v1.BatchDeleteReply + (*BatchDeleteObject)(nil), // 2: weaviate.v1.BatchDeleteObject + (*Filters)(nil), // 3: weaviate.v1.Filters + (ConsistencyLevel)(0), // 4: weaviate.v1.ConsistencyLevel +} +var file_v1_batch_delete_proto_depIdxs = []int32{ + 3, // 0: weaviate.v1.BatchDeleteRequest.filters:type_name -> weaviate.v1.Filters + 4, // 1: weaviate.v1.BatchDeleteRequest.consistency_level:type_name -> weaviate.v1.ConsistencyLevel + 2, // 2: weaviate.v1.BatchDeleteReply.objects:type_name -> weaviate.v1.BatchDeleteObject + 3, // [3:3] is the sub-list for method output_type + 3, // [3:3] is the sub-list for method input_type + 3, // [3:3] is the sub-list for extension type_name + 3, // [3:3] is the sub-list for extension extendee + 0, // [0:3] is the sub-list for field type_name +} + +func init() { file_v1_batch_delete_proto_init() } +func file_v1_batch_delete_proto_init() { + if File_v1_batch_delete_proto != nil { + return + } + file_v1_base_proto_init() + file_v1_batch_delete_proto_msgTypes[0].OneofWrappers = []any{} + file_v1_batch_delete_proto_msgTypes[2].OneofWrappers = []any{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v1_batch_delete_proto_rawDesc), 
len(file_v1_batch_delete_proto_rawDesc)), + NumEnums: 0, + NumMessages: 3, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_v1_batch_delete_proto_goTypes, + DependencyIndexes: file_v1_batch_delete_proto_depIdxs, + MessageInfos: file_v1_batch_delete_proto_msgTypes, + }.Build() + File_v1_batch_delete_proto = out.File + file_v1_batch_delete_proto_goTypes = nil + file_v1_batch_delete_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/file_replication.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/file_replication.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..90a45494dd75be2691889a6c490b5b7ea989b8c0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/file_replication.pb.go @@ -0,0 +1,810 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package protocol + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type CompressionType int32 + +const ( + CompressionType_COMPRESSION_TYPE_UNSPECIFIED CompressionType = 0 // No compression + CompressionType_COMPRESSION_TYPE_GZIP CompressionType = 1 // gzip (compress/gzip) + CompressionType_COMPRESSION_TYPE_ZLIB CompressionType = 2 // zlib (compress/zlib) + CompressionType_COMPRESSION_TYPE_DEFLATE CompressionType = 3 // raw DEFLATE (compress/flate) +) + +// Enum value maps for CompressionType. 
+var ( + CompressionType_name = map[int32]string{ + 0: "COMPRESSION_TYPE_UNSPECIFIED", + 1: "COMPRESSION_TYPE_GZIP", + 2: "COMPRESSION_TYPE_ZLIB", + 3: "COMPRESSION_TYPE_DEFLATE", + } + CompressionType_value = map[string]int32{ + "COMPRESSION_TYPE_UNSPECIFIED": 0, + "COMPRESSION_TYPE_GZIP": 1, + "COMPRESSION_TYPE_ZLIB": 2, + "COMPRESSION_TYPE_DEFLATE": 3, + } +) + +func (x CompressionType) Enum() *CompressionType { + p := new(CompressionType) + *p = x + return p +} + +func (x CompressionType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (CompressionType) Descriptor() protoreflect.EnumDescriptor { + return file_v1_file_replication_proto_enumTypes[0].Descriptor() +} + +func (CompressionType) Type() protoreflect.EnumType { + return &file_v1_file_replication_proto_enumTypes[0] +} + +func (x CompressionType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use CompressionType.Descriptor instead. 
+func (CompressionType) EnumDescriptor() ([]byte, []int) { + return file_v1_file_replication_proto_rawDescGZIP(), []int{0} +} + +type PauseFileActivityRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + IndexName string `protobuf:"bytes,1,opt,name=index_name,json=indexName,proto3" json:"index_name,omitempty"` + ShardName string `protobuf:"bytes,2,opt,name=shard_name,json=shardName,proto3" json:"shard_name,omitempty"` + SchemaVersion uint64 `protobuf:"varint,3,opt,name=schema_version,json=schemaVersion,proto3" json:"schema_version,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *PauseFileActivityRequest) Reset() { + *x = PauseFileActivityRequest{} + mi := &file_v1_file_replication_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PauseFileActivityRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PauseFileActivityRequest) ProtoMessage() {} + +func (x *PauseFileActivityRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_file_replication_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PauseFileActivityRequest.ProtoReflect.Descriptor instead. 
+func (*PauseFileActivityRequest) Descriptor() ([]byte, []int) { + return file_v1_file_replication_proto_rawDescGZIP(), []int{0} +} + +func (x *PauseFileActivityRequest) GetIndexName() string { + if x != nil { + return x.IndexName + } + return "" +} + +func (x *PauseFileActivityRequest) GetShardName() string { + if x != nil { + return x.ShardName + } + return "" +} + +func (x *PauseFileActivityRequest) GetSchemaVersion() uint64 { + if x != nil { + return x.SchemaVersion + } + return 0 +} + +type PauseFileActivityResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + IndexName string `protobuf:"bytes,1,opt,name=index_name,json=indexName,proto3" json:"index_name,omitempty"` + ShardName string `protobuf:"bytes,2,opt,name=shard_name,json=shardName,proto3" json:"shard_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *PauseFileActivityResponse) Reset() { + *x = PauseFileActivityResponse{} + mi := &file_v1_file_replication_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PauseFileActivityResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PauseFileActivityResponse) ProtoMessage() {} + +func (x *PauseFileActivityResponse) ProtoReflect() protoreflect.Message { + mi := &file_v1_file_replication_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PauseFileActivityResponse.ProtoReflect.Descriptor instead. 
+func (*PauseFileActivityResponse) Descriptor() ([]byte, []int) { + return file_v1_file_replication_proto_rawDescGZIP(), []int{1} +} + +func (x *PauseFileActivityResponse) GetIndexName() string { + if x != nil { + return x.IndexName + } + return "" +} + +func (x *PauseFileActivityResponse) GetShardName() string { + if x != nil { + return x.ShardName + } + return "" +} + +type ResumeFileActivityRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + IndexName string `protobuf:"bytes,1,opt,name=index_name,json=indexName,proto3" json:"index_name,omitempty"` + ShardName string `protobuf:"bytes,2,opt,name=shard_name,json=shardName,proto3" json:"shard_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ResumeFileActivityRequest) Reset() { + *x = ResumeFileActivityRequest{} + mi := &file_v1_file_replication_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ResumeFileActivityRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ResumeFileActivityRequest) ProtoMessage() {} + +func (x *ResumeFileActivityRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_file_replication_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ResumeFileActivityRequest.ProtoReflect.Descriptor instead. 
+func (*ResumeFileActivityRequest) Descriptor() ([]byte, []int) { + return file_v1_file_replication_proto_rawDescGZIP(), []int{2} +} + +func (x *ResumeFileActivityRequest) GetIndexName() string { + if x != nil { + return x.IndexName + } + return "" +} + +func (x *ResumeFileActivityRequest) GetShardName() string { + if x != nil { + return x.ShardName + } + return "" +} + +type ResumeFileActivityResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + IndexName string `protobuf:"bytes,1,opt,name=index_name,json=indexName,proto3" json:"index_name,omitempty"` + ShardName string `protobuf:"bytes,2,opt,name=shard_name,json=shardName,proto3" json:"shard_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ResumeFileActivityResponse) Reset() { + *x = ResumeFileActivityResponse{} + mi := &file_v1_file_replication_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ResumeFileActivityResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ResumeFileActivityResponse) ProtoMessage() {} + +func (x *ResumeFileActivityResponse) ProtoReflect() protoreflect.Message { + mi := &file_v1_file_replication_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ResumeFileActivityResponse.ProtoReflect.Descriptor instead. 
+func (*ResumeFileActivityResponse) Descriptor() ([]byte, []int) { + return file_v1_file_replication_proto_rawDescGZIP(), []int{3} +} + +func (x *ResumeFileActivityResponse) GetIndexName() string { + if x != nil { + return x.IndexName + } + return "" +} + +func (x *ResumeFileActivityResponse) GetShardName() string { + if x != nil { + return x.ShardName + } + return "" +} + +type ListFilesRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + IndexName string `protobuf:"bytes,1,opt,name=index_name,json=indexName,proto3" json:"index_name,omitempty"` + ShardName string `protobuf:"bytes,2,opt,name=shard_name,json=shardName,proto3" json:"shard_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListFilesRequest) Reset() { + *x = ListFilesRequest{} + mi := &file_v1_file_replication_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListFilesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListFilesRequest) ProtoMessage() {} + +func (x *ListFilesRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_file_replication_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListFilesRequest.ProtoReflect.Descriptor instead. 
+func (*ListFilesRequest) Descriptor() ([]byte, []int) { + return file_v1_file_replication_proto_rawDescGZIP(), []int{4} +} + +func (x *ListFilesRequest) GetIndexName() string { + if x != nil { + return x.IndexName + } + return "" +} + +func (x *ListFilesRequest) GetShardName() string { + if x != nil { + return x.ShardName + } + return "" +} + +type ListFilesResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + IndexName string `protobuf:"bytes,1,opt,name=index_name,json=indexName,proto3" json:"index_name,omitempty"` + ShardName string `protobuf:"bytes,2,opt,name=shard_name,json=shardName,proto3" json:"shard_name,omitempty"` + FileNames []string `protobuf:"bytes,3,rep,name=file_names,json=fileNames,proto3" json:"file_names,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListFilesResponse) Reset() { + *x = ListFilesResponse{} + mi := &file_v1_file_replication_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListFilesResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListFilesResponse) ProtoMessage() {} + +func (x *ListFilesResponse) ProtoReflect() protoreflect.Message { + mi := &file_v1_file_replication_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListFilesResponse.ProtoReflect.Descriptor instead. 
+func (*ListFilesResponse) Descriptor() ([]byte, []int) { + return file_v1_file_replication_proto_rawDescGZIP(), []int{5} +} + +func (x *ListFilesResponse) GetIndexName() string { + if x != nil { + return x.IndexName + } + return "" +} + +func (x *ListFilesResponse) GetShardName() string { + if x != nil { + return x.ShardName + } + return "" +} + +func (x *ListFilesResponse) GetFileNames() []string { + if x != nil { + return x.FileNames + } + return nil +} + +type GetFileMetadataRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + IndexName string `protobuf:"bytes,1,opt,name=index_name,json=indexName,proto3" json:"index_name,omitempty"` + ShardName string `protobuf:"bytes,2,opt,name=shard_name,json=shardName,proto3" json:"shard_name,omitempty"` + FileName string `protobuf:"bytes,3,opt,name=file_name,json=fileName,proto3" json:"file_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GetFileMetadataRequest) Reset() { + *x = GetFileMetadataRequest{} + mi := &file_v1_file_replication_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetFileMetadataRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetFileMetadataRequest) ProtoMessage() {} + +func (x *GetFileMetadataRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_file_replication_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetFileMetadataRequest.ProtoReflect.Descriptor instead. 
+func (*GetFileMetadataRequest) Descriptor() ([]byte, []int) { + return file_v1_file_replication_proto_rawDescGZIP(), []int{6} +} + +func (x *GetFileMetadataRequest) GetIndexName() string { + if x != nil { + return x.IndexName + } + return "" +} + +func (x *GetFileMetadataRequest) GetShardName() string { + if x != nil { + return x.ShardName + } + return "" +} + +func (x *GetFileMetadataRequest) GetFileName() string { + if x != nil { + return x.FileName + } + return "" +} + +type FileMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + IndexName string `protobuf:"bytes,1,opt,name=index_name,json=indexName,proto3" json:"index_name,omitempty"` + ShardName string `protobuf:"bytes,2,opt,name=shard_name,json=shardName,proto3" json:"shard_name,omitempty"` + FileName string `protobuf:"bytes,3,opt,name=file_name,json=fileName,proto3" json:"file_name,omitempty"` + Size int64 `protobuf:"varint,4,opt,name=size,proto3" json:"size,omitempty"` + Crc32 uint32 `protobuf:"varint,5,opt,name=crc32,proto3" json:"crc32,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *FileMetadata) Reset() { + *x = FileMetadata{} + mi := &file_v1_file_replication_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *FileMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileMetadata) ProtoMessage() {} + +func (x *FileMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_file_replication_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileMetadata.ProtoReflect.Descriptor instead. 
+func (*FileMetadata) Descriptor() ([]byte, []int) { + return file_v1_file_replication_proto_rawDescGZIP(), []int{7} +} + +func (x *FileMetadata) GetIndexName() string { + if x != nil { + return x.IndexName + } + return "" +} + +func (x *FileMetadata) GetShardName() string { + if x != nil { + return x.ShardName + } + return "" +} + +func (x *FileMetadata) GetFileName() string { + if x != nil { + return x.FileName + } + return "" +} + +func (x *FileMetadata) GetSize() int64 { + if x != nil { + return x.Size + } + return 0 +} + +func (x *FileMetadata) GetCrc32() uint32 { + if x != nil { + return x.Crc32 + } + return 0 +} + +type GetFileRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + IndexName string `protobuf:"bytes,1,opt,name=index_name,json=indexName,proto3" json:"index_name,omitempty"` + ShardName string `protobuf:"bytes,2,opt,name=shard_name,json=shardName,proto3" json:"shard_name,omitempty"` + FileName string `protobuf:"bytes,3,opt,name=file_name,json=fileName,proto3" json:"file_name,omitempty"` + Compression CompressionType `protobuf:"varint,4,opt,name=compression,proto3,enum=weaviate.v1.CompressionType" json:"compression,omitempty"` // Requested compression algorithm for streamed chunks + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GetFileRequest) Reset() { + *x = GetFileRequest{} + mi := &file_v1_file_replication_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetFileRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetFileRequest) ProtoMessage() {} + +func (x *GetFileRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_file_replication_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
GetFileRequest.ProtoReflect.Descriptor instead. +func (*GetFileRequest) Descriptor() ([]byte, []int) { + return file_v1_file_replication_proto_rawDescGZIP(), []int{8} +} + +func (x *GetFileRequest) GetIndexName() string { + if x != nil { + return x.IndexName + } + return "" +} + +func (x *GetFileRequest) GetShardName() string { + if x != nil { + return x.ShardName + } + return "" +} + +func (x *GetFileRequest) GetFileName() string { + if x != nil { + return x.FileName + } + return "" +} + +func (x *GetFileRequest) GetCompression() CompressionType { + if x != nil { + return x.Compression + } + return CompressionType_COMPRESSION_TYPE_UNSPECIFIED +} + +type FileChunk struct { + state protoimpl.MessageState `protogen:"open.v1"` + Offset int64 `protobuf:"varint,1,opt,name=offset,proto3" json:"offset,omitempty"` // Byte offset in the uncompressed file + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` // Compressed or raw chunk data + Eof bool `protobuf:"varint,3,opt,name=eof,proto3" json:"eof,omitempty"` // Indicates final chunk + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *FileChunk) Reset() { + *x = FileChunk{} + mi := &file_v1_file_replication_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *FileChunk) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileChunk) ProtoMessage() {} + +func (x *FileChunk) ProtoReflect() protoreflect.Message { + mi := &file_v1_file_replication_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileChunk.ProtoReflect.Descriptor instead. 
+func (*FileChunk) Descriptor() ([]byte, []int) { + return file_v1_file_replication_proto_rawDescGZIP(), []int{9} +} + +func (x *FileChunk) GetOffset() int64 { + if x != nil { + return x.Offset + } + return 0 +} + +func (x *FileChunk) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +func (x *FileChunk) GetEof() bool { + if x != nil { + return x.Eof + } + return false +} + +var File_v1_file_replication_proto protoreflect.FileDescriptor + +const file_v1_file_replication_proto_rawDesc = "" + + "\n" + + "\x19v1/file_replication.proto\x12\vweaviate.v1\"\x7f\n" + + "\x18PauseFileActivityRequest\x12\x1d\n" + + "\n" + + "index_name\x18\x01 \x01(\tR\tindexName\x12\x1d\n" + + "\n" + + "shard_name\x18\x02 \x01(\tR\tshardName\x12%\n" + + "\x0eschema_version\x18\x03 \x01(\x04R\rschemaVersion\"Y\n" + + "\x19PauseFileActivityResponse\x12\x1d\n" + + "\n" + + "index_name\x18\x01 \x01(\tR\tindexName\x12\x1d\n" + + "\n" + + "shard_name\x18\x02 \x01(\tR\tshardName\"Y\n" + + "\x19ResumeFileActivityRequest\x12\x1d\n" + + "\n" + + "index_name\x18\x01 \x01(\tR\tindexName\x12\x1d\n" + + "\n" + + "shard_name\x18\x02 \x01(\tR\tshardName\"Z\n" + + "\x1aResumeFileActivityResponse\x12\x1d\n" + + "\n" + + "index_name\x18\x01 \x01(\tR\tindexName\x12\x1d\n" + + "\n" + + "shard_name\x18\x02 \x01(\tR\tshardName\"P\n" + + "\x10ListFilesRequest\x12\x1d\n" + + "\n" + + "index_name\x18\x01 \x01(\tR\tindexName\x12\x1d\n" + + "\n" + + "shard_name\x18\x02 \x01(\tR\tshardName\"p\n" + + "\x11ListFilesResponse\x12\x1d\n" + + "\n" + + "index_name\x18\x01 \x01(\tR\tindexName\x12\x1d\n" + + "\n" + + "shard_name\x18\x02 \x01(\tR\tshardName\x12\x1d\n" + + "\n" + + "file_names\x18\x03 \x03(\tR\tfileNames\"s\n" + + "\x16GetFileMetadataRequest\x12\x1d\n" + + "\n" + + "index_name\x18\x01 \x01(\tR\tindexName\x12\x1d\n" + + "\n" + + "shard_name\x18\x02 \x01(\tR\tshardName\x12\x1b\n" + + "\tfile_name\x18\x03 \x01(\tR\bfileName\"\x93\x01\n" + + "\fFileMetadata\x12\x1d\n" + + "\n" + + 
"index_name\x18\x01 \x01(\tR\tindexName\x12\x1d\n" + + "\n" + + "shard_name\x18\x02 \x01(\tR\tshardName\x12\x1b\n" + + "\tfile_name\x18\x03 \x01(\tR\bfileName\x12\x12\n" + + "\x04size\x18\x04 \x01(\x03R\x04size\x12\x14\n" + + "\x05crc32\x18\x05 \x01(\rR\x05crc32\"\xab\x01\n" + + "\x0eGetFileRequest\x12\x1d\n" + + "\n" + + "index_name\x18\x01 \x01(\tR\tindexName\x12\x1d\n" + + "\n" + + "shard_name\x18\x02 \x01(\tR\tshardName\x12\x1b\n" + + "\tfile_name\x18\x03 \x01(\tR\bfileName\x12>\n" + + "\vcompression\x18\x04 \x01(\x0e2\x1c.weaviate.v1.CompressionTypeR\vcompression\"I\n" + + "\tFileChunk\x12\x16\n" + + "\x06offset\x18\x01 \x01(\x03R\x06offset\x12\x12\n" + + "\x04data\x18\x02 \x01(\fR\x04data\x12\x10\n" + + "\x03eof\x18\x03 \x01(\bR\x03eof*\x87\x01\n" + + "\x0fCompressionType\x12 \n" + + "\x1cCOMPRESSION_TYPE_UNSPECIFIED\x10\x00\x12\x19\n" + + "\x15COMPRESSION_TYPE_GZIP\x10\x01\x12\x19\n" + + "\x15COMPRESSION_TYPE_ZLIB\x10\x02\x12\x1c\n" + + "\x18COMPRESSION_TYPE_DEFLATE\x10\x032\xca\x03\n" + + "\x16FileReplicationService\x12b\n" + + "\x11PauseFileActivity\x12%.weaviate.v1.PauseFileActivityRequest\x1a&.weaviate.v1.PauseFileActivityResponse\x12e\n" + + "\x12ResumeFileActivity\x12&.weaviate.v1.ResumeFileActivityRequest\x1a'.weaviate.v1.ResumeFileActivityResponse\x12J\n" + + "\tListFiles\x12\x1d.weaviate.v1.ListFilesRequest\x1a\x1e.weaviate.v1.ListFilesResponse\x12U\n" + + "\x0fGetFileMetadata\x12#.weaviate.v1.GetFileMetadataRequest\x1a\x19.weaviate.v1.FileMetadata(\x010\x01\x12B\n" + + "\aGetFile\x12\x1b.weaviate.v1.GetFileRequest\x1a\x16.weaviate.v1.FileChunk(\x010\x01Bj\n" + + "#io.weaviate.client.grpc.protocol.v1B\rWeaviateProtoZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var ( + file_v1_file_replication_proto_rawDescOnce sync.Once + file_v1_file_replication_proto_rawDescData []byte +) + +func file_v1_file_replication_proto_rawDescGZIP() []byte { + file_v1_file_replication_proto_rawDescOnce.Do(func() { + 
file_v1_file_replication_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_v1_file_replication_proto_rawDesc), len(file_v1_file_replication_proto_rawDesc))) + }) + return file_v1_file_replication_proto_rawDescData +} + +var file_v1_file_replication_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_v1_file_replication_proto_msgTypes = make([]protoimpl.MessageInfo, 10) +var file_v1_file_replication_proto_goTypes = []any{ + (CompressionType)(0), // 0: weaviate.v1.CompressionType + (*PauseFileActivityRequest)(nil), // 1: weaviate.v1.PauseFileActivityRequest + (*PauseFileActivityResponse)(nil), // 2: weaviate.v1.PauseFileActivityResponse + (*ResumeFileActivityRequest)(nil), // 3: weaviate.v1.ResumeFileActivityRequest + (*ResumeFileActivityResponse)(nil), // 4: weaviate.v1.ResumeFileActivityResponse + (*ListFilesRequest)(nil), // 5: weaviate.v1.ListFilesRequest + (*ListFilesResponse)(nil), // 6: weaviate.v1.ListFilesResponse + (*GetFileMetadataRequest)(nil), // 7: weaviate.v1.GetFileMetadataRequest + (*FileMetadata)(nil), // 8: weaviate.v1.FileMetadata + (*GetFileRequest)(nil), // 9: weaviate.v1.GetFileRequest + (*FileChunk)(nil), // 10: weaviate.v1.FileChunk +} +var file_v1_file_replication_proto_depIdxs = []int32{ + 0, // 0: weaviate.v1.GetFileRequest.compression:type_name -> weaviate.v1.CompressionType + 1, // 1: weaviate.v1.FileReplicationService.PauseFileActivity:input_type -> weaviate.v1.PauseFileActivityRequest + 3, // 2: weaviate.v1.FileReplicationService.ResumeFileActivity:input_type -> weaviate.v1.ResumeFileActivityRequest + 5, // 3: weaviate.v1.FileReplicationService.ListFiles:input_type -> weaviate.v1.ListFilesRequest + 7, // 4: weaviate.v1.FileReplicationService.GetFileMetadata:input_type -> weaviate.v1.GetFileMetadataRequest + 9, // 5: weaviate.v1.FileReplicationService.GetFile:input_type -> weaviate.v1.GetFileRequest + 2, // 6: weaviate.v1.FileReplicationService.PauseFileActivity:output_type -> 
weaviate.v1.PauseFileActivityResponse + 4, // 7: weaviate.v1.FileReplicationService.ResumeFileActivity:output_type -> weaviate.v1.ResumeFileActivityResponse + 6, // 8: weaviate.v1.FileReplicationService.ListFiles:output_type -> weaviate.v1.ListFilesResponse + 8, // 9: weaviate.v1.FileReplicationService.GetFileMetadata:output_type -> weaviate.v1.FileMetadata + 10, // 10: weaviate.v1.FileReplicationService.GetFile:output_type -> weaviate.v1.FileChunk + 6, // [6:11] is the sub-list for method output_type + 1, // [1:6] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_v1_file_replication_proto_init() } +func file_v1_file_replication_proto_init() { + if File_v1_file_replication_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v1_file_replication_proto_rawDesc), len(file_v1_file_replication_proto_rawDesc)), + NumEnums: 1, + NumMessages: 10, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_v1_file_replication_proto_goTypes, + DependencyIndexes: file_v1_file_replication_proto_depIdxs, + EnumInfos: file_v1_file_replication_proto_enumTypes, + MessageInfos: file_v1_file_replication_proto_msgTypes, + }.Build() + File_v1_file_replication_proto = out.File + file_v1_file_replication_proto_goTypes = nil + file_v1_file_replication_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/file_replication_grpc.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/file_replication_grpc.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..aa5b84df5546a70168efde1f7cb4e12b21e9c7fd --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/file_replication_grpc.pb.go @@ -0,0 +1,260 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. + +package protocol + +import ( + context "context" + + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + FileReplicationService_PauseFileActivity_FullMethodName = "/weaviate.v1.FileReplicationService/PauseFileActivity" + FileReplicationService_ResumeFileActivity_FullMethodName = "/weaviate.v1.FileReplicationService/ResumeFileActivity" + FileReplicationService_ListFiles_FullMethodName = "/weaviate.v1.FileReplicationService/ListFiles" + FileReplicationService_GetFileMetadata_FullMethodName = "/weaviate.v1.FileReplicationService/GetFileMetadata" + FileReplicationService_GetFile_FullMethodName = "/weaviate.v1.FileReplicationService/GetFile" +) + +// FileReplicationServiceClient is the client API for FileReplicationService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
+type FileReplicationServiceClient interface { + PauseFileActivity(ctx context.Context, in *PauseFileActivityRequest, opts ...grpc.CallOption) (*PauseFileActivityResponse, error) + ResumeFileActivity(ctx context.Context, in *ResumeFileActivityRequest, opts ...grpc.CallOption) (*ResumeFileActivityResponse, error) + ListFiles(ctx context.Context, in *ListFilesRequest, opts ...grpc.CallOption) (*ListFilesResponse, error) + GetFileMetadata(ctx context.Context, opts ...grpc.CallOption) (grpc.BidiStreamingClient[GetFileMetadataRequest, FileMetadata], error) + GetFile(ctx context.Context, opts ...grpc.CallOption) (grpc.BidiStreamingClient[GetFileRequest, FileChunk], error) +} + +type fileReplicationServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewFileReplicationServiceClient(cc grpc.ClientConnInterface) FileReplicationServiceClient { + return &fileReplicationServiceClient{cc} +} + +func (c *fileReplicationServiceClient) PauseFileActivity(ctx context.Context, in *PauseFileActivityRequest, opts ...grpc.CallOption) (*PauseFileActivityResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(PauseFileActivityResponse) + err := c.cc.Invoke(ctx, FileReplicationService_PauseFileActivity_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *fileReplicationServiceClient) ResumeFileActivity(ctx context.Context, in *ResumeFileActivityRequest, opts ...grpc.CallOption) (*ResumeFileActivityResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ResumeFileActivityResponse) + err := c.cc.Invoke(ctx, FileReplicationService_ResumeFileActivity_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *fileReplicationServiceClient) ListFiles(ctx context.Context, in *ListFilesRequest, opts ...grpc.CallOption) (*ListFilesResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListFilesResponse) + err := c.cc.Invoke(ctx, FileReplicationService_ListFiles_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *fileReplicationServiceClient) GetFileMetadata(ctx context.Context, opts ...grpc.CallOption) (grpc.BidiStreamingClient[GetFileMetadataRequest, FileMetadata], error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + stream, err := c.cc.NewStream(ctx, &FileReplicationService_ServiceDesc.Streams[0], FileReplicationService_GetFileMetadata_FullMethodName, cOpts...) + if err != nil { + return nil, err + } + x := &grpc.GenericClientStream[GetFileMetadataRequest, FileMetadata]{ClientStream: stream} + return x, nil +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type FileReplicationService_GetFileMetadataClient = grpc.BidiStreamingClient[GetFileMetadataRequest, FileMetadata] + +func (c *fileReplicationServiceClient) GetFile(ctx context.Context, opts ...grpc.CallOption) (grpc.BidiStreamingClient[GetFileRequest, FileChunk], error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + stream, err := c.cc.NewStream(ctx, &FileReplicationService_ServiceDesc.Streams[1], FileReplicationService_GetFile_FullMethodName, cOpts...) + if err != nil { + return nil, err + } + x := &grpc.GenericClientStream[GetFileRequest, FileChunk]{ClientStream: stream} + return x, nil +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. 
+type FileReplicationService_GetFileClient = grpc.BidiStreamingClient[GetFileRequest, FileChunk] + +// FileReplicationServiceServer is the server API for FileReplicationService service. +// All implementations must embed UnimplementedFileReplicationServiceServer +// for forward compatibility. +type FileReplicationServiceServer interface { + PauseFileActivity(context.Context, *PauseFileActivityRequest) (*PauseFileActivityResponse, error) + ResumeFileActivity(context.Context, *ResumeFileActivityRequest) (*ResumeFileActivityResponse, error) + ListFiles(context.Context, *ListFilesRequest) (*ListFilesResponse, error) + GetFileMetadata(grpc.BidiStreamingServer[GetFileMetadataRequest, FileMetadata]) error + GetFile(grpc.BidiStreamingServer[GetFileRequest, FileChunk]) error + mustEmbedUnimplementedFileReplicationServiceServer() +} + +// UnimplementedFileReplicationServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. 
+type UnimplementedFileReplicationServiceServer struct{} + +func (UnimplementedFileReplicationServiceServer) PauseFileActivity(context.Context, *PauseFileActivityRequest) (*PauseFileActivityResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method PauseFileActivity not implemented") +} +func (UnimplementedFileReplicationServiceServer) ResumeFileActivity(context.Context, *ResumeFileActivityRequest) (*ResumeFileActivityResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ResumeFileActivity not implemented") +} +func (UnimplementedFileReplicationServiceServer) ListFiles(context.Context, *ListFilesRequest) (*ListFilesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListFiles not implemented") +} +func (UnimplementedFileReplicationServiceServer) GetFileMetadata(grpc.BidiStreamingServer[GetFileMetadataRequest, FileMetadata]) error { + return status.Errorf(codes.Unimplemented, "method GetFileMetadata not implemented") +} +func (UnimplementedFileReplicationServiceServer) GetFile(grpc.BidiStreamingServer[GetFileRequest, FileChunk]) error { + return status.Errorf(codes.Unimplemented, "method GetFile not implemented") +} +func (UnimplementedFileReplicationServiceServer) mustEmbedUnimplementedFileReplicationServiceServer() { +} +func (UnimplementedFileReplicationServiceServer) testEmbeddedByValue() {} + +// UnsafeFileReplicationServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to FileReplicationServiceServer will +// result in compilation errors. +type UnsafeFileReplicationServiceServer interface { + mustEmbedUnimplementedFileReplicationServiceServer() +} + +func RegisterFileReplicationServiceServer(s grpc.ServiceRegistrar, srv FileReplicationServiceServer) { + // If the following call panics, it indicates UnimplementedFileReplicationServiceServer was + // embedded by pointer and is nil.
This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&FileReplicationService_ServiceDesc, srv) +} + +func _FileReplicationService_PauseFileActivity_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PauseFileActivityRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FileReplicationServiceServer).PauseFileActivity(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: FileReplicationService_PauseFileActivity_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FileReplicationServiceServer).PauseFileActivity(ctx, req.(*PauseFileActivityRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FileReplicationService_ResumeFileActivity_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ResumeFileActivityRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FileReplicationServiceServer).ResumeFileActivity(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: FileReplicationService_ResumeFileActivity_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FileReplicationServiceServer).ResumeFileActivity(ctx, req.(*ResumeFileActivityRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FileReplicationService_ListFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in 
:= new(ListFilesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FileReplicationServiceServer).ListFiles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: FileReplicationService_ListFiles_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FileReplicationServiceServer).ListFiles(ctx, req.(*ListFilesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FileReplicationService_GetFileMetadata_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(FileReplicationServiceServer).GetFileMetadata(&grpc.GenericServerStream[GetFileMetadataRequest, FileMetadata]{ServerStream: stream}) +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type FileReplicationService_GetFileMetadataServer = grpc.BidiStreamingServer[GetFileMetadataRequest, FileMetadata] + +func _FileReplicationService_GetFile_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(FileReplicationServiceServer).GetFile(&grpc.GenericServerStream[GetFileRequest, FileChunk]{ServerStream: stream}) +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type FileReplicationService_GetFileServer = grpc.BidiStreamingServer[GetFileRequest, FileChunk] + +// FileReplicationService_ServiceDesc is the grpc.ServiceDesc for FileReplicationService service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var FileReplicationService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "weaviate.v1.FileReplicationService", + HandlerType: (*FileReplicationServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "PauseFileActivity", + Handler: _FileReplicationService_PauseFileActivity_Handler, + }, + { + MethodName: "ResumeFileActivity", + Handler: _FileReplicationService_ResumeFileActivity_Handler, + }, + { + MethodName: "ListFiles", + Handler: _FileReplicationService_ListFiles_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "GetFileMetadata", + Handler: _FileReplicationService_GetFileMetadata_Handler, + ServerStreams: true, + ClientStreams: true, + }, + { + StreamName: "GetFile", + Handler: _FileReplicationService_GetFile_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "v1/file_replication.proto", +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/generative.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/generative.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..44d05f0bc4caf7ce26a86390c847b67303ce5ff7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/generative.pb.go @@ -0,0 +1,4193 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package protocol + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type GenerativeSearch struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Deprecated: Marked as deprecated in v1/generative.proto. + SingleResponsePrompt string `protobuf:"bytes,1,opt,name=single_response_prompt,json=singleResponsePrompt,proto3" json:"single_response_prompt,omitempty"` + // Deprecated: Marked as deprecated in v1/generative.proto. + GroupedResponseTask string `protobuf:"bytes,2,opt,name=grouped_response_task,json=groupedResponseTask,proto3" json:"grouped_response_task,omitempty"` + // Deprecated: Marked as deprecated in v1/generative.proto. + GroupedProperties []string `protobuf:"bytes,3,rep,name=grouped_properties,json=groupedProperties,proto3" json:"grouped_properties,omitempty"` + Single *GenerativeSearch_Single `protobuf:"bytes,4,opt,name=single,proto3" json:"single,omitempty"` + Grouped *GenerativeSearch_Grouped `protobuf:"bytes,5,opt,name=grouped,proto3" json:"grouped,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeSearch) Reset() { + *x = GenerativeSearch{} + mi := &file_v1_generative_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeSearch) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeSearch) ProtoMessage() {} + +func (x *GenerativeSearch) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeSearch.ProtoReflect.Descriptor instead. +func (*GenerativeSearch) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{0} +} + +// Deprecated: Marked as deprecated in v1/generative.proto. 
+func (x *GenerativeSearch) GetSingleResponsePrompt() string { + if x != nil { + return x.SingleResponsePrompt + } + return "" +} + +// Deprecated: Marked as deprecated in v1/generative.proto. +func (x *GenerativeSearch) GetGroupedResponseTask() string { + if x != nil { + return x.GroupedResponseTask + } + return "" +} + +// Deprecated: Marked as deprecated in v1/generative.proto. +func (x *GenerativeSearch) GetGroupedProperties() []string { + if x != nil { + return x.GroupedProperties + } + return nil +} + +func (x *GenerativeSearch) GetSingle() *GenerativeSearch_Single { + if x != nil { + return x.Single + } + return nil +} + +func (x *GenerativeSearch) GetGrouped() *GenerativeSearch_Grouped { + if x != nil { + return x.Grouped + } + return nil +} + +type GenerativeProvider struct { + state protoimpl.MessageState `protogen:"open.v1"` + ReturnMetadata bool `protobuf:"varint,1,opt,name=return_metadata,json=returnMetadata,proto3" json:"return_metadata,omitempty"` + // Types that are valid to be assigned to Kind: + // + // *GenerativeProvider_Anthropic + // *GenerativeProvider_Anyscale + // *GenerativeProvider_Aws + // *GenerativeProvider_Cohere + // *GenerativeProvider_Dummy + // *GenerativeProvider_Mistral + // *GenerativeProvider_Ollama + // *GenerativeProvider_Openai + // *GenerativeProvider_Google + // *GenerativeProvider_Databricks + // *GenerativeProvider_Friendliai + // *GenerativeProvider_Nvidia + // *GenerativeProvider_Xai + Kind isGenerativeProvider_Kind `protobuf_oneof:"kind"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeProvider) Reset() { + *x = GenerativeProvider{} + mi := &file_v1_generative_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeProvider) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeProvider) ProtoMessage() {} + +func (x *GenerativeProvider) ProtoReflect() protoreflect.Message { 
+ mi := &file_v1_generative_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeProvider.ProtoReflect.Descriptor instead. +func (*GenerativeProvider) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{1} +} + +func (x *GenerativeProvider) GetReturnMetadata() bool { + if x != nil { + return x.ReturnMetadata + } + return false +} + +func (x *GenerativeProvider) GetKind() isGenerativeProvider_Kind { + if x != nil { + return x.Kind + } + return nil +} + +func (x *GenerativeProvider) GetAnthropic() *GenerativeAnthropic { + if x != nil { + if x, ok := x.Kind.(*GenerativeProvider_Anthropic); ok { + return x.Anthropic + } + } + return nil +} + +func (x *GenerativeProvider) GetAnyscale() *GenerativeAnyscale { + if x != nil { + if x, ok := x.Kind.(*GenerativeProvider_Anyscale); ok { + return x.Anyscale + } + } + return nil +} + +func (x *GenerativeProvider) GetAws() *GenerativeAWS { + if x != nil { + if x, ok := x.Kind.(*GenerativeProvider_Aws); ok { + return x.Aws + } + } + return nil +} + +func (x *GenerativeProvider) GetCohere() *GenerativeCohere { + if x != nil { + if x, ok := x.Kind.(*GenerativeProvider_Cohere); ok { + return x.Cohere + } + } + return nil +} + +func (x *GenerativeProvider) GetDummy() *GenerativeDummy { + if x != nil { + if x, ok := x.Kind.(*GenerativeProvider_Dummy); ok { + return x.Dummy + } + } + return nil +} + +func (x *GenerativeProvider) GetMistral() *GenerativeMistral { + if x != nil { + if x, ok := x.Kind.(*GenerativeProvider_Mistral); ok { + return x.Mistral + } + } + return nil +} + +func (x *GenerativeProvider) GetOllama() *GenerativeOllama { + if x != nil { + if x, ok := x.Kind.(*GenerativeProvider_Ollama); ok { + return x.Ollama + } + } + return nil +} + +func (x *GenerativeProvider) GetOpenai() *GenerativeOpenAI { + if x != nil 
{ + if x, ok := x.Kind.(*GenerativeProvider_Openai); ok { + return x.Openai + } + } + return nil +} + +func (x *GenerativeProvider) GetGoogle() *GenerativeGoogle { + if x != nil { + if x, ok := x.Kind.(*GenerativeProvider_Google); ok { + return x.Google + } + } + return nil +} + +func (x *GenerativeProvider) GetDatabricks() *GenerativeDatabricks { + if x != nil { + if x, ok := x.Kind.(*GenerativeProvider_Databricks); ok { + return x.Databricks + } + } + return nil +} + +func (x *GenerativeProvider) GetFriendliai() *GenerativeFriendliAI { + if x != nil { + if x, ok := x.Kind.(*GenerativeProvider_Friendliai); ok { + return x.Friendliai + } + } + return nil +} + +func (x *GenerativeProvider) GetNvidia() *GenerativeNvidia { + if x != nil { + if x, ok := x.Kind.(*GenerativeProvider_Nvidia); ok { + return x.Nvidia + } + } + return nil +} + +func (x *GenerativeProvider) GetXai() *GenerativeXAI { + if x != nil { + if x, ok := x.Kind.(*GenerativeProvider_Xai); ok { + return x.Xai + } + } + return nil +} + +type isGenerativeProvider_Kind interface { + isGenerativeProvider_Kind() +} + +type GenerativeProvider_Anthropic struct { + Anthropic *GenerativeAnthropic `protobuf:"bytes,2,opt,name=anthropic,proto3,oneof"` +} + +type GenerativeProvider_Anyscale struct { + Anyscale *GenerativeAnyscale `protobuf:"bytes,3,opt,name=anyscale,proto3,oneof"` +} + +type GenerativeProvider_Aws struct { + Aws *GenerativeAWS `protobuf:"bytes,4,opt,name=aws,proto3,oneof"` +} + +type GenerativeProvider_Cohere struct { + Cohere *GenerativeCohere `protobuf:"bytes,5,opt,name=cohere,proto3,oneof"` +} + +type GenerativeProvider_Dummy struct { + Dummy *GenerativeDummy `protobuf:"bytes,6,opt,name=dummy,proto3,oneof"` +} + +type GenerativeProvider_Mistral struct { + Mistral *GenerativeMistral `protobuf:"bytes,7,opt,name=mistral,proto3,oneof"` +} + +type GenerativeProvider_Ollama struct { + Ollama *GenerativeOllama `protobuf:"bytes,8,opt,name=ollama,proto3,oneof"` +} + +type GenerativeProvider_Openai struct 
{ + Openai *GenerativeOpenAI `protobuf:"bytes,9,opt,name=openai,proto3,oneof"` +} + +type GenerativeProvider_Google struct { + Google *GenerativeGoogle `protobuf:"bytes,10,opt,name=google,proto3,oneof"` +} + +type GenerativeProvider_Databricks struct { + Databricks *GenerativeDatabricks `protobuf:"bytes,11,opt,name=databricks,proto3,oneof"` +} + +type GenerativeProvider_Friendliai struct { + Friendliai *GenerativeFriendliAI `protobuf:"bytes,12,opt,name=friendliai,proto3,oneof"` +} + +type GenerativeProvider_Nvidia struct { + Nvidia *GenerativeNvidia `protobuf:"bytes,13,opt,name=nvidia,proto3,oneof"` +} + +type GenerativeProvider_Xai struct { + Xai *GenerativeXAI `protobuf:"bytes,14,opt,name=xai,proto3,oneof"` +} + +func (*GenerativeProvider_Anthropic) isGenerativeProvider_Kind() {} + +func (*GenerativeProvider_Anyscale) isGenerativeProvider_Kind() {} + +func (*GenerativeProvider_Aws) isGenerativeProvider_Kind() {} + +func (*GenerativeProvider_Cohere) isGenerativeProvider_Kind() {} + +func (*GenerativeProvider_Dummy) isGenerativeProvider_Kind() {} + +func (*GenerativeProvider_Mistral) isGenerativeProvider_Kind() {} + +func (*GenerativeProvider_Ollama) isGenerativeProvider_Kind() {} + +func (*GenerativeProvider_Openai) isGenerativeProvider_Kind() {} + +func (*GenerativeProvider_Google) isGenerativeProvider_Kind() {} + +func (*GenerativeProvider_Databricks) isGenerativeProvider_Kind() {} + +func (*GenerativeProvider_Friendliai) isGenerativeProvider_Kind() {} + +func (*GenerativeProvider_Nvidia) isGenerativeProvider_Kind() {} + +func (*GenerativeProvider_Xai) isGenerativeProvider_Kind() {} + +type GenerativeAnthropic struct { + state protoimpl.MessageState `protogen:"open.v1"` + BaseUrl *string `protobuf:"bytes,1,opt,name=base_url,json=baseUrl,proto3,oneof" json:"base_url,omitempty"` + MaxTokens *int64 `protobuf:"varint,2,opt,name=max_tokens,json=maxTokens,proto3,oneof" json:"max_tokens,omitempty"` + Model *string `protobuf:"bytes,3,opt,name=model,proto3,oneof" 
json:"model,omitempty"` + Temperature *float64 `protobuf:"fixed64,4,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` + TopK *int64 `protobuf:"varint,5,opt,name=top_k,json=topK,proto3,oneof" json:"top_k,omitempty"` + TopP *float64 `protobuf:"fixed64,6,opt,name=top_p,json=topP,proto3,oneof" json:"top_p,omitempty"` + StopSequences *TextArray `protobuf:"bytes,7,opt,name=stop_sequences,json=stopSequences,proto3,oneof" json:"stop_sequences,omitempty"` + Images *TextArray `protobuf:"bytes,8,opt,name=images,proto3,oneof" json:"images,omitempty"` + ImageProperties *TextArray `protobuf:"bytes,9,opt,name=image_properties,json=imageProperties,proto3,oneof" json:"image_properties,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeAnthropic) Reset() { + *x = GenerativeAnthropic{} + mi := &file_v1_generative_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeAnthropic) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeAnthropic) ProtoMessage() {} + +func (x *GenerativeAnthropic) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeAnthropic.ProtoReflect.Descriptor instead. 
+func (*GenerativeAnthropic) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{2} +} + +func (x *GenerativeAnthropic) GetBaseUrl() string { + if x != nil && x.BaseUrl != nil { + return *x.BaseUrl + } + return "" +} + +func (x *GenerativeAnthropic) GetMaxTokens() int64 { + if x != nil && x.MaxTokens != nil { + return *x.MaxTokens + } + return 0 +} + +func (x *GenerativeAnthropic) GetModel() string { + if x != nil && x.Model != nil { + return *x.Model + } + return "" +} + +func (x *GenerativeAnthropic) GetTemperature() float64 { + if x != nil && x.Temperature != nil { + return *x.Temperature + } + return 0 +} + +func (x *GenerativeAnthropic) GetTopK() int64 { + if x != nil && x.TopK != nil { + return *x.TopK + } + return 0 +} + +func (x *GenerativeAnthropic) GetTopP() float64 { + if x != nil && x.TopP != nil { + return *x.TopP + } + return 0 +} + +func (x *GenerativeAnthropic) GetStopSequences() *TextArray { + if x != nil { + return x.StopSequences + } + return nil +} + +func (x *GenerativeAnthropic) GetImages() *TextArray { + if x != nil { + return x.Images + } + return nil +} + +func (x *GenerativeAnthropic) GetImageProperties() *TextArray { + if x != nil { + return x.ImageProperties + } + return nil +} + +type GenerativeAnyscale struct { + state protoimpl.MessageState `protogen:"open.v1"` + BaseUrl *string `protobuf:"bytes,1,opt,name=base_url,json=baseUrl,proto3,oneof" json:"base_url,omitempty"` + Model *string `protobuf:"bytes,2,opt,name=model,proto3,oneof" json:"model,omitempty"` + Temperature *float64 `protobuf:"fixed64,3,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeAnyscale) Reset() { + *x = GenerativeAnyscale{} + mi := &file_v1_generative_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeAnyscale) String() string { + return 
protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeAnyscale) ProtoMessage() {} + +func (x *GenerativeAnyscale) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeAnyscale.ProtoReflect.Descriptor instead. +func (*GenerativeAnyscale) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{3} +} + +func (x *GenerativeAnyscale) GetBaseUrl() string { + if x != nil && x.BaseUrl != nil { + return *x.BaseUrl + } + return "" +} + +func (x *GenerativeAnyscale) GetModel() string { + if x != nil && x.Model != nil { + return *x.Model + } + return "" +} + +func (x *GenerativeAnyscale) GetTemperature() float64 { + if x != nil && x.Temperature != nil { + return *x.Temperature + } + return 0 +} + +type GenerativeAWS struct { + state protoimpl.MessageState `protogen:"open.v1"` + Model *string `protobuf:"bytes,3,opt,name=model,proto3,oneof" json:"model,omitempty"` + Temperature *float64 `protobuf:"fixed64,8,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` + Service *string `protobuf:"bytes,9,opt,name=service,proto3,oneof" json:"service,omitempty"` + Region *string `protobuf:"bytes,10,opt,name=region,proto3,oneof" json:"region,omitempty"` + Endpoint *string `protobuf:"bytes,11,opt,name=endpoint,proto3,oneof" json:"endpoint,omitempty"` + TargetModel *string `protobuf:"bytes,12,opt,name=target_model,json=targetModel,proto3,oneof" json:"target_model,omitempty"` + TargetVariant *string `protobuf:"bytes,13,opt,name=target_variant,json=targetVariant,proto3,oneof" json:"target_variant,omitempty"` + Images *TextArray `protobuf:"bytes,14,opt,name=images,proto3,oneof" json:"images,omitempty"` + ImageProperties *TextArray `protobuf:"bytes,15,opt,name=image_properties,json=imageProperties,proto3,oneof" 
json:"image_properties,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeAWS) Reset() { + *x = GenerativeAWS{} + mi := &file_v1_generative_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeAWS) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeAWS) ProtoMessage() {} + +func (x *GenerativeAWS) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeAWS.ProtoReflect.Descriptor instead. +func (*GenerativeAWS) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{4} +} + +func (x *GenerativeAWS) GetModel() string { + if x != nil && x.Model != nil { + return *x.Model + } + return "" +} + +func (x *GenerativeAWS) GetTemperature() float64 { + if x != nil && x.Temperature != nil { + return *x.Temperature + } + return 0 +} + +func (x *GenerativeAWS) GetService() string { + if x != nil && x.Service != nil { + return *x.Service + } + return "" +} + +func (x *GenerativeAWS) GetRegion() string { + if x != nil && x.Region != nil { + return *x.Region + } + return "" +} + +func (x *GenerativeAWS) GetEndpoint() string { + if x != nil && x.Endpoint != nil { + return *x.Endpoint + } + return "" +} + +func (x *GenerativeAWS) GetTargetModel() string { + if x != nil && x.TargetModel != nil { + return *x.TargetModel + } + return "" +} + +func (x *GenerativeAWS) GetTargetVariant() string { + if x != nil && x.TargetVariant != nil { + return *x.TargetVariant + } + return "" +} + +func (x *GenerativeAWS) GetImages() *TextArray { + if x != nil { + return x.Images + } + return nil +} + +func (x *GenerativeAWS) GetImageProperties() *TextArray { + if x != nil { + 
return x.ImageProperties + } + return nil +} + +type GenerativeCohere struct { + state protoimpl.MessageState `protogen:"open.v1"` + BaseUrl *string `protobuf:"bytes,1,opt,name=base_url,json=baseUrl,proto3,oneof" json:"base_url,omitempty"` + FrequencyPenalty *float64 `protobuf:"fixed64,2,opt,name=frequency_penalty,json=frequencyPenalty,proto3,oneof" json:"frequency_penalty,omitempty"` + MaxTokens *int64 `protobuf:"varint,3,opt,name=max_tokens,json=maxTokens,proto3,oneof" json:"max_tokens,omitempty"` + Model *string `protobuf:"bytes,4,opt,name=model,proto3,oneof" json:"model,omitempty"` + K *int64 `protobuf:"varint,5,opt,name=k,proto3,oneof" json:"k,omitempty"` + P *float64 `protobuf:"fixed64,6,opt,name=p,proto3,oneof" json:"p,omitempty"` + PresencePenalty *float64 `protobuf:"fixed64,7,opt,name=presence_penalty,json=presencePenalty,proto3,oneof" json:"presence_penalty,omitempty"` + StopSequences *TextArray `protobuf:"bytes,8,opt,name=stop_sequences,json=stopSequences,proto3,oneof" json:"stop_sequences,omitempty"` + Temperature *float64 `protobuf:"fixed64,9,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeCohere) Reset() { + *x = GenerativeCohere{} + mi := &file_v1_generative_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeCohere) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeCohere) ProtoMessage() {} + +func (x *GenerativeCohere) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeCohere.ProtoReflect.Descriptor instead. 
+func (*GenerativeCohere) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{5} +} + +func (x *GenerativeCohere) GetBaseUrl() string { + if x != nil && x.BaseUrl != nil { + return *x.BaseUrl + } + return "" +} + +func (x *GenerativeCohere) GetFrequencyPenalty() float64 { + if x != nil && x.FrequencyPenalty != nil { + return *x.FrequencyPenalty + } + return 0 +} + +func (x *GenerativeCohere) GetMaxTokens() int64 { + if x != nil && x.MaxTokens != nil { + return *x.MaxTokens + } + return 0 +} + +func (x *GenerativeCohere) GetModel() string { + if x != nil && x.Model != nil { + return *x.Model + } + return "" +} + +func (x *GenerativeCohere) GetK() int64 { + if x != nil && x.K != nil { + return *x.K + } + return 0 +} + +func (x *GenerativeCohere) GetP() float64 { + if x != nil && x.P != nil { + return *x.P + } + return 0 +} + +func (x *GenerativeCohere) GetPresencePenalty() float64 { + if x != nil && x.PresencePenalty != nil { + return *x.PresencePenalty + } + return 0 +} + +func (x *GenerativeCohere) GetStopSequences() *TextArray { + if x != nil { + return x.StopSequences + } + return nil +} + +func (x *GenerativeCohere) GetTemperature() float64 { + if x != nil && x.Temperature != nil { + return *x.Temperature + } + return 0 +} + +type GenerativeDummy struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeDummy) Reset() { + *x = GenerativeDummy{} + mi := &file_v1_generative_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeDummy) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeDummy) ProtoMessage() {} + +func (x *GenerativeDummy) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + 
ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeDummy.ProtoReflect.Descriptor instead. +func (*GenerativeDummy) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{6} +} + +type GenerativeMistral struct { + state protoimpl.MessageState `protogen:"open.v1"` + BaseUrl *string `protobuf:"bytes,1,opt,name=base_url,json=baseUrl,proto3,oneof" json:"base_url,omitempty"` + MaxTokens *int64 `protobuf:"varint,2,opt,name=max_tokens,json=maxTokens,proto3,oneof" json:"max_tokens,omitempty"` + Model *string `protobuf:"bytes,3,opt,name=model,proto3,oneof" json:"model,omitempty"` + Temperature *float64 `protobuf:"fixed64,4,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` + TopP *float64 `protobuf:"fixed64,5,opt,name=top_p,json=topP,proto3,oneof" json:"top_p,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeMistral) Reset() { + *x = GenerativeMistral{} + mi := &file_v1_generative_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeMistral) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeMistral) ProtoMessage() {} + +func (x *GenerativeMistral) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeMistral.ProtoReflect.Descriptor instead. 
+func (*GenerativeMistral) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{7} +} + +func (x *GenerativeMistral) GetBaseUrl() string { + if x != nil && x.BaseUrl != nil { + return *x.BaseUrl + } + return "" +} + +func (x *GenerativeMistral) GetMaxTokens() int64 { + if x != nil && x.MaxTokens != nil { + return *x.MaxTokens + } + return 0 +} + +func (x *GenerativeMistral) GetModel() string { + if x != nil && x.Model != nil { + return *x.Model + } + return "" +} + +func (x *GenerativeMistral) GetTemperature() float64 { + if x != nil && x.Temperature != nil { + return *x.Temperature + } + return 0 +} + +func (x *GenerativeMistral) GetTopP() float64 { + if x != nil && x.TopP != nil { + return *x.TopP + } + return 0 +} + +type GenerativeOllama struct { + state protoimpl.MessageState `protogen:"open.v1"` + ApiEndpoint *string `protobuf:"bytes,1,opt,name=api_endpoint,json=apiEndpoint,proto3,oneof" json:"api_endpoint,omitempty"` + Model *string `protobuf:"bytes,2,opt,name=model,proto3,oneof" json:"model,omitempty"` + Temperature *float64 `protobuf:"fixed64,3,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` + Images *TextArray `protobuf:"bytes,4,opt,name=images,proto3,oneof" json:"images,omitempty"` + ImageProperties *TextArray `protobuf:"bytes,5,opt,name=image_properties,json=imageProperties,proto3,oneof" json:"image_properties,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeOllama) Reset() { + *x = GenerativeOllama{} + mi := &file_v1_generative_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeOllama) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeOllama) ProtoMessage() {} + +func (x *GenerativeOllama) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[8] + if x != nil { + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeOllama.ProtoReflect.Descriptor instead. +func (*GenerativeOllama) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{8} +} + +func (x *GenerativeOllama) GetApiEndpoint() string { + if x != nil && x.ApiEndpoint != nil { + return *x.ApiEndpoint + } + return "" +} + +func (x *GenerativeOllama) GetModel() string { + if x != nil && x.Model != nil { + return *x.Model + } + return "" +} + +func (x *GenerativeOllama) GetTemperature() float64 { + if x != nil && x.Temperature != nil { + return *x.Temperature + } + return 0 +} + +func (x *GenerativeOllama) GetImages() *TextArray { + if x != nil { + return x.Images + } + return nil +} + +func (x *GenerativeOllama) GetImageProperties() *TextArray { + if x != nil { + return x.ImageProperties + } + return nil +} + +type GenerativeOpenAI struct { + state protoimpl.MessageState `protogen:"open.v1"` + FrequencyPenalty *float64 `protobuf:"fixed64,1,opt,name=frequency_penalty,json=frequencyPenalty,proto3,oneof" json:"frequency_penalty,omitempty"` + MaxTokens *int64 `protobuf:"varint,2,opt,name=max_tokens,json=maxTokens,proto3,oneof" json:"max_tokens,omitempty"` + Model *string `protobuf:"bytes,3,opt,name=model,proto3,oneof" json:"model,omitempty"` + N *int64 `protobuf:"varint,4,opt,name=n,proto3,oneof" json:"n,omitempty"` + PresencePenalty *float64 `protobuf:"fixed64,5,opt,name=presence_penalty,json=presencePenalty,proto3,oneof" json:"presence_penalty,omitempty"` + Stop *TextArray `protobuf:"bytes,6,opt,name=stop,proto3,oneof" json:"stop,omitempty"` + Temperature *float64 `protobuf:"fixed64,7,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` + TopP *float64 `protobuf:"fixed64,8,opt,name=top_p,json=topP,proto3,oneof" json:"top_p,omitempty"` + BaseUrl *string 
`protobuf:"bytes,9,opt,name=base_url,json=baseUrl,proto3,oneof" json:"base_url,omitempty"` + ApiVersion *string `protobuf:"bytes,10,opt,name=api_version,json=apiVersion,proto3,oneof" json:"api_version,omitempty"` + ResourceName *string `protobuf:"bytes,11,opt,name=resource_name,json=resourceName,proto3,oneof" json:"resource_name,omitempty"` + DeploymentId *string `protobuf:"bytes,12,opt,name=deployment_id,json=deploymentId,proto3,oneof" json:"deployment_id,omitempty"` + IsAzure *bool `protobuf:"varint,13,opt,name=is_azure,json=isAzure,proto3,oneof" json:"is_azure,omitempty"` + Images *TextArray `protobuf:"bytes,14,opt,name=images,proto3,oneof" json:"images,omitempty"` + ImageProperties *TextArray `protobuf:"bytes,15,opt,name=image_properties,json=imageProperties,proto3,oneof" json:"image_properties,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeOpenAI) Reset() { + *x = GenerativeOpenAI{} + mi := &file_v1_generative_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeOpenAI) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeOpenAI) ProtoMessage() {} + +func (x *GenerativeOpenAI) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeOpenAI.ProtoReflect.Descriptor instead. 
+func (*GenerativeOpenAI) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{9} +} + +func (x *GenerativeOpenAI) GetFrequencyPenalty() float64 { + if x != nil && x.FrequencyPenalty != nil { + return *x.FrequencyPenalty + } + return 0 +} + +func (x *GenerativeOpenAI) GetMaxTokens() int64 { + if x != nil && x.MaxTokens != nil { + return *x.MaxTokens + } + return 0 +} + +func (x *GenerativeOpenAI) GetModel() string { + if x != nil && x.Model != nil { + return *x.Model + } + return "" +} + +func (x *GenerativeOpenAI) GetN() int64 { + if x != nil && x.N != nil { + return *x.N + } + return 0 +} + +func (x *GenerativeOpenAI) GetPresencePenalty() float64 { + if x != nil && x.PresencePenalty != nil { + return *x.PresencePenalty + } + return 0 +} + +func (x *GenerativeOpenAI) GetStop() *TextArray { + if x != nil { + return x.Stop + } + return nil +} + +func (x *GenerativeOpenAI) GetTemperature() float64 { + if x != nil && x.Temperature != nil { + return *x.Temperature + } + return 0 +} + +func (x *GenerativeOpenAI) GetTopP() float64 { + if x != nil && x.TopP != nil { + return *x.TopP + } + return 0 +} + +func (x *GenerativeOpenAI) GetBaseUrl() string { + if x != nil && x.BaseUrl != nil { + return *x.BaseUrl + } + return "" +} + +func (x *GenerativeOpenAI) GetApiVersion() string { + if x != nil && x.ApiVersion != nil { + return *x.ApiVersion + } + return "" +} + +func (x *GenerativeOpenAI) GetResourceName() string { + if x != nil && x.ResourceName != nil { + return *x.ResourceName + } + return "" +} + +func (x *GenerativeOpenAI) GetDeploymentId() string { + if x != nil && x.DeploymentId != nil { + return *x.DeploymentId + } + return "" +} + +func (x *GenerativeOpenAI) GetIsAzure() bool { + if x != nil && x.IsAzure != nil { + return *x.IsAzure + } + return false +} + +func (x *GenerativeOpenAI) GetImages() *TextArray { + if x != nil { + return x.Images + } + return nil +} + +func (x *GenerativeOpenAI) GetImageProperties() *TextArray { + if x 
!= nil { + return x.ImageProperties + } + return nil +} + +type GenerativeGoogle struct { + state protoimpl.MessageState `protogen:"open.v1"` + FrequencyPenalty *float64 `protobuf:"fixed64,1,opt,name=frequency_penalty,json=frequencyPenalty,proto3,oneof" json:"frequency_penalty,omitempty"` + MaxTokens *int64 `protobuf:"varint,2,opt,name=max_tokens,json=maxTokens,proto3,oneof" json:"max_tokens,omitempty"` + Model *string `protobuf:"bytes,3,opt,name=model,proto3,oneof" json:"model,omitempty"` + PresencePenalty *float64 `protobuf:"fixed64,4,opt,name=presence_penalty,json=presencePenalty,proto3,oneof" json:"presence_penalty,omitempty"` + Temperature *float64 `protobuf:"fixed64,5,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` + TopK *int64 `protobuf:"varint,6,opt,name=top_k,json=topK,proto3,oneof" json:"top_k,omitempty"` + TopP *float64 `protobuf:"fixed64,7,opt,name=top_p,json=topP,proto3,oneof" json:"top_p,omitempty"` + StopSequences *TextArray `protobuf:"bytes,8,opt,name=stop_sequences,json=stopSequences,proto3,oneof" json:"stop_sequences,omitempty"` + ApiEndpoint *string `protobuf:"bytes,9,opt,name=api_endpoint,json=apiEndpoint,proto3,oneof" json:"api_endpoint,omitempty"` + ProjectId *string `protobuf:"bytes,10,opt,name=project_id,json=projectId,proto3,oneof" json:"project_id,omitempty"` + EndpointId *string `protobuf:"bytes,11,opt,name=endpoint_id,json=endpointId,proto3,oneof" json:"endpoint_id,omitempty"` + Region *string `protobuf:"bytes,12,opt,name=region,proto3,oneof" json:"region,omitempty"` + Images *TextArray `protobuf:"bytes,13,opt,name=images,proto3,oneof" json:"images,omitempty"` + ImageProperties *TextArray `protobuf:"bytes,14,opt,name=image_properties,json=imageProperties,proto3,oneof" json:"image_properties,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeGoogle) Reset() { + *x = GenerativeGoogle{} + mi := &file_v1_generative_proto_msgTypes[10] + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeGoogle) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeGoogle) ProtoMessage() {} + +func (x *GenerativeGoogle) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeGoogle.ProtoReflect.Descriptor instead. +func (*GenerativeGoogle) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{10} +} + +func (x *GenerativeGoogle) GetFrequencyPenalty() float64 { + if x != nil && x.FrequencyPenalty != nil { + return *x.FrequencyPenalty + } + return 0 +} + +func (x *GenerativeGoogle) GetMaxTokens() int64 { + if x != nil && x.MaxTokens != nil { + return *x.MaxTokens + } + return 0 +} + +func (x *GenerativeGoogle) GetModel() string { + if x != nil && x.Model != nil { + return *x.Model + } + return "" +} + +func (x *GenerativeGoogle) GetPresencePenalty() float64 { + if x != nil && x.PresencePenalty != nil { + return *x.PresencePenalty + } + return 0 +} + +func (x *GenerativeGoogle) GetTemperature() float64 { + if x != nil && x.Temperature != nil { + return *x.Temperature + } + return 0 +} + +func (x *GenerativeGoogle) GetTopK() int64 { + if x != nil && x.TopK != nil { + return *x.TopK + } + return 0 +} + +func (x *GenerativeGoogle) GetTopP() float64 { + if x != nil && x.TopP != nil { + return *x.TopP + } + return 0 +} + +func (x *GenerativeGoogle) GetStopSequences() *TextArray { + if x != nil { + return x.StopSequences + } + return nil +} + +func (x *GenerativeGoogle) GetApiEndpoint() string { + if x != nil && x.ApiEndpoint != nil { + return *x.ApiEndpoint + } + return "" +} + +func (x *GenerativeGoogle) GetProjectId() string { + if x != nil && x.ProjectId != nil { + return 
*x.ProjectId + } + return "" +} + +func (x *GenerativeGoogle) GetEndpointId() string { + if x != nil && x.EndpointId != nil { + return *x.EndpointId + } + return "" +} + +func (x *GenerativeGoogle) GetRegion() string { + if x != nil && x.Region != nil { + return *x.Region + } + return "" +} + +func (x *GenerativeGoogle) GetImages() *TextArray { + if x != nil { + return x.Images + } + return nil +} + +func (x *GenerativeGoogle) GetImageProperties() *TextArray { + if x != nil { + return x.ImageProperties + } + return nil +} + +type GenerativeDatabricks struct { + state protoimpl.MessageState `protogen:"open.v1"` + Endpoint *string `protobuf:"bytes,1,opt,name=endpoint,proto3,oneof" json:"endpoint,omitempty"` + Model *string `protobuf:"bytes,2,opt,name=model,proto3,oneof" json:"model,omitempty"` + FrequencyPenalty *float64 `protobuf:"fixed64,3,opt,name=frequency_penalty,json=frequencyPenalty,proto3,oneof" json:"frequency_penalty,omitempty"` + LogProbs *bool `protobuf:"varint,4,opt,name=log_probs,json=logProbs,proto3,oneof" json:"log_probs,omitempty"` + TopLogProbs *int64 `protobuf:"varint,5,opt,name=top_log_probs,json=topLogProbs,proto3,oneof" json:"top_log_probs,omitempty"` + MaxTokens *int64 `protobuf:"varint,6,opt,name=max_tokens,json=maxTokens,proto3,oneof" json:"max_tokens,omitempty"` + N *int64 `protobuf:"varint,7,opt,name=n,proto3,oneof" json:"n,omitempty"` + PresencePenalty *float64 `protobuf:"fixed64,8,opt,name=presence_penalty,json=presencePenalty,proto3,oneof" json:"presence_penalty,omitempty"` + Stop *TextArray `protobuf:"bytes,9,opt,name=stop,proto3,oneof" json:"stop,omitempty"` + Temperature *float64 `protobuf:"fixed64,10,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` + TopP *float64 `protobuf:"fixed64,11,opt,name=top_p,json=topP,proto3,oneof" json:"top_p,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeDatabricks) Reset() { + *x = GenerativeDatabricks{} + mi := 
&file_v1_generative_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeDatabricks) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeDatabricks) ProtoMessage() {} + +func (x *GenerativeDatabricks) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeDatabricks.ProtoReflect.Descriptor instead. +func (*GenerativeDatabricks) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{11} +} + +func (x *GenerativeDatabricks) GetEndpoint() string { + if x != nil && x.Endpoint != nil { + return *x.Endpoint + } + return "" +} + +func (x *GenerativeDatabricks) GetModel() string { + if x != nil && x.Model != nil { + return *x.Model + } + return "" +} + +func (x *GenerativeDatabricks) GetFrequencyPenalty() float64 { + if x != nil && x.FrequencyPenalty != nil { + return *x.FrequencyPenalty + } + return 0 +} + +func (x *GenerativeDatabricks) GetLogProbs() bool { + if x != nil && x.LogProbs != nil { + return *x.LogProbs + } + return false +} + +func (x *GenerativeDatabricks) GetTopLogProbs() int64 { + if x != nil && x.TopLogProbs != nil { + return *x.TopLogProbs + } + return 0 +} + +func (x *GenerativeDatabricks) GetMaxTokens() int64 { + if x != nil && x.MaxTokens != nil { + return *x.MaxTokens + } + return 0 +} + +func (x *GenerativeDatabricks) GetN() int64 { + if x != nil && x.N != nil { + return *x.N + } + return 0 +} + +func (x *GenerativeDatabricks) GetPresencePenalty() float64 { + if x != nil && x.PresencePenalty != nil { + return *x.PresencePenalty + } + return 0 +} + +func (x *GenerativeDatabricks) GetStop() *TextArray { + if x != nil { + return x.Stop + } + return nil +} + +func (x 
*GenerativeDatabricks) GetTemperature() float64 { + if x != nil && x.Temperature != nil { + return *x.Temperature + } + return 0 +} + +func (x *GenerativeDatabricks) GetTopP() float64 { + if x != nil && x.TopP != nil { + return *x.TopP + } + return 0 +} + +type GenerativeFriendliAI struct { + state protoimpl.MessageState `protogen:"open.v1"` + BaseUrl *string `protobuf:"bytes,1,opt,name=base_url,json=baseUrl,proto3,oneof" json:"base_url,omitempty"` + Model *string `protobuf:"bytes,2,opt,name=model,proto3,oneof" json:"model,omitempty"` + MaxTokens *int64 `protobuf:"varint,3,opt,name=max_tokens,json=maxTokens,proto3,oneof" json:"max_tokens,omitempty"` + Temperature *float64 `protobuf:"fixed64,4,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` + N *int64 `protobuf:"varint,5,opt,name=n,proto3,oneof" json:"n,omitempty"` + TopP *float64 `protobuf:"fixed64,6,opt,name=top_p,json=topP,proto3,oneof" json:"top_p,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeFriendliAI) Reset() { + *x = GenerativeFriendliAI{} + mi := &file_v1_generative_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeFriendliAI) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeFriendliAI) ProtoMessage() {} + +func (x *GenerativeFriendliAI) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeFriendliAI.ProtoReflect.Descriptor instead. 
+func (*GenerativeFriendliAI) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{12} +} + +func (x *GenerativeFriendliAI) GetBaseUrl() string { + if x != nil && x.BaseUrl != nil { + return *x.BaseUrl + } + return "" +} + +func (x *GenerativeFriendliAI) GetModel() string { + if x != nil && x.Model != nil { + return *x.Model + } + return "" +} + +func (x *GenerativeFriendliAI) GetMaxTokens() int64 { + if x != nil && x.MaxTokens != nil { + return *x.MaxTokens + } + return 0 +} + +func (x *GenerativeFriendliAI) GetTemperature() float64 { + if x != nil && x.Temperature != nil { + return *x.Temperature + } + return 0 +} + +func (x *GenerativeFriendliAI) GetN() int64 { + if x != nil && x.N != nil { + return *x.N + } + return 0 +} + +func (x *GenerativeFriendliAI) GetTopP() float64 { + if x != nil && x.TopP != nil { + return *x.TopP + } + return 0 +} + +type GenerativeNvidia struct { + state protoimpl.MessageState `protogen:"open.v1"` + BaseUrl *string `protobuf:"bytes,1,opt,name=base_url,json=baseUrl,proto3,oneof" json:"base_url,omitempty"` + Model *string `protobuf:"bytes,2,opt,name=model,proto3,oneof" json:"model,omitempty"` + Temperature *float64 `protobuf:"fixed64,3,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` + TopP *float64 `protobuf:"fixed64,4,opt,name=top_p,json=topP,proto3,oneof" json:"top_p,omitempty"` + MaxTokens *int64 `protobuf:"varint,5,opt,name=max_tokens,json=maxTokens,proto3,oneof" json:"max_tokens,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeNvidia) Reset() { + *x = GenerativeNvidia{} + mi := &file_v1_generative_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeNvidia) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeNvidia) ProtoMessage() {} + +func (x *GenerativeNvidia) ProtoReflect() protoreflect.Message { + mi := 
&file_v1_generative_proto_msgTypes[13] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeNvidia.ProtoReflect.Descriptor instead. +func (*GenerativeNvidia) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{13} +} + +func (x *GenerativeNvidia) GetBaseUrl() string { + if x != nil && x.BaseUrl != nil { + return *x.BaseUrl + } + return "" +} + +func (x *GenerativeNvidia) GetModel() string { + if x != nil && x.Model != nil { + return *x.Model + } + return "" +} + +func (x *GenerativeNvidia) GetTemperature() float64 { + if x != nil && x.Temperature != nil { + return *x.Temperature + } + return 0 +} + +func (x *GenerativeNvidia) GetTopP() float64 { + if x != nil && x.TopP != nil { + return *x.TopP + } + return 0 +} + +func (x *GenerativeNvidia) GetMaxTokens() int64 { + if x != nil && x.MaxTokens != nil { + return *x.MaxTokens + } + return 0 +} + +type GenerativeXAI struct { + state protoimpl.MessageState `protogen:"open.v1"` + BaseUrl *string `protobuf:"bytes,1,opt,name=base_url,json=baseUrl,proto3,oneof" json:"base_url,omitempty"` + Model *string `protobuf:"bytes,2,opt,name=model,proto3,oneof" json:"model,omitempty"` + Temperature *float64 `protobuf:"fixed64,3,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` + TopP *float64 `protobuf:"fixed64,4,opt,name=top_p,json=topP,proto3,oneof" json:"top_p,omitempty"` + MaxTokens *int64 `protobuf:"varint,5,opt,name=max_tokens,json=maxTokens,proto3,oneof" json:"max_tokens,omitempty"` + Images *TextArray `protobuf:"bytes,6,opt,name=images,proto3,oneof" json:"images,omitempty"` + ImageProperties *TextArray `protobuf:"bytes,7,opt,name=image_properties,json=imageProperties,proto3,oneof" json:"image_properties,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeXAI) 
Reset() { + *x = GenerativeXAI{} + mi := &file_v1_generative_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeXAI) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeXAI) ProtoMessage() {} + +func (x *GenerativeXAI) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[14] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeXAI.ProtoReflect.Descriptor instead. +func (*GenerativeXAI) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{14} +} + +func (x *GenerativeXAI) GetBaseUrl() string { + if x != nil && x.BaseUrl != nil { + return *x.BaseUrl + } + return "" +} + +func (x *GenerativeXAI) GetModel() string { + if x != nil && x.Model != nil { + return *x.Model + } + return "" +} + +func (x *GenerativeXAI) GetTemperature() float64 { + if x != nil && x.Temperature != nil { + return *x.Temperature + } + return 0 +} + +func (x *GenerativeXAI) GetTopP() float64 { + if x != nil && x.TopP != nil { + return *x.TopP + } + return 0 +} + +func (x *GenerativeXAI) GetMaxTokens() int64 { + if x != nil && x.MaxTokens != nil { + return *x.MaxTokens + } + return 0 +} + +func (x *GenerativeXAI) GetImages() *TextArray { + if x != nil { + return x.Images + } + return nil +} + +func (x *GenerativeXAI) GetImageProperties() *TextArray { + if x != nil { + return x.ImageProperties + } + return nil +} + +type GenerativeAnthropicMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + Usage *GenerativeAnthropicMetadata_Usage `protobuf:"bytes,1,opt,name=usage,proto3" json:"usage,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeAnthropicMetadata) Reset() { + *x = GenerativeAnthropicMetadata{} + 
mi := &file_v1_generative_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeAnthropicMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeAnthropicMetadata) ProtoMessage() {} + +func (x *GenerativeAnthropicMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[15] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeAnthropicMetadata.ProtoReflect.Descriptor instead. +func (*GenerativeAnthropicMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{15} +} + +func (x *GenerativeAnthropicMetadata) GetUsage() *GenerativeAnthropicMetadata_Usage { + if x != nil { + return x.Usage + } + return nil +} + +type GenerativeAnyscaleMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeAnyscaleMetadata) Reset() { + *x = GenerativeAnyscaleMetadata{} + mi := &file_v1_generative_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeAnyscaleMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeAnyscaleMetadata) ProtoMessage() {} + +func (x *GenerativeAnyscaleMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[16] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeAnyscaleMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeAnyscaleMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{16} +} + +type GenerativeAWSMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeAWSMetadata) Reset() { + *x = GenerativeAWSMetadata{} + mi := &file_v1_generative_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeAWSMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeAWSMetadata) ProtoMessage() {} + +func (x *GenerativeAWSMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[17] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeAWSMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeAWSMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{17} +} + +type GenerativeCohereMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + ApiVersion *GenerativeCohereMetadata_ApiVersion `protobuf:"bytes,1,opt,name=api_version,json=apiVersion,proto3,oneof" json:"api_version,omitempty"` + BilledUnits *GenerativeCohereMetadata_BilledUnits `protobuf:"bytes,2,opt,name=billed_units,json=billedUnits,proto3,oneof" json:"billed_units,omitempty"` + Tokens *GenerativeCohereMetadata_Tokens `protobuf:"bytes,3,opt,name=tokens,proto3,oneof" json:"tokens,omitempty"` + Warnings *TextArray `protobuf:"bytes,4,opt,name=warnings,proto3,oneof" json:"warnings,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeCohereMetadata) Reset() { + *x = GenerativeCohereMetadata{} + mi := &file_v1_generative_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeCohereMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeCohereMetadata) ProtoMessage() {} + +func (x *GenerativeCohereMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[18] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeCohereMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeCohereMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{18} +} + +func (x *GenerativeCohereMetadata) GetApiVersion() *GenerativeCohereMetadata_ApiVersion { + if x != nil { + return x.ApiVersion + } + return nil +} + +func (x *GenerativeCohereMetadata) GetBilledUnits() *GenerativeCohereMetadata_BilledUnits { + if x != nil { + return x.BilledUnits + } + return nil +} + +func (x *GenerativeCohereMetadata) GetTokens() *GenerativeCohereMetadata_Tokens { + if x != nil { + return x.Tokens + } + return nil +} + +func (x *GenerativeCohereMetadata) GetWarnings() *TextArray { + if x != nil { + return x.Warnings + } + return nil +} + +type GenerativeDummyMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeDummyMetadata) Reset() { + *x = GenerativeDummyMetadata{} + mi := &file_v1_generative_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeDummyMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeDummyMetadata) ProtoMessage() {} + +func (x *GenerativeDummyMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[19] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeDummyMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeDummyMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{19} +} + +type GenerativeMistralMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + Usage *GenerativeMistralMetadata_Usage `protobuf:"bytes,1,opt,name=usage,proto3,oneof" json:"usage,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeMistralMetadata) Reset() { + *x = GenerativeMistralMetadata{} + mi := &file_v1_generative_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeMistralMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeMistralMetadata) ProtoMessage() {} + +func (x *GenerativeMistralMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[20] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeMistralMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeMistralMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{20} +} + +func (x *GenerativeMistralMetadata) GetUsage() *GenerativeMistralMetadata_Usage { + if x != nil { + return x.Usage + } + return nil +} + +type GenerativeOllamaMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeOllamaMetadata) Reset() { + *x = GenerativeOllamaMetadata{} + mi := &file_v1_generative_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeOllamaMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeOllamaMetadata) ProtoMessage() {} + +func (x *GenerativeOllamaMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[21] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeOllamaMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeOllamaMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{21} +} + +type GenerativeOpenAIMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + Usage *GenerativeOpenAIMetadata_Usage `protobuf:"bytes,1,opt,name=usage,proto3,oneof" json:"usage,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeOpenAIMetadata) Reset() { + *x = GenerativeOpenAIMetadata{} + mi := &file_v1_generative_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeOpenAIMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeOpenAIMetadata) ProtoMessage() {} + +func (x *GenerativeOpenAIMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[22] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeOpenAIMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeOpenAIMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{22} +} + +func (x *GenerativeOpenAIMetadata) GetUsage() *GenerativeOpenAIMetadata_Usage { + if x != nil { + return x.Usage + } + return nil +} + +type GenerativeGoogleMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + Metadata *GenerativeGoogleMetadata_Metadata `protobuf:"bytes,1,opt,name=metadata,proto3,oneof" json:"metadata,omitempty"` + UsageMetadata *GenerativeGoogleMetadata_UsageMetadata `protobuf:"bytes,2,opt,name=usage_metadata,json=usageMetadata,proto3,oneof" json:"usage_metadata,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeGoogleMetadata) Reset() { + *x = GenerativeGoogleMetadata{} + mi := &file_v1_generative_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeGoogleMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeGoogleMetadata) ProtoMessage() {} + +func (x *GenerativeGoogleMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[23] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeGoogleMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeGoogleMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{23} +} + +func (x *GenerativeGoogleMetadata) GetMetadata() *GenerativeGoogleMetadata_Metadata { + if x != nil { + return x.Metadata + } + return nil +} + +func (x *GenerativeGoogleMetadata) GetUsageMetadata() *GenerativeGoogleMetadata_UsageMetadata { + if x != nil { + return x.UsageMetadata + } + return nil +} + +type GenerativeDatabricksMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + Usage *GenerativeDatabricksMetadata_Usage `protobuf:"bytes,1,opt,name=usage,proto3,oneof" json:"usage,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeDatabricksMetadata) Reset() { + *x = GenerativeDatabricksMetadata{} + mi := &file_v1_generative_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeDatabricksMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeDatabricksMetadata) ProtoMessage() {} + +func (x *GenerativeDatabricksMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[24] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeDatabricksMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeDatabricksMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{24} +} + +func (x *GenerativeDatabricksMetadata) GetUsage() *GenerativeDatabricksMetadata_Usage { + if x != nil { + return x.Usage + } + return nil +} + +type GenerativeFriendliAIMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + Usage *GenerativeFriendliAIMetadata_Usage `protobuf:"bytes,1,opt,name=usage,proto3,oneof" json:"usage,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeFriendliAIMetadata) Reset() { + *x = GenerativeFriendliAIMetadata{} + mi := &file_v1_generative_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeFriendliAIMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeFriendliAIMetadata) ProtoMessage() {} + +func (x *GenerativeFriendliAIMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[25] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeFriendliAIMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeFriendliAIMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{25} +} + +func (x *GenerativeFriendliAIMetadata) GetUsage() *GenerativeFriendliAIMetadata_Usage { + if x != nil { + return x.Usage + } + return nil +} + +type GenerativeNvidiaMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + Usage *GenerativeNvidiaMetadata_Usage `protobuf:"bytes,1,opt,name=usage,proto3,oneof" json:"usage,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeNvidiaMetadata) Reset() { + *x = GenerativeNvidiaMetadata{} + mi := &file_v1_generative_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeNvidiaMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeNvidiaMetadata) ProtoMessage() {} + +func (x *GenerativeNvidiaMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[26] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeNvidiaMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeNvidiaMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{26} +} + +func (x *GenerativeNvidiaMetadata) GetUsage() *GenerativeNvidiaMetadata_Usage { + if x != nil { + return x.Usage + } + return nil +} + +type GenerativeXAIMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + Usage *GenerativeXAIMetadata_Usage `protobuf:"bytes,1,opt,name=usage,proto3,oneof" json:"usage,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeXAIMetadata) Reset() { + *x = GenerativeXAIMetadata{} + mi := &file_v1_generative_proto_msgTypes[27] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeXAIMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeXAIMetadata) ProtoMessage() {} + +func (x *GenerativeXAIMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[27] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeXAIMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeXAIMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{27} +} + +func (x *GenerativeXAIMetadata) GetUsage() *GenerativeXAIMetadata_Usage { + if x != nil { + return x.Usage + } + return nil +} + +type GenerativeMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Kind: + // + // *GenerativeMetadata_Anthropic + // *GenerativeMetadata_Anyscale + // *GenerativeMetadata_Aws + // *GenerativeMetadata_Cohere + // *GenerativeMetadata_Dummy + // *GenerativeMetadata_Mistral + // *GenerativeMetadata_Ollama + // *GenerativeMetadata_Openai + // *GenerativeMetadata_Google + // *GenerativeMetadata_Databricks + // *GenerativeMetadata_Friendliai + // *GenerativeMetadata_Nvidia + // *GenerativeMetadata_Xai + Kind isGenerativeMetadata_Kind `protobuf_oneof:"kind"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeMetadata) Reset() { + *x = GenerativeMetadata{} + mi := &file_v1_generative_proto_msgTypes[28] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeMetadata) ProtoMessage() {} + +func (x *GenerativeMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[28] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{28} +} + +func (x *GenerativeMetadata) GetKind() isGenerativeMetadata_Kind { + if x != nil { + return x.Kind + } + return nil +} + +func (x *GenerativeMetadata) GetAnthropic() *GenerativeAnthropicMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Anthropic); ok { + return x.Anthropic + } + } + return nil +} + +func (x *GenerativeMetadata) GetAnyscale() *GenerativeAnyscaleMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Anyscale); ok { + return x.Anyscale + } + } + return nil +} + +func (x *GenerativeMetadata) GetAws() *GenerativeAWSMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Aws); ok { + return x.Aws + } + } + return nil +} + +func (x *GenerativeMetadata) GetCohere() *GenerativeCohereMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Cohere); ok { + return x.Cohere + } + } + return nil +} + +func (x *GenerativeMetadata) GetDummy() *GenerativeDummyMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Dummy); ok { + return x.Dummy + } + } + return nil +} + +func (x *GenerativeMetadata) GetMistral() *GenerativeMistralMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Mistral); ok { + return x.Mistral + } + } + return nil +} + +func (x *GenerativeMetadata) GetOllama() *GenerativeOllamaMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Ollama); ok { + return x.Ollama + } + } + return nil +} + +func (x *GenerativeMetadata) GetOpenai() *GenerativeOpenAIMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Openai); ok { + return x.Openai + } + } + return nil +} + +func (x *GenerativeMetadata) GetGoogle() *GenerativeGoogleMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Google); ok { + return x.Google + } + } + return nil +} + +func (x *GenerativeMetadata) GetDatabricks() 
*GenerativeDatabricksMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Databricks); ok { + return x.Databricks + } + } + return nil +} + +func (x *GenerativeMetadata) GetFriendliai() *GenerativeFriendliAIMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Friendliai); ok { + return x.Friendliai + } + } + return nil +} + +func (x *GenerativeMetadata) GetNvidia() *GenerativeNvidiaMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Nvidia); ok { + return x.Nvidia + } + } + return nil +} + +func (x *GenerativeMetadata) GetXai() *GenerativeXAIMetadata { + if x != nil { + if x, ok := x.Kind.(*GenerativeMetadata_Xai); ok { + return x.Xai + } + } + return nil +} + +type isGenerativeMetadata_Kind interface { + isGenerativeMetadata_Kind() +} + +type GenerativeMetadata_Anthropic struct { + Anthropic *GenerativeAnthropicMetadata `protobuf:"bytes,1,opt,name=anthropic,proto3,oneof"` +} + +type GenerativeMetadata_Anyscale struct { + Anyscale *GenerativeAnyscaleMetadata `protobuf:"bytes,2,opt,name=anyscale,proto3,oneof"` +} + +type GenerativeMetadata_Aws struct { + Aws *GenerativeAWSMetadata `protobuf:"bytes,3,opt,name=aws,proto3,oneof"` +} + +type GenerativeMetadata_Cohere struct { + Cohere *GenerativeCohereMetadata `protobuf:"bytes,4,opt,name=cohere,proto3,oneof"` +} + +type GenerativeMetadata_Dummy struct { + Dummy *GenerativeDummyMetadata `protobuf:"bytes,5,opt,name=dummy,proto3,oneof"` +} + +type GenerativeMetadata_Mistral struct { + Mistral *GenerativeMistralMetadata `protobuf:"bytes,6,opt,name=mistral,proto3,oneof"` +} + +type GenerativeMetadata_Ollama struct { + Ollama *GenerativeOllamaMetadata `protobuf:"bytes,7,opt,name=ollama,proto3,oneof"` +} + +type GenerativeMetadata_Openai struct { + Openai *GenerativeOpenAIMetadata `protobuf:"bytes,8,opt,name=openai,proto3,oneof"` +} + +type GenerativeMetadata_Google struct { + Google *GenerativeGoogleMetadata `protobuf:"bytes,9,opt,name=google,proto3,oneof"` +} + +type 
GenerativeMetadata_Databricks struct { + Databricks *GenerativeDatabricksMetadata `protobuf:"bytes,10,opt,name=databricks,proto3,oneof"` +} + +type GenerativeMetadata_Friendliai struct { + Friendliai *GenerativeFriendliAIMetadata `protobuf:"bytes,11,opt,name=friendliai,proto3,oneof"` +} + +type GenerativeMetadata_Nvidia struct { + Nvidia *GenerativeNvidiaMetadata `protobuf:"bytes,12,opt,name=nvidia,proto3,oneof"` +} + +type GenerativeMetadata_Xai struct { + Xai *GenerativeXAIMetadata `protobuf:"bytes,13,opt,name=xai,proto3,oneof"` +} + +func (*GenerativeMetadata_Anthropic) isGenerativeMetadata_Kind() {} + +func (*GenerativeMetadata_Anyscale) isGenerativeMetadata_Kind() {} + +func (*GenerativeMetadata_Aws) isGenerativeMetadata_Kind() {} + +func (*GenerativeMetadata_Cohere) isGenerativeMetadata_Kind() {} + +func (*GenerativeMetadata_Dummy) isGenerativeMetadata_Kind() {} + +func (*GenerativeMetadata_Mistral) isGenerativeMetadata_Kind() {} + +func (*GenerativeMetadata_Ollama) isGenerativeMetadata_Kind() {} + +func (*GenerativeMetadata_Openai) isGenerativeMetadata_Kind() {} + +func (*GenerativeMetadata_Google) isGenerativeMetadata_Kind() {} + +func (*GenerativeMetadata_Databricks) isGenerativeMetadata_Kind() {} + +func (*GenerativeMetadata_Friendliai) isGenerativeMetadata_Kind() {} + +func (*GenerativeMetadata_Nvidia) isGenerativeMetadata_Kind() {} + +func (*GenerativeMetadata_Xai) isGenerativeMetadata_Kind() {} + +type GenerativeReply struct { + state protoimpl.MessageState `protogen:"open.v1"` + Result string `protobuf:"bytes,1,opt,name=result,proto3" json:"result,omitempty"` + Debug *GenerativeDebug `protobuf:"bytes,2,opt,name=debug,proto3,oneof" json:"debug,omitempty"` + Metadata *GenerativeMetadata `protobuf:"bytes,3,opt,name=metadata,proto3,oneof" json:"metadata,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeReply) Reset() { + *x = GenerativeReply{} + mi := &file_v1_generative_proto_msgTypes[29] + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeReply) ProtoMessage() {} + +func (x *GenerativeReply) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[29] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeReply.ProtoReflect.Descriptor instead. +func (*GenerativeReply) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{29} +} + +func (x *GenerativeReply) GetResult() string { + if x != nil { + return x.Result + } + return "" +} + +func (x *GenerativeReply) GetDebug() *GenerativeDebug { + if x != nil { + return x.Debug + } + return nil +} + +func (x *GenerativeReply) GetMetadata() *GenerativeMetadata { + if x != nil { + return x.Metadata + } + return nil +} + +type GenerativeResult struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []*GenerativeReply `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeResult) Reset() { + *x = GenerativeResult{} + mi := &file_v1_generative_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeResult) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeResult) ProtoMessage() {} + +func (x *GenerativeResult) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[30] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeResult.ProtoReflect.Descriptor instead. 
+func (*GenerativeResult) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{30} +} + +func (x *GenerativeResult) GetValues() []*GenerativeReply { + if x != nil { + return x.Values + } + return nil +} + +type GenerativeDebug struct { + state protoimpl.MessageState `protogen:"open.v1"` + FullPrompt *string `protobuf:"bytes,1,opt,name=full_prompt,json=fullPrompt,proto3,oneof" json:"full_prompt,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeDebug) Reset() { + *x = GenerativeDebug{} + mi := &file_v1_generative_proto_msgTypes[31] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeDebug) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeDebug) ProtoMessage() {} + +func (x *GenerativeDebug) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[31] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeDebug.ProtoReflect.Descriptor instead. 
+func (*GenerativeDebug) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{31} +} + +func (x *GenerativeDebug) GetFullPrompt() string { + if x != nil && x.FullPrompt != nil { + return *x.FullPrompt + } + return "" +} + +type GenerativeSearch_Single struct { + state protoimpl.MessageState `protogen:"open.v1"` + Prompt string `protobuf:"bytes,1,opt,name=prompt,proto3" json:"prompt,omitempty"` + Debug bool `protobuf:"varint,2,opt,name=debug,proto3" json:"debug,omitempty"` + // only allow one at the beginning, but multiple in the future + Queries []*GenerativeProvider `protobuf:"bytes,3,rep,name=queries,proto3" json:"queries,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeSearch_Single) Reset() { + *x = GenerativeSearch_Single{} + mi := &file_v1_generative_proto_msgTypes[32] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeSearch_Single) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeSearch_Single) ProtoMessage() {} + +func (x *GenerativeSearch_Single) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[32] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeSearch_Single.ProtoReflect.Descriptor instead. 
+func (*GenerativeSearch_Single) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{0, 0} +} + +func (x *GenerativeSearch_Single) GetPrompt() string { + if x != nil { + return x.Prompt + } + return "" +} + +func (x *GenerativeSearch_Single) GetDebug() bool { + if x != nil { + return x.Debug + } + return false +} + +func (x *GenerativeSearch_Single) GetQueries() []*GenerativeProvider { + if x != nil { + return x.Queries + } + return nil +} + +type GenerativeSearch_Grouped struct { + state protoimpl.MessageState `protogen:"open.v1"` + Task string `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` + Properties *TextArray `protobuf:"bytes,2,opt,name=properties,proto3,oneof" json:"properties,omitempty"` + // only allow one at the beginning, but multiple in the future + Queries []*GenerativeProvider `protobuf:"bytes,3,rep,name=queries,proto3" json:"queries,omitempty"` + Debug bool `protobuf:"varint,4,opt,name=debug,proto3" json:"debug,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeSearch_Grouped) Reset() { + *x = GenerativeSearch_Grouped{} + mi := &file_v1_generative_proto_msgTypes[33] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeSearch_Grouped) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeSearch_Grouped) ProtoMessage() {} + +func (x *GenerativeSearch_Grouped) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[33] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeSearch_Grouped.ProtoReflect.Descriptor instead. 
+func (*GenerativeSearch_Grouped) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{0, 1} +} + +func (x *GenerativeSearch_Grouped) GetTask() string { + if x != nil { + return x.Task + } + return "" +} + +func (x *GenerativeSearch_Grouped) GetProperties() *TextArray { + if x != nil { + return x.Properties + } + return nil +} + +func (x *GenerativeSearch_Grouped) GetQueries() []*GenerativeProvider { + if x != nil { + return x.Queries + } + return nil +} + +func (x *GenerativeSearch_Grouped) GetDebug() bool { + if x != nil { + return x.Debug + } + return false +} + +type GenerativeAnthropicMetadata_Usage struct { + state protoimpl.MessageState `protogen:"open.v1"` + InputTokens int64 `protobuf:"varint,1,opt,name=input_tokens,json=inputTokens,proto3" json:"input_tokens,omitempty"` + OutputTokens int64 `protobuf:"varint,2,opt,name=output_tokens,json=outputTokens,proto3" json:"output_tokens,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeAnthropicMetadata_Usage) Reset() { + *x = GenerativeAnthropicMetadata_Usage{} + mi := &file_v1_generative_proto_msgTypes[34] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeAnthropicMetadata_Usage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeAnthropicMetadata_Usage) ProtoMessage() {} + +func (x *GenerativeAnthropicMetadata_Usage) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[34] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeAnthropicMetadata_Usage.ProtoReflect.Descriptor instead. 
+func (*GenerativeAnthropicMetadata_Usage) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{15, 0} +} + +func (x *GenerativeAnthropicMetadata_Usage) GetInputTokens() int64 { + if x != nil { + return x.InputTokens + } + return 0 +} + +func (x *GenerativeAnthropicMetadata_Usage) GetOutputTokens() int64 { + if x != nil { + return x.OutputTokens + } + return 0 +} + +type GenerativeCohereMetadata_ApiVersion struct { + state protoimpl.MessageState `protogen:"open.v1"` + Version *string `protobuf:"bytes,1,opt,name=version,proto3,oneof" json:"version,omitempty"` + IsDeprecated *bool `protobuf:"varint,2,opt,name=is_deprecated,json=isDeprecated,proto3,oneof" json:"is_deprecated,omitempty"` + IsExperimental *bool `protobuf:"varint,3,opt,name=is_experimental,json=isExperimental,proto3,oneof" json:"is_experimental,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeCohereMetadata_ApiVersion) Reset() { + *x = GenerativeCohereMetadata_ApiVersion{} + mi := &file_v1_generative_proto_msgTypes[35] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeCohereMetadata_ApiVersion) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeCohereMetadata_ApiVersion) ProtoMessage() {} + +func (x *GenerativeCohereMetadata_ApiVersion) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[35] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeCohereMetadata_ApiVersion.ProtoReflect.Descriptor instead. 
+func (*GenerativeCohereMetadata_ApiVersion) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{18, 0} +} + +func (x *GenerativeCohereMetadata_ApiVersion) GetVersion() string { + if x != nil && x.Version != nil { + return *x.Version + } + return "" +} + +func (x *GenerativeCohereMetadata_ApiVersion) GetIsDeprecated() bool { + if x != nil && x.IsDeprecated != nil { + return *x.IsDeprecated + } + return false +} + +func (x *GenerativeCohereMetadata_ApiVersion) GetIsExperimental() bool { + if x != nil && x.IsExperimental != nil { + return *x.IsExperimental + } + return false +} + +type GenerativeCohereMetadata_BilledUnits struct { + state protoimpl.MessageState `protogen:"open.v1"` + InputTokens *float64 `protobuf:"fixed64,1,opt,name=input_tokens,json=inputTokens,proto3,oneof" json:"input_tokens,omitempty"` + OutputTokens *float64 `protobuf:"fixed64,2,opt,name=output_tokens,json=outputTokens,proto3,oneof" json:"output_tokens,omitempty"` + SearchUnits *float64 `protobuf:"fixed64,3,opt,name=search_units,json=searchUnits,proto3,oneof" json:"search_units,omitempty"` + Classifications *float64 `protobuf:"fixed64,4,opt,name=classifications,proto3,oneof" json:"classifications,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeCohereMetadata_BilledUnits) Reset() { + *x = GenerativeCohereMetadata_BilledUnits{} + mi := &file_v1_generative_proto_msgTypes[36] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeCohereMetadata_BilledUnits) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeCohereMetadata_BilledUnits) ProtoMessage() {} + +func (x *GenerativeCohereMetadata_BilledUnits) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[36] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + 
return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeCohereMetadata_BilledUnits.ProtoReflect.Descriptor instead. +func (*GenerativeCohereMetadata_BilledUnits) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{18, 1} +} + +func (x *GenerativeCohereMetadata_BilledUnits) GetInputTokens() float64 { + if x != nil && x.InputTokens != nil { + return *x.InputTokens + } + return 0 +} + +func (x *GenerativeCohereMetadata_BilledUnits) GetOutputTokens() float64 { + if x != nil && x.OutputTokens != nil { + return *x.OutputTokens + } + return 0 +} + +func (x *GenerativeCohereMetadata_BilledUnits) GetSearchUnits() float64 { + if x != nil && x.SearchUnits != nil { + return *x.SearchUnits + } + return 0 +} + +func (x *GenerativeCohereMetadata_BilledUnits) GetClassifications() float64 { + if x != nil && x.Classifications != nil { + return *x.Classifications + } + return 0 +} + +type GenerativeCohereMetadata_Tokens struct { + state protoimpl.MessageState `protogen:"open.v1"` + InputTokens *float64 `protobuf:"fixed64,1,opt,name=input_tokens,json=inputTokens,proto3,oneof" json:"input_tokens,omitempty"` + OutputTokens *float64 `protobuf:"fixed64,2,opt,name=output_tokens,json=outputTokens,proto3,oneof" json:"output_tokens,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeCohereMetadata_Tokens) Reset() { + *x = GenerativeCohereMetadata_Tokens{} + mi := &file_v1_generative_proto_msgTypes[37] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeCohereMetadata_Tokens) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeCohereMetadata_Tokens) ProtoMessage() {} + +func (x *GenerativeCohereMetadata_Tokens) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[37] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + 
ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeCohereMetadata_Tokens.ProtoReflect.Descriptor instead. +func (*GenerativeCohereMetadata_Tokens) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{18, 2} +} + +func (x *GenerativeCohereMetadata_Tokens) GetInputTokens() float64 { + if x != nil && x.InputTokens != nil { + return *x.InputTokens + } + return 0 +} + +func (x *GenerativeCohereMetadata_Tokens) GetOutputTokens() float64 { + if x != nil && x.OutputTokens != nil { + return *x.OutputTokens + } + return 0 +} + +type GenerativeMistralMetadata_Usage struct { + state protoimpl.MessageState `protogen:"open.v1"` + PromptTokens *int64 `protobuf:"varint,1,opt,name=prompt_tokens,json=promptTokens,proto3,oneof" json:"prompt_tokens,omitempty"` + CompletionTokens *int64 `protobuf:"varint,2,opt,name=completion_tokens,json=completionTokens,proto3,oneof" json:"completion_tokens,omitempty"` + TotalTokens *int64 `protobuf:"varint,3,opt,name=total_tokens,json=totalTokens,proto3,oneof" json:"total_tokens,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeMistralMetadata_Usage) Reset() { + *x = GenerativeMistralMetadata_Usage{} + mi := &file_v1_generative_proto_msgTypes[38] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeMistralMetadata_Usage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeMistralMetadata_Usage) ProtoMessage() {} + +func (x *GenerativeMistralMetadata_Usage) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[38] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeMistralMetadata_Usage.ProtoReflect.Descriptor instead. 
+func (*GenerativeMistralMetadata_Usage) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{20, 0} +} + +func (x *GenerativeMistralMetadata_Usage) GetPromptTokens() int64 { + if x != nil && x.PromptTokens != nil { + return *x.PromptTokens + } + return 0 +} + +func (x *GenerativeMistralMetadata_Usage) GetCompletionTokens() int64 { + if x != nil && x.CompletionTokens != nil { + return *x.CompletionTokens + } + return 0 +} + +func (x *GenerativeMistralMetadata_Usage) GetTotalTokens() int64 { + if x != nil && x.TotalTokens != nil { + return *x.TotalTokens + } + return 0 +} + +type GenerativeOpenAIMetadata_Usage struct { + state protoimpl.MessageState `protogen:"open.v1"` + PromptTokens *int64 `protobuf:"varint,1,opt,name=prompt_tokens,json=promptTokens,proto3,oneof" json:"prompt_tokens,omitempty"` + CompletionTokens *int64 `protobuf:"varint,2,opt,name=completion_tokens,json=completionTokens,proto3,oneof" json:"completion_tokens,omitempty"` + TotalTokens *int64 `protobuf:"varint,3,opt,name=total_tokens,json=totalTokens,proto3,oneof" json:"total_tokens,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeOpenAIMetadata_Usage) Reset() { + *x = GenerativeOpenAIMetadata_Usage{} + mi := &file_v1_generative_proto_msgTypes[39] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeOpenAIMetadata_Usage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeOpenAIMetadata_Usage) ProtoMessage() {} + +func (x *GenerativeOpenAIMetadata_Usage) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[39] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeOpenAIMetadata_Usage.ProtoReflect.Descriptor instead. 
+func (*GenerativeOpenAIMetadata_Usage) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{22, 0} +} + +func (x *GenerativeOpenAIMetadata_Usage) GetPromptTokens() int64 { + if x != nil && x.PromptTokens != nil { + return *x.PromptTokens + } + return 0 +} + +func (x *GenerativeOpenAIMetadata_Usage) GetCompletionTokens() int64 { + if x != nil && x.CompletionTokens != nil { + return *x.CompletionTokens + } + return 0 +} + +func (x *GenerativeOpenAIMetadata_Usage) GetTotalTokens() int64 { + if x != nil && x.TotalTokens != nil { + return *x.TotalTokens + } + return 0 +} + +type GenerativeGoogleMetadata_TokenCount struct { + state protoimpl.MessageState `protogen:"open.v1"` + TotalBillableCharacters *int64 `protobuf:"varint,1,opt,name=total_billable_characters,json=totalBillableCharacters,proto3,oneof" json:"total_billable_characters,omitempty"` + TotalTokens *int64 `protobuf:"varint,2,opt,name=total_tokens,json=totalTokens,proto3,oneof" json:"total_tokens,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeGoogleMetadata_TokenCount) Reset() { + *x = GenerativeGoogleMetadata_TokenCount{} + mi := &file_v1_generative_proto_msgTypes[40] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeGoogleMetadata_TokenCount) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeGoogleMetadata_TokenCount) ProtoMessage() {} + +func (x *GenerativeGoogleMetadata_TokenCount) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[40] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeGoogleMetadata_TokenCount.ProtoReflect.Descriptor instead. 
+func (*GenerativeGoogleMetadata_TokenCount) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{23, 0} +} + +func (x *GenerativeGoogleMetadata_TokenCount) GetTotalBillableCharacters() int64 { + if x != nil && x.TotalBillableCharacters != nil { + return *x.TotalBillableCharacters + } + return 0 +} + +func (x *GenerativeGoogleMetadata_TokenCount) GetTotalTokens() int64 { + if x != nil && x.TotalTokens != nil { + return *x.TotalTokens + } + return 0 +} + +type GenerativeGoogleMetadata_TokenMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + InputTokenCount *GenerativeGoogleMetadata_TokenCount `protobuf:"bytes,1,opt,name=input_token_count,json=inputTokenCount,proto3,oneof" json:"input_token_count,omitempty"` + OutputTokenCount *GenerativeGoogleMetadata_TokenCount `protobuf:"bytes,2,opt,name=output_token_count,json=outputTokenCount,proto3,oneof" json:"output_token_count,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeGoogleMetadata_TokenMetadata) Reset() { + *x = GenerativeGoogleMetadata_TokenMetadata{} + mi := &file_v1_generative_proto_msgTypes[41] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeGoogleMetadata_TokenMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeGoogleMetadata_TokenMetadata) ProtoMessage() {} + +func (x *GenerativeGoogleMetadata_TokenMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[41] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeGoogleMetadata_TokenMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeGoogleMetadata_TokenMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{23, 1} +} + +func (x *GenerativeGoogleMetadata_TokenMetadata) GetInputTokenCount() *GenerativeGoogleMetadata_TokenCount { + if x != nil { + return x.InputTokenCount + } + return nil +} + +func (x *GenerativeGoogleMetadata_TokenMetadata) GetOutputTokenCount() *GenerativeGoogleMetadata_TokenCount { + if x != nil { + return x.OutputTokenCount + } + return nil +} + +type GenerativeGoogleMetadata_Metadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + TokenMetadata *GenerativeGoogleMetadata_TokenMetadata `protobuf:"bytes,1,opt,name=token_metadata,json=tokenMetadata,proto3,oneof" json:"token_metadata,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeGoogleMetadata_Metadata) Reset() { + *x = GenerativeGoogleMetadata_Metadata{} + mi := &file_v1_generative_proto_msgTypes[42] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeGoogleMetadata_Metadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeGoogleMetadata_Metadata) ProtoMessage() {} + +func (x *GenerativeGoogleMetadata_Metadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[42] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeGoogleMetadata_Metadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeGoogleMetadata_Metadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{23, 2} +} + +func (x *GenerativeGoogleMetadata_Metadata) GetTokenMetadata() *GenerativeGoogleMetadata_TokenMetadata { + if x != nil { + return x.TokenMetadata + } + return nil +} + +type GenerativeGoogleMetadata_UsageMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + PromptTokenCount *int64 `protobuf:"varint,1,opt,name=prompt_token_count,json=promptTokenCount,proto3,oneof" json:"prompt_token_count,omitempty"` + CandidatesTokenCount *int64 `protobuf:"varint,2,opt,name=candidates_token_count,json=candidatesTokenCount,proto3,oneof" json:"candidates_token_count,omitempty"` + TotalTokenCount *int64 `protobuf:"varint,3,opt,name=total_token_count,json=totalTokenCount,proto3,oneof" json:"total_token_count,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeGoogleMetadata_UsageMetadata) Reset() { + *x = GenerativeGoogleMetadata_UsageMetadata{} + mi := &file_v1_generative_proto_msgTypes[43] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeGoogleMetadata_UsageMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeGoogleMetadata_UsageMetadata) ProtoMessage() {} + +func (x *GenerativeGoogleMetadata_UsageMetadata) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[43] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GenerativeGoogleMetadata_UsageMetadata.ProtoReflect.Descriptor instead. 
+func (*GenerativeGoogleMetadata_UsageMetadata) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{23, 3} +} + +func (x *GenerativeGoogleMetadata_UsageMetadata) GetPromptTokenCount() int64 { + if x != nil && x.PromptTokenCount != nil { + return *x.PromptTokenCount + } + return 0 +} + +func (x *GenerativeGoogleMetadata_UsageMetadata) GetCandidatesTokenCount() int64 { + if x != nil && x.CandidatesTokenCount != nil { + return *x.CandidatesTokenCount + } + return 0 +} + +func (x *GenerativeGoogleMetadata_UsageMetadata) GetTotalTokenCount() int64 { + if x != nil && x.TotalTokenCount != nil { + return *x.TotalTokenCount + } + return 0 +} + +type GenerativeDatabricksMetadata_Usage struct { + state protoimpl.MessageState `protogen:"open.v1"` + PromptTokens *int64 `protobuf:"varint,1,opt,name=prompt_tokens,json=promptTokens,proto3,oneof" json:"prompt_tokens,omitempty"` + CompletionTokens *int64 `protobuf:"varint,2,opt,name=completion_tokens,json=completionTokens,proto3,oneof" json:"completion_tokens,omitempty"` + TotalTokens *int64 `protobuf:"varint,3,opt,name=total_tokens,json=totalTokens,proto3,oneof" json:"total_tokens,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeDatabricksMetadata_Usage) Reset() { + *x = GenerativeDatabricksMetadata_Usage{} + mi := &file_v1_generative_proto_msgTypes[44] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeDatabricksMetadata_Usage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeDatabricksMetadata_Usage) ProtoMessage() {} + +func (x *GenerativeDatabricksMetadata_Usage) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[44] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
GenerativeDatabricksMetadata_Usage.ProtoReflect.Descriptor instead. +func (*GenerativeDatabricksMetadata_Usage) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{24, 0} +} + +func (x *GenerativeDatabricksMetadata_Usage) GetPromptTokens() int64 { + if x != nil && x.PromptTokens != nil { + return *x.PromptTokens + } + return 0 +} + +func (x *GenerativeDatabricksMetadata_Usage) GetCompletionTokens() int64 { + if x != nil && x.CompletionTokens != nil { + return *x.CompletionTokens + } + return 0 +} + +func (x *GenerativeDatabricksMetadata_Usage) GetTotalTokens() int64 { + if x != nil && x.TotalTokens != nil { + return *x.TotalTokens + } + return 0 +} + +type GenerativeFriendliAIMetadata_Usage struct { + state protoimpl.MessageState `protogen:"open.v1"` + PromptTokens *int64 `protobuf:"varint,1,opt,name=prompt_tokens,json=promptTokens,proto3,oneof" json:"prompt_tokens,omitempty"` + CompletionTokens *int64 `protobuf:"varint,2,opt,name=completion_tokens,json=completionTokens,proto3,oneof" json:"completion_tokens,omitempty"` + TotalTokens *int64 `protobuf:"varint,3,opt,name=total_tokens,json=totalTokens,proto3,oneof" json:"total_tokens,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeFriendliAIMetadata_Usage) Reset() { + *x = GenerativeFriendliAIMetadata_Usage{} + mi := &file_v1_generative_proto_msgTypes[45] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeFriendliAIMetadata_Usage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeFriendliAIMetadata_Usage) ProtoMessage() {} + +func (x *GenerativeFriendliAIMetadata_Usage) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[45] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// 
Deprecated: Use GenerativeFriendliAIMetadata_Usage.ProtoReflect.Descriptor instead. +func (*GenerativeFriendliAIMetadata_Usage) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{25, 0} +} + +func (x *GenerativeFriendliAIMetadata_Usage) GetPromptTokens() int64 { + if x != nil && x.PromptTokens != nil { + return *x.PromptTokens + } + return 0 +} + +func (x *GenerativeFriendliAIMetadata_Usage) GetCompletionTokens() int64 { + if x != nil && x.CompletionTokens != nil { + return *x.CompletionTokens + } + return 0 +} + +func (x *GenerativeFriendliAIMetadata_Usage) GetTotalTokens() int64 { + if x != nil && x.TotalTokens != nil { + return *x.TotalTokens + } + return 0 +} + +type GenerativeNvidiaMetadata_Usage struct { + state protoimpl.MessageState `protogen:"open.v1"` + PromptTokens *int64 `protobuf:"varint,1,opt,name=prompt_tokens,json=promptTokens,proto3,oneof" json:"prompt_tokens,omitempty"` + CompletionTokens *int64 `protobuf:"varint,2,opt,name=completion_tokens,json=completionTokens,proto3,oneof" json:"completion_tokens,omitempty"` + TotalTokens *int64 `protobuf:"varint,3,opt,name=total_tokens,json=totalTokens,proto3,oneof" json:"total_tokens,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeNvidiaMetadata_Usage) Reset() { + *x = GenerativeNvidiaMetadata_Usage{} + mi := &file_v1_generative_proto_msgTypes[46] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeNvidiaMetadata_Usage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeNvidiaMetadata_Usage) ProtoMessage() {} + +func (x *GenerativeNvidiaMetadata_Usage) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[46] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// 
Deprecated: Use GenerativeNvidiaMetadata_Usage.ProtoReflect.Descriptor instead. +func (*GenerativeNvidiaMetadata_Usage) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{26, 0} +} + +func (x *GenerativeNvidiaMetadata_Usage) GetPromptTokens() int64 { + if x != nil && x.PromptTokens != nil { + return *x.PromptTokens + } + return 0 +} + +func (x *GenerativeNvidiaMetadata_Usage) GetCompletionTokens() int64 { + if x != nil && x.CompletionTokens != nil { + return *x.CompletionTokens + } + return 0 +} + +func (x *GenerativeNvidiaMetadata_Usage) GetTotalTokens() int64 { + if x != nil && x.TotalTokens != nil { + return *x.TotalTokens + } + return 0 +} + +type GenerativeXAIMetadata_Usage struct { + state protoimpl.MessageState `protogen:"open.v1"` + PromptTokens *int64 `protobuf:"varint,1,opt,name=prompt_tokens,json=promptTokens,proto3,oneof" json:"prompt_tokens,omitempty"` + CompletionTokens *int64 `protobuf:"varint,2,opt,name=completion_tokens,json=completionTokens,proto3,oneof" json:"completion_tokens,omitempty"` + TotalTokens *int64 `protobuf:"varint,3,opt,name=total_tokens,json=totalTokens,proto3,oneof" json:"total_tokens,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GenerativeXAIMetadata_Usage) Reset() { + *x = GenerativeXAIMetadata_Usage{} + mi := &file_v1_generative_proto_msgTypes[47] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GenerativeXAIMetadata_Usage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GenerativeXAIMetadata_Usage) ProtoMessage() {} + +func (x *GenerativeXAIMetadata_Usage) ProtoReflect() protoreflect.Message { + mi := &file_v1_generative_proto_msgTypes[47] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
GenerativeXAIMetadata_Usage.ProtoReflect.Descriptor instead. +func (*GenerativeXAIMetadata_Usage) Descriptor() ([]byte, []int) { + return file_v1_generative_proto_rawDescGZIP(), []int{27, 0} +} + +func (x *GenerativeXAIMetadata_Usage) GetPromptTokens() int64 { + if x != nil && x.PromptTokens != nil { + return *x.PromptTokens + } + return 0 +} + +func (x *GenerativeXAIMetadata_Usage) GetCompletionTokens() int64 { + if x != nil && x.CompletionTokens != nil { + return *x.CompletionTokens + } + return 0 +} + +func (x *GenerativeXAIMetadata_Usage) GetTotalTokens() int64 { + if x != nil && x.TotalTokens != nil { + return *x.TotalTokens + } + return 0 +} + +var File_v1_generative_proto protoreflect.FileDescriptor + +const file_v1_generative_proto_rawDesc = "" + + "\n" + + "\x13v1/generative.proto\x12\vweaviate.v1\x1a\rv1/base.proto\"\xe6\x04\n" + + "\x10GenerativeSearch\x128\n" + + "\x16single_response_prompt\x18\x01 \x01(\tB\x02\x18\x01R\x14singleResponsePrompt\x126\n" + + "\x15grouped_response_task\x18\x02 \x01(\tB\x02\x18\x01R\x13groupedResponseTask\x121\n" + + "\x12grouped_properties\x18\x03 \x03(\tB\x02\x18\x01R\x11groupedProperties\x12<\n" + + "\x06single\x18\x04 \x01(\v2$.weaviate.v1.GenerativeSearch.SingleR\x06single\x12?\n" + + "\agrouped\x18\x05 \x01(\v2%.weaviate.v1.GenerativeSearch.GroupedR\agrouped\x1aq\n" + + "\x06Single\x12\x16\n" + + "\x06prompt\x18\x01 \x01(\tR\x06prompt\x12\x14\n" + + "\x05debug\x18\x02 \x01(\bR\x05debug\x129\n" + + "\aqueries\x18\x03 \x03(\v2\x1f.weaviate.v1.GenerativeProviderR\aqueries\x1a\xba\x01\n" + + "\aGrouped\x12\x12\n" + + "\x04task\x18\x01 \x01(\tR\x04task\x12;\n" + + "\n" + + "properties\x18\x02 \x01(\v2\x16.weaviate.v1.TextArrayH\x00R\n" + + "properties\x88\x01\x01\x129\n" + + "\aqueries\x18\x03 \x03(\v2\x1f.weaviate.v1.GenerativeProviderR\aqueries\x12\x14\n" + + "\x05debug\x18\x04 \x01(\bR\x05debugB\r\n" + + "\v_properties\"\xbf\x06\n" + + "\x12GenerativeProvider\x12'\n" + + "\x0freturn_metadata\x18\x01 
\x01(\bR\x0ereturnMetadata\x12@\n" + + "\tanthropic\x18\x02 \x01(\v2 .weaviate.v1.GenerativeAnthropicH\x00R\tanthropic\x12=\n" + + "\banyscale\x18\x03 \x01(\v2\x1f.weaviate.v1.GenerativeAnyscaleH\x00R\banyscale\x12.\n" + + "\x03aws\x18\x04 \x01(\v2\x1a.weaviate.v1.GenerativeAWSH\x00R\x03aws\x127\n" + + "\x06cohere\x18\x05 \x01(\v2\x1d.weaviate.v1.GenerativeCohereH\x00R\x06cohere\x124\n" + + "\x05dummy\x18\x06 \x01(\v2\x1c.weaviate.v1.GenerativeDummyH\x00R\x05dummy\x12:\n" + + "\amistral\x18\a \x01(\v2\x1e.weaviate.v1.GenerativeMistralH\x00R\amistral\x127\n" + + "\x06ollama\x18\b \x01(\v2\x1d.weaviate.v1.GenerativeOllamaH\x00R\x06ollama\x127\n" + + "\x06openai\x18\t \x01(\v2\x1d.weaviate.v1.GenerativeOpenAIH\x00R\x06openai\x127\n" + + "\x06google\x18\n" + + " \x01(\v2\x1d.weaviate.v1.GenerativeGoogleH\x00R\x06google\x12C\n" + + "\n" + + "databricks\x18\v \x01(\v2!.weaviate.v1.GenerativeDatabricksH\x00R\n" + + "databricks\x12C\n" + + "\n" + + "friendliai\x18\f \x01(\v2!.weaviate.v1.GenerativeFriendliAIH\x00R\n" + + "friendliai\x127\n" + + "\x06nvidia\x18\r \x01(\v2\x1d.weaviate.v1.GenerativeNvidiaH\x00R\x06nvidia\x12.\n" + + "\x03xai\x18\x0e \x01(\v2\x1a.weaviate.v1.GenerativeXAIH\x00R\x03xaiB\x06\n" + + "\x04kind\"\x8d\x04\n" + + "\x13GenerativeAnthropic\x12\x1e\n" + + "\bbase_url\x18\x01 \x01(\tH\x00R\abaseUrl\x88\x01\x01\x12\"\n" + + "\n" + + "max_tokens\x18\x02 \x01(\x03H\x01R\tmaxTokens\x88\x01\x01\x12\x19\n" + + "\x05model\x18\x03 \x01(\tH\x02R\x05model\x88\x01\x01\x12%\n" + + "\vtemperature\x18\x04 \x01(\x01H\x03R\vtemperature\x88\x01\x01\x12\x18\n" + + "\x05top_k\x18\x05 \x01(\x03H\x04R\x04topK\x88\x01\x01\x12\x18\n" + + "\x05top_p\x18\x06 \x01(\x01H\x05R\x04topP\x88\x01\x01\x12B\n" + + "\x0estop_sequences\x18\a \x01(\v2\x16.weaviate.v1.TextArrayH\x06R\rstopSequences\x88\x01\x01\x123\n" + + "\x06images\x18\b \x01(\v2\x16.weaviate.v1.TextArrayH\aR\x06images\x88\x01\x01\x12F\n" + + "\x10image_properties\x18\t 
\x01(\v2\x16.weaviate.v1.TextArrayH\bR\x0fimageProperties\x88\x01\x01B\v\n" + + "\t_base_urlB\r\n" + + "\v_max_tokensB\b\n" + + "\x06_modelB\x0e\n" + + "\f_temperatureB\b\n" + + "\x06_top_kB\b\n" + + "\x06_top_pB\x11\n" + + "\x0f_stop_sequencesB\t\n" + + "\a_imagesB\x13\n" + + "\x11_image_properties\"\x9d\x01\n" + + "\x12GenerativeAnyscale\x12\x1e\n" + + "\bbase_url\x18\x01 \x01(\tH\x00R\abaseUrl\x88\x01\x01\x12\x19\n" + + "\x05model\x18\x02 \x01(\tH\x01R\x05model\x88\x01\x01\x12%\n" + + "\vtemperature\x18\x03 \x01(\x01H\x02R\vtemperature\x88\x01\x01B\v\n" + + "\t_base_urlB\b\n" + + "\x06_modelB\x0e\n" + + "\f_temperature\"\x81\x04\n" + + "\rGenerativeAWS\x12\x19\n" + + "\x05model\x18\x03 \x01(\tH\x00R\x05model\x88\x01\x01\x12%\n" + + "\vtemperature\x18\b \x01(\x01H\x01R\vtemperature\x88\x01\x01\x12\x1d\n" + + "\aservice\x18\t \x01(\tH\x02R\aservice\x88\x01\x01\x12\x1b\n" + + "\x06region\x18\n" + + " \x01(\tH\x03R\x06region\x88\x01\x01\x12\x1f\n" + + "\bendpoint\x18\v \x01(\tH\x04R\bendpoint\x88\x01\x01\x12&\n" + + "\ftarget_model\x18\f \x01(\tH\x05R\vtargetModel\x88\x01\x01\x12*\n" + + "\x0etarget_variant\x18\r \x01(\tH\x06R\rtargetVariant\x88\x01\x01\x123\n" + + "\x06images\x18\x0e \x01(\v2\x16.weaviate.v1.TextArrayH\aR\x06images\x88\x01\x01\x12F\n" + + "\x10image_properties\x18\x0f \x01(\v2\x16.weaviate.v1.TextArrayH\bR\x0fimageProperties\x88\x01\x01B\b\n" + + "\x06_modelB\x0e\n" + + "\f_temperatureB\n" + + "\n" + + "\b_serviceB\t\n" + + "\a_regionB\v\n" + + "\t_endpointB\x0f\n" + + "\r_target_modelB\x11\n" + + "\x0f_target_variantB\t\n" + + "\a_imagesB\x13\n" + + "\x11_image_properties\"\xe4\x03\n" + + "\x10GenerativeCohere\x12\x1e\n" + + "\bbase_url\x18\x01 \x01(\tH\x00R\abaseUrl\x88\x01\x01\x120\n" + + "\x11frequency_penalty\x18\x02 \x01(\x01H\x01R\x10frequencyPenalty\x88\x01\x01\x12\"\n" + + "\n" + + "max_tokens\x18\x03 \x01(\x03H\x02R\tmaxTokens\x88\x01\x01\x12\x19\n" + + "\x05model\x18\x04 \x01(\tH\x03R\x05model\x88\x01\x01\x12\x11\n" + + "\x01k\x18\x05 
\x01(\x03H\x04R\x01k\x88\x01\x01\x12\x11\n" + + "\x01p\x18\x06 \x01(\x01H\x05R\x01p\x88\x01\x01\x12.\n" + + "\x10presence_penalty\x18\a \x01(\x01H\x06R\x0fpresencePenalty\x88\x01\x01\x12B\n" + + "\x0estop_sequences\x18\b \x01(\v2\x16.weaviate.v1.TextArrayH\aR\rstopSequences\x88\x01\x01\x12%\n" + + "\vtemperature\x18\t \x01(\x01H\bR\vtemperature\x88\x01\x01B\v\n" + + "\t_base_urlB\x14\n" + + "\x12_frequency_penaltyB\r\n" + + "\v_max_tokensB\b\n" + + "\x06_modelB\x04\n" + + "\x02_kB\x04\n" + + "\x02_pB\x13\n" + + "\x11_presence_penaltyB\x11\n" + + "\x0f_stop_sequencesB\x0e\n" + + "\f_temperature\"\x11\n" + + "\x0fGenerativeDummy\"\xf3\x01\n" + + "\x11GenerativeMistral\x12\x1e\n" + + "\bbase_url\x18\x01 \x01(\tH\x00R\abaseUrl\x88\x01\x01\x12\"\n" + + "\n" + + "max_tokens\x18\x02 \x01(\x03H\x01R\tmaxTokens\x88\x01\x01\x12\x19\n" + + "\x05model\x18\x03 \x01(\tH\x02R\x05model\x88\x01\x01\x12%\n" + + "\vtemperature\x18\x04 \x01(\x01H\x03R\vtemperature\x88\x01\x01\x12\x18\n" + + "\x05top_p\x18\x05 \x01(\x01H\x04R\x04topP\x88\x01\x01B\v\n" + + "\t_base_urlB\r\n" + + "\v_max_tokensB\b\n" + + "\x06_modelB\x0e\n" + + "\f_temperatureB\b\n" + + "\x06_top_p\"\xc4\x02\n" + + "\x10GenerativeOllama\x12&\n" + + "\fapi_endpoint\x18\x01 \x01(\tH\x00R\vapiEndpoint\x88\x01\x01\x12\x19\n" + + "\x05model\x18\x02 \x01(\tH\x01R\x05model\x88\x01\x01\x12%\n" + + "\vtemperature\x18\x03 \x01(\x01H\x02R\vtemperature\x88\x01\x01\x123\n" + + "\x06images\x18\x04 \x01(\v2\x16.weaviate.v1.TextArrayH\x03R\x06images\x88\x01\x01\x12F\n" + + "\x10image_properties\x18\x05 \x01(\v2\x16.weaviate.v1.TextArrayH\x04R\x0fimageProperties\x88\x01\x01B\x0f\n" + + "\r_api_endpointB\b\n" + + "\x06_modelB\x0e\n" + + "\f_temperatureB\t\n" + + "\a_imagesB\x13\n" + + "\x11_image_properties\"\xca\x06\n" + + "\x10GenerativeOpenAI\x120\n" + + "\x11frequency_penalty\x18\x01 \x01(\x01H\x00R\x10frequencyPenalty\x88\x01\x01\x12\"\n" + + "\n" + + "max_tokens\x18\x02 \x01(\x03H\x01R\tmaxTokens\x88\x01\x01\x12\x19\n" + + 
"\x05model\x18\x03 \x01(\tH\x02R\x05model\x88\x01\x01\x12\x11\n" + + "\x01n\x18\x04 \x01(\x03H\x03R\x01n\x88\x01\x01\x12.\n" + + "\x10presence_penalty\x18\x05 \x01(\x01H\x04R\x0fpresencePenalty\x88\x01\x01\x12/\n" + + "\x04stop\x18\x06 \x01(\v2\x16.weaviate.v1.TextArrayH\x05R\x04stop\x88\x01\x01\x12%\n" + + "\vtemperature\x18\a \x01(\x01H\x06R\vtemperature\x88\x01\x01\x12\x18\n" + + "\x05top_p\x18\b \x01(\x01H\aR\x04topP\x88\x01\x01\x12\x1e\n" + + "\bbase_url\x18\t \x01(\tH\bR\abaseUrl\x88\x01\x01\x12$\n" + + "\vapi_version\x18\n" + + " \x01(\tH\tR\n" + + "apiVersion\x88\x01\x01\x12(\n" + + "\rresource_name\x18\v \x01(\tH\n" + + "R\fresourceName\x88\x01\x01\x12(\n" + + "\rdeployment_id\x18\f \x01(\tH\vR\fdeploymentId\x88\x01\x01\x12\x1e\n" + + "\bis_azure\x18\r \x01(\bH\fR\aisAzure\x88\x01\x01\x123\n" + + "\x06images\x18\x0e \x01(\v2\x16.weaviate.v1.TextArrayH\rR\x06images\x88\x01\x01\x12F\n" + + "\x10image_properties\x18\x0f \x01(\v2\x16.weaviate.v1.TextArrayH\x0eR\x0fimageProperties\x88\x01\x01B\x14\n" + + "\x12_frequency_penaltyB\r\n" + + "\v_max_tokensB\b\n" + + "\x06_modelB\x04\n" + + "\x02_nB\x13\n" + + "\x11_presence_penaltyB\a\n" + + "\x05_stopB\x0e\n" + + "\f_temperatureB\b\n" + + "\x06_top_pB\v\n" + + "\t_base_urlB\x0e\n" + + "\f_api_versionB\x10\n" + + "\x0e_resource_nameB\x10\n" + + "\x0e_deployment_idB\v\n" + + "\t_is_azureB\t\n" + + "\a_imagesB\x13\n" + + "\x11_image_properties\"\xb4\x06\n" + + "\x10GenerativeGoogle\x120\n" + + "\x11frequency_penalty\x18\x01 \x01(\x01H\x00R\x10frequencyPenalty\x88\x01\x01\x12\"\n" + + "\n" + + "max_tokens\x18\x02 \x01(\x03H\x01R\tmaxTokens\x88\x01\x01\x12\x19\n" + + "\x05model\x18\x03 \x01(\tH\x02R\x05model\x88\x01\x01\x12.\n" + + "\x10presence_penalty\x18\x04 \x01(\x01H\x03R\x0fpresencePenalty\x88\x01\x01\x12%\n" + + "\vtemperature\x18\x05 \x01(\x01H\x04R\vtemperature\x88\x01\x01\x12\x18\n" + + "\x05top_k\x18\x06 \x01(\x03H\x05R\x04topK\x88\x01\x01\x12\x18\n" + + "\x05top_p\x18\a 
\x01(\x01H\x06R\x04topP\x88\x01\x01\x12B\n" + + "\x0estop_sequences\x18\b \x01(\v2\x16.weaviate.v1.TextArrayH\aR\rstopSequences\x88\x01\x01\x12&\n" + + "\fapi_endpoint\x18\t \x01(\tH\bR\vapiEndpoint\x88\x01\x01\x12\"\n" + + "\n" + + "project_id\x18\n" + + " \x01(\tH\tR\tprojectId\x88\x01\x01\x12$\n" + + "\vendpoint_id\x18\v \x01(\tH\n" + + "R\n" + + "endpointId\x88\x01\x01\x12\x1b\n" + + "\x06region\x18\f \x01(\tH\vR\x06region\x88\x01\x01\x123\n" + + "\x06images\x18\r \x01(\v2\x16.weaviate.v1.TextArrayH\fR\x06images\x88\x01\x01\x12F\n" + + "\x10image_properties\x18\x0e \x01(\v2\x16.weaviate.v1.TextArrayH\rR\x0fimageProperties\x88\x01\x01B\x14\n" + + "\x12_frequency_penaltyB\r\n" + + "\v_max_tokensB\b\n" + + "\x06_modelB\x13\n" + + "\x11_presence_penaltyB\x0e\n" + + "\f_temperatureB\b\n" + + "\x06_top_kB\b\n" + + "\x06_top_pB\x11\n" + + "\x0f_stop_sequencesB\x0f\n" + + "\r_api_endpointB\r\n" + + "\v_project_idB\x0e\n" + + "\f_endpoint_idB\t\n" + + "\a_regionB\t\n" + + "\a_imagesB\x13\n" + + "\x11_image_properties\"\xc2\x04\n" + + "\x14GenerativeDatabricks\x12\x1f\n" + + "\bendpoint\x18\x01 \x01(\tH\x00R\bendpoint\x88\x01\x01\x12\x19\n" + + "\x05model\x18\x02 \x01(\tH\x01R\x05model\x88\x01\x01\x120\n" + + "\x11frequency_penalty\x18\x03 \x01(\x01H\x02R\x10frequencyPenalty\x88\x01\x01\x12 \n" + + "\tlog_probs\x18\x04 \x01(\bH\x03R\blogProbs\x88\x01\x01\x12'\n" + + "\rtop_log_probs\x18\x05 \x01(\x03H\x04R\vtopLogProbs\x88\x01\x01\x12\"\n" + + "\n" + + "max_tokens\x18\x06 \x01(\x03H\x05R\tmaxTokens\x88\x01\x01\x12\x11\n" + + "\x01n\x18\a \x01(\x03H\x06R\x01n\x88\x01\x01\x12.\n" + + "\x10presence_penalty\x18\b \x01(\x01H\aR\x0fpresencePenalty\x88\x01\x01\x12/\n" + + "\x04stop\x18\t \x01(\v2\x16.weaviate.v1.TextArrayH\bR\x04stop\x88\x01\x01\x12%\n" + + "\vtemperature\x18\n" + + " \x01(\x01H\tR\vtemperature\x88\x01\x01\x12\x18\n" + + "\x05top_p\x18\v \x01(\x01H\n" + + "R\x04topP\x88\x01\x01B\v\n" + + "\t_endpointB\b\n" + + "\x06_modelB\x14\n" + + 
"\x12_frequency_penaltyB\f\n" + + "\n" + + "_log_probsB\x10\n" + + "\x0e_top_log_probsB\r\n" + + "\v_max_tokensB\x04\n" + + "\x02_nB\x13\n" + + "\x11_presence_penaltyB\a\n" + + "\x05_stopB\x0e\n" + + "\f_temperatureB\b\n" + + "\x06_top_p\"\x8f\x02\n" + + "\x14GenerativeFriendliAI\x12\x1e\n" + + "\bbase_url\x18\x01 \x01(\tH\x00R\abaseUrl\x88\x01\x01\x12\x19\n" + + "\x05model\x18\x02 \x01(\tH\x01R\x05model\x88\x01\x01\x12\"\n" + + "\n" + + "max_tokens\x18\x03 \x01(\x03H\x02R\tmaxTokens\x88\x01\x01\x12%\n" + + "\vtemperature\x18\x04 \x01(\x01H\x03R\vtemperature\x88\x01\x01\x12\x11\n" + + "\x01n\x18\x05 \x01(\x03H\x04R\x01n\x88\x01\x01\x12\x18\n" + + "\x05top_p\x18\x06 \x01(\x01H\x05R\x04topP\x88\x01\x01B\v\n" + + "\t_base_urlB\b\n" + + "\x06_modelB\r\n" + + "\v_max_tokensB\x0e\n" + + "\f_temperatureB\x04\n" + + "\x02_nB\b\n" + + "\x06_top_p\"\xf2\x01\n" + + "\x10GenerativeNvidia\x12\x1e\n" + + "\bbase_url\x18\x01 \x01(\tH\x00R\abaseUrl\x88\x01\x01\x12\x19\n" + + "\x05model\x18\x02 \x01(\tH\x01R\x05model\x88\x01\x01\x12%\n" + + "\vtemperature\x18\x03 \x01(\x01H\x02R\vtemperature\x88\x01\x01\x12\x18\n" + + "\x05top_p\x18\x04 \x01(\x01H\x03R\x04topP\x88\x01\x01\x12\"\n" + + "\n" + + "max_tokens\x18\x05 \x01(\x03H\x04R\tmaxTokens\x88\x01\x01B\v\n" + + "\t_base_urlB\b\n" + + "\x06_modelB\x0e\n" + + "\f_temperatureB\b\n" + + "\x06_top_pB\r\n" + + "\v_max_tokens\"\x8c\x03\n" + + "\rGenerativeXAI\x12\x1e\n" + + "\bbase_url\x18\x01 \x01(\tH\x00R\abaseUrl\x88\x01\x01\x12\x19\n" + + "\x05model\x18\x02 \x01(\tH\x01R\x05model\x88\x01\x01\x12%\n" + + "\vtemperature\x18\x03 \x01(\x01H\x02R\vtemperature\x88\x01\x01\x12\x18\n" + + "\x05top_p\x18\x04 \x01(\x01H\x03R\x04topP\x88\x01\x01\x12\"\n" + + "\n" + + "max_tokens\x18\x05 \x01(\x03H\x04R\tmaxTokens\x88\x01\x01\x123\n" + + "\x06images\x18\x06 \x01(\v2\x16.weaviate.v1.TextArrayH\x05R\x06images\x88\x01\x01\x12F\n" + + "\x10image_properties\x18\a \x01(\v2\x16.weaviate.v1.TextArrayH\x06R\x0fimageProperties\x88\x01\x01B\v\n" + + 
"\t_base_urlB\b\n" + + "\x06_modelB\x0e\n" + + "\f_temperatureB\b\n" + + "\x06_top_pB\r\n" + + "\v_max_tokensB\t\n" + + "\a_imagesB\x13\n" + + "\x11_image_properties\"\xb4\x01\n" + + "\x1bGenerativeAnthropicMetadata\x12D\n" + + "\x05usage\x18\x01 \x01(\v2..weaviate.v1.GenerativeAnthropicMetadata.UsageR\x05usage\x1aO\n" + + "\x05Usage\x12!\n" + + "\finput_tokens\x18\x01 \x01(\x03R\vinputTokens\x12#\n" + + "\routput_tokens\x18\x02 \x01(\x03R\foutputTokens\"\x1c\n" + + "\x1aGenerativeAnyscaleMetadata\"\x17\n" + + "\x15GenerativeAWSMetadata\"\xc2\a\n" + + "\x18GenerativeCohereMetadata\x12V\n" + + "\vapi_version\x18\x01 \x01(\v20.weaviate.v1.GenerativeCohereMetadata.ApiVersionH\x00R\n" + + "apiVersion\x88\x01\x01\x12Y\n" + + "\fbilled_units\x18\x02 \x01(\v21.weaviate.v1.GenerativeCohereMetadata.BilledUnitsH\x01R\vbilledUnits\x88\x01\x01\x12I\n" + + "\x06tokens\x18\x03 \x01(\v2,.weaviate.v1.GenerativeCohereMetadata.TokensH\x02R\x06tokens\x88\x01\x01\x127\n" + + "\bwarnings\x18\x04 \x01(\v2\x16.weaviate.v1.TextArrayH\x03R\bwarnings\x88\x01\x01\x1a\xb5\x01\n" + + "\n" + + "ApiVersion\x12\x1d\n" + + "\aversion\x18\x01 \x01(\tH\x00R\aversion\x88\x01\x01\x12(\n" + + "\ris_deprecated\x18\x02 \x01(\bH\x01R\fisDeprecated\x88\x01\x01\x12,\n" + + "\x0fis_experimental\x18\x03 \x01(\bH\x02R\x0eisExperimental\x88\x01\x01B\n" + + "\n" + + "\b_versionB\x10\n" + + "\x0e_is_deprecatedB\x12\n" + + "\x10_is_experimental\x1a\xfe\x01\n" + + "\vBilledUnits\x12&\n" + + "\finput_tokens\x18\x01 \x01(\x01H\x00R\vinputTokens\x88\x01\x01\x12(\n" + + "\routput_tokens\x18\x02 \x01(\x01H\x01R\foutputTokens\x88\x01\x01\x12&\n" + + "\fsearch_units\x18\x03 \x01(\x01H\x02R\vsearchUnits\x88\x01\x01\x12-\n" + + "\x0fclassifications\x18\x04 \x01(\x01H\x03R\x0fclassifications\x88\x01\x01B\x0f\n" + + "\r_input_tokensB\x10\n" + + "\x0e_output_tokensB\x0f\n" + + "\r_search_unitsB\x12\n" + + "\x10_classifications\x1a}\n" + + "\x06Tokens\x12&\n" + + "\finput_tokens\x18\x01 
\x01(\x01H\x00R\vinputTokens\x88\x01\x01\x12(\n" + + "\routput_tokens\x18\x02 \x01(\x01H\x01R\foutputTokens\x88\x01\x01B\x0f\n" + + "\r_input_tokensB\x10\n" + + "\x0e_output_tokensB\x0e\n" + + "\f_api_versionB\x0f\n" + + "\r_billed_unitsB\t\n" + + "\a_tokensB\v\n" + + "\t_warnings\"\x19\n" + + "\x17GenerativeDummyMetadata\"\xb5\x02\n" + + "\x19GenerativeMistralMetadata\x12G\n" + + "\x05usage\x18\x01 \x01(\v2,.weaviate.v1.GenerativeMistralMetadata.UsageH\x00R\x05usage\x88\x01\x01\x1a\xc4\x01\n" + + "\x05Usage\x12(\n" + + "\rprompt_tokens\x18\x01 \x01(\x03H\x00R\fpromptTokens\x88\x01\x01\x120\n" + + "\x11completion_tokens\x18\x02 \x01(\x03H\x01R\x10completionTokens\x88\x01\x01\x12&\n" + + "\ftotal_tokens\x18\x03 \x01(\x03H\x02R\vtotalTokens\x88\x01\x01B\x10\n" + + "\x0e_prompt_tokensB\x14\n" + + "\x12_completion_tokensB\x0f\n" + + "\r_total_tokensB\b\n" + + "\x06_usage\"\x1a\n" + + "\x18GenerativeOllamaMetadata\"\xb3\x02\n" + + "\x18GenerativeOpenAIMetadata\x12F\n" + + "\x05usage\x18\x01 \x01(\v2+.weaviate.v1.GenerativeOpenAIMetadata.UsageH\x00R\x05usage\x88\x01\x01\x1a\xc4\x01\n" + + "\x05Usage\x12(\n" + + "\rprompt_tokens\x18\x01 \x01(\x03H\x00R\fpromptTokens\x88\x01\x01\x120\n" + + "\x11completion_tokens\x18\x02 \x01(\x03H\x01R\x10completionTokens\x88\x01\x01\x12&\n" + + "\ftotal_tokens\x18\x03 \x01(\x03H\x02R\vtotalTokens\x88\x01\x01B\x10\n" + + "\x0e_prompt_tokensB\x14\n" + + "\x12_completion_tokensB\x0f\n" + + "\r_total_tokensB\b\n" + + "\x06_usage\"\x93\b\n" + + "\x18GenerativeGoogleMetadata\x12O\n" + + "\bmetadata\x18\x01 \x01(\v2..weaviate.v1.GenerativeGoogleMetadata.MetadataH\x00R\bmetadata\x88\x01\x01\x12_\n" + + "\x0eusage_metadata\x18\x02 \x01(\v23.weaviate.v1.GenerativeGoogleMetadata.UsageMetadataH\x01R\rusageMetadata\x88\x01\x01\x1a\xa4\x01\n" + + "\n" + + "TokenCount\x12?\n" + + "\x19total_billable_characters\x18\x01 \x01(\x03H\x00R\x17totalBillableCharacters\x88\x01\x01\x12&\n" + + "\ftotal_tokens\x18\x02 
\x01(\x03H\x01R\vtotalTokens\x88\x01\x01B\x1c\n" + + "\x1a_total_billable_charactersB\x0f\n" + + "\r_total_tokens\x1a\x84\x02\n" + + "\rTokenMetadata\x12a\n" + + "\x11input_token_count\x18\x01 \x01(\v20.weaviate.v1.GenerativeGoogleMetadata.TokenCountH\x00R\x0finputTokenCount\x88\x01\x01\x12c\n" + + "\x12output_token_count\x18\x02 \x01(\v20.weaviate.v1.GenerativeGoogleMetadata.TokenCountH\x01R\x10outputTokenCount\x88\x01\x01B\x14\n" + + "\x12_input_token_countB\x15\n" + + "\x13_output_token_count\x1a~\n" + + "\bMetadata\x12_\n" + + "\x0etoken_metadata\x18\x01 \x01(\v23.weaviate.v1.GenerativeGoogleMetadata.TokenMetadataH\x00R\rtokenMetadata\x88\x01\x01B\x11\n" + + "\x0f_token_metadata\x1a\xf6\x01\n" + + "\rUsageMetadata\x121\n" + + "\x12prompt_token_count\x18\x01 \x01(\x03H\x00R\x10promptTokenCount\x88\x01\x01\x129\n" + + "\x16candidates_token_count\x18\x02 \x01(\x03H\x01R\x14candidatesTokenCount\x88\x01\x01\x12/\n" + + "\x11total_token_count\x18\x03 \x01(\x03H\x02R\x0ftotalTokenCount\x88\x01\x01B\x15\n" + + "\x13_prompt_token_countB\x19\n" + + "\x17_candidates_token_countB\x14\n" + + "\x12_total_token_countB\v\n" + + "\t_metadataB\x11\n" + + "\x0f_usage_metadata\"\xbb\x02\n" + + "\x1cGenerativeDatabricksMetadata\x12J\n" + + "\x05usage\x18\x01 \x01(\v2/.weaviate.v1.GenerativeDatabricksMetadata.UsageH\x00R\x05usage\x88\x01\x01\x1a\xc4\x01\n" + + "\x05Usage\x12(\n" + + "\rprompt_tokens\x18\x01 \x01(\x03H\x00R\fpromptTokens\x88\x01\x01\x120\n" + + "\x11completion_tokens\x18\x02 \x01(\x03H\x01R\x10completionTokens\x88\x01\x01\x12&\n" + + "\ftotal_tokens\x18\x03 \x01(\x03H\x02R\vtotalTokens\x88\x01\x01B\x10\n" + + "\x0e_prompt_tokensB\x14\n" + + "\x12_completion_tokensB\x0f\n" + + "\r_total_tokensB\b\n" + + "\x06_usage\"\xbb\x02\n" + + "\x1cGenerativeFriendliAIMetadata\x12J\n" + + "\x05usage\x18\x01 \x01(\v2/.weaviate.v1.GenerativeFriendliAIMetadata.UsageH\x00R\x05usage\x88\x01\x01\x1a\xc4\x01\n" + + "\x05Usage\x12(\n" + + "\rprompt_tokens\x18\x01 
\x01(\x03H\x00R\fpromptTokens\x88\x01\x01\x120\n" + + "\x11completion_tokens\x18\x02 \x01(\x03H\x01R\x10completionTokens\x88\x01\x01\x12&\n" + + "\ftotal_tokens\x18\x03 \x01(\x03H\x02R\vtotalTokens\x88\x01\x01B\x10\n" + + "\x0e_prompt_tokensB\x14\n" + + "\x12_completion_tokensB\x0f\n" + + "\r_total_tokensB\b\n" + + "\x06_usage\"\xb3\x02\n" + + "\x18GenerativeNvidiaMetadata\x12F\n" + + "\x05usage\x18\x01 \x01(\v2+.weaviate.v1.GenerativeNvidiaMetadata.UsageH\x00R\x05usage\x88\x01\x01\x1a\xc4\x01\n" + + "\x05Usage\x12(\n" + + "\rprompt_tokens\x18\x01 \x01(\x03H\x00R\fpromptTokens\x88\x01\x01\x120\n" + + "\x11completion_tokens\x18\x02 \x01(\x03H\x01R\x10completionTokens\x88\x01\x01\x12&\n" + + "\ftotal_tokens\x18\x03 \x01(\x03H\x02R\vtotalTokens\x88\x01\x01B\x10\n" + + "\x0e_prompt_tokensB\x14\n" + + "\x12_completion_tokensB\x0f\n" + + "\r_total_tokensB\b\n" + + "\x06_usage\"\xad\x02\n" + + "\x15GenerativeXAIMetadata\x12C\n" + + "\x05usage\x18\x01 \x01(\v2(.weaviate.v1.GenerativeXAIMetadata.UsageH\x00R\x05usage\x88\x01\x01\x1a\xc4\x01\n" + + "\x05Usage\x12(\n" + + "\rprompt_tokens\x18\x01 \x01(\x03H\x00R\fpromptTokens\x88\x01\x01\x120\n" + + "\x11completion_tokens\x18\x02 \x01(\x03H\x01R\x10completionTokens\x88\x01\x01\x12&\n" + + "\ftotal_tokens\x18\x03 \x01(\x03H\x02R\vtotalTokens\x88\x01\x01B\x10\n" + + "\x0e_prompt_tokensB\x14\n" + + "\x12_completion_tokensB\x0f\n" + + "\r_total_tokensB\b\n" + + "\x06_usage\"\xfe\x06\n" + + "\x12GenerativeMetadata\x12H\n" + + "\tanthropic\x18\x01 \x01(\v2(.weaviate.v1.GenerativeAnthropicMetadataH\x00R\tanthropic\x12E\n" + + "\banyscale\x18\x02 \x01(\v2'.weaviate.v1.GenerativeAnyscaleMetadataH\x00R\banyscale\x126\n" + + "\x03aws\x18\x03 \x01(\v2\".weaviate.v1.GenerativeAWSMetadataH\x00R\x03aws\x12?\n" + + "\x06cohere\x18\x04 \x01(\v2%.weaviate.v1.GenerativeCohereMetadataH\x00R\x06cohere\x12<\n" + + "\x05dummy\x18\x05 \x01(\v2$.weaviate.v1.GenerativeDummyMetadataH\x00R\x05dummy\x12B\n" + + "\amistral\x18\x06 
\x01(\v2&.weaviate.v1.GenerativeMistralMetadataH\x00R\amistral\x12?\n" + + "\x06ollama\x18\a \x01(\v2%.weaviate.v1.GenerativeOllamaMetadataH\x00R\x06ollama\x12?\n" + + "\x06openai\x18\b \x01(\v2%.weaviate.v1.GenerativeOpenAIMetadataH\x00R\x06openai\x12?\n" + + "\x06google\x18\t \x01(\v2%.weaviate.v1.GenerativeGoogleMetadataH\x00R\x06google\x12K\n" + + "\n" + + "databricks\x18\n" + + " \x01(\v2).weaviate.v1.GenerativeDatabricksMetadataH\x00R\n" + + "databricks\x12K\n" + + "\n" + + "friendliai\x18\v \x01(\v2).weaviate.v1.GenerativeFriendliAIMetadataH\x00R\n" + + "friendliai\x12?\n" + + "\x06nvidia\x18\f \x01(\v2%.weaviate.v1.GenerativeNvidiaMetadataH\x00R\x06nvidia\x126\n" + + "\x03xai\x18\r \x01(\v2\".weaviate.v1.GenerativeXAIMetadataH\x00R\x03xaiB\x06\n" + + "\x04kind\"\xbb\x01\n" + + "\x0fGenerativeReply\x12\x16\n" + + "\x06result\x18\x01 \x01(\tR\x06result\x127\n" + + "\x05debug\x18\x02 \x01(\v2\x1c.weaviate.v1.GenerativeDebugH\x00R\x05debug\x88\x01\x01\x12@\n" + + "\bmetadata\x18\x03 \x01(\v2\x1f.weaviate.v1.GenerativeMetadataH\x01R\bmetadata\x88\x01\x01B\b\n" + + "\x06_debugB\v\n" + + "\t_metadata\"H\n" + + "\x10GenerativeResult\x124\n" + + "\x06values\x18\x01 \x03(\v2\x1c.weaviate.v1.GenerativeReplyR\x06values\"G\n" + + "\x0fGenerativeDebug\x12$\n" + + "\vfull_prompt\x18\x01 \x01(\tH\x00R\n" + + "fullPrompt\x88\x01\x01B\x0e\n" + + "\f_full_promptBt\n" + + "#io.weaviate.client.grpc.protocol.v1B\x17WeaviateProtoGenerativeZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var ( + file_v1_generative_proto_rawDescOnce sync.Once + file_v1_generative_proto_rawDescData []byte +) + +func file_v1_generative_proto_rawDescGZIP() []byte { + file_v1_generative_proto_rawDescOnce.Do(func() { + file_v1_generative_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_v1_generative_proto_rawDesc), len(file_v1_generative_proto_rawDesc))) + }) + return file_v1_generative_proto_rawDescData +} + +var file_v1_generative_proto_msgTypes = 
make([]protoimpl.MessageInfo, 48) +var file_v1_generative_proto_goTypes = []any{ + (*GenerativeSearch)(nil), // 0: weaviate.v1.GenerativeSearch + (*GenerativeProvider)(nil), // 1: weaviate.v1.GenerativeProvider + (*GenerativeAnthropic)(nil), // 2: weaviate.v1.GenerativeAnthropic + (*GenerativeAnyscale)(nil), // 3: weaviate.v1.GenerativeAnyscale + (*GenerativeAWS)(nil), // 4: weaviate.v1.GenerativeAWS + (*GenerativeCohere)(nil), // 5: weaviate.v1.GenerativeCohere + (*GenerativeDummy)(nil), // 6: weaviate.v1.GenerativeDummy + (*GenerativeMistral)(nil), // 7: weaviate.v1.GenerativeMistral + (*GenerativeOllama)(nil), // 8: weaviate.v1.GenerativeOllama + (*GenerativeOpenAI)(nil), // 9: weaviate.v1.GenerativeOpenAI + (*GenerativeGoogle)(nil), // 10: weaviate.v1.GenerativeGoogle + (*GenerativeDatabricks)(nil), // 11: weaviate.v1.GenerativeDatabricks + (*GenerativeFriendliAI)(nil), // 12: weaviate.v1.GenerativeFriendliAI + (*GenerativeNvidia)(nil), // 13: weaviate.v1.GenerativeNvidia + (*GenerativeXAI)(nil), // 14: weaviate.v1.GenerativeXAI + (*GenerativeAnthropicMetadata)(nil), // 15: weaviate.v1.GenerativeAnthropicMetadata + (*GenerativeAnyscaleMetadata)(nil), // 16: weaviate.v1.GenerativeAnyscaleMetadata + (*GenerativeAWSMetadata)(nil), // 17: weaviate.v1.GenerativeAWSMetadata + (*GenerativeCohereMetadata)(nil), // 18: weaviate.v1.GenerativeCohereMetadata + (*GenerativeDummyMetadata)(nil), // 19: weaviate.v1.GenerativeDummyMetadata + (*GenerativeMistralMetadata)(nil), // 20: weaviate.v1.GenerativeMistralMetadata + (*GenerativeOllamaMetadata)(nil), // 21: weaviate.v1.GenerativeOllamaMetadata + (*GenerativeOpenAIMetadata)(nil), // 22: weaviate.v1.GenerativeOpenAIMetadata + (*GenerativeGoogleMetadata)(nil), // 23: weaviate.v1.GenerativeGoogleMetadata + (*GenerativeDatabricksMetadata)(nil), // 24: weaviate.v1.GenerativeDatabricksMetadata + (*GenerativeFriendliAIMetadata)(nil), // 25: weaviate.v1.GenerativeFriendliAIMetadata + (*GenerativeNvidiaMetadata)(nil), // 26: 
weaviate.v1.GenerativeNvidiaMetadata + (*GenerativeXAIMetadata)(nil), // 27: weaviate.v1.GenerativeXAIMetadata + (*GenerativeMetadata)(nil), // 28: weaviate.v1.GenerativeMetadata + (*GenerativeReply)(nil), // 29: weaviate.v1.GenerativeReply + (*GenerativeResult)(nil), // 30: weaviate.v1.GenerativeResult + (*GenerativeDebug)(nil), // 31: weaviate.v1.GenerativeDebug + (*GenerativeSearch_Single)(nil), // 32: weaviate.v1.GenerativeSearch.Single + (*GenerativeSearch_Grouped)(nil), // 33: weaviate.v1.GenerativeSearch.Grouped + (*GenerativeAnthropicMetadata_Usage)(nil), // 34: weaviate.v1.GenerativeAnthropicMetadata.Usage + (*GenerativeCohereMetadata_ApiVersion)(nil), // 35: weaviate.v1.GenerativeCohereMetadata.ApiVersion + (*GenerativeCohereMetadata_BilledUnits)(nil), // 36: weaviate.v1.GenerativeCohereMetadata.BilledUnits + (*GenerativeCohereMetadata_Tokens)(nil), // 37: weaviate.v1.GenerativeCohereMetadata.Tokens + (*GenerativeMistralMetadata_Usage)(nil), // 38: weaviate.v1.GenerativeMistralMetadata.Usage + (*GenerativeOpenAIMetadata_Usage)(nil), // 39: weaviate.v1.GenerativeOpenAIMetadata.Usage + (*GenerativeGoogleMetadata_TokenCount)(nil), // 40: weaviate.v1.GenerativeGoogleMetadata.TokenCount + (*GenerativeGoogleMetadata_TokenMetadata)(nil), // 41: weaviate.v1.GenerativeGoogleMetadata.TokenMetadata + (*GenerativeGoogleMetadata_Metadata)(nil), // 42: weaviate.v1.GenerativeGoogleMetadata.Metadata + (*GenerativeGoogleMetadata_UsageMetadata)(nil), // 43: weaviate.v1.GenerativeGoogleMetadata.UsageMetadata + (*GenerativeDatabricksMetadata_Usage)(nil), // 44: weaviate.v1.GenerativeDatabricksMetadata.Usage + (*GenerativeFriendliAIMetadata_Usage)(nil), // 45: weaviate.v1.GenerativeFriendliAIMetadata.Usage + (*GenerativeNvidiaMetadata_Usage)(nil), // 46: weaviate.v1.GenerativeNvidiaMetadata.Usage + (*GenerativeXAIMetadata_Usage)(nil), // 47: weaviate.v1.GenerativeXAIMetadata.Usage + (*TextArray)(nil), // 48: weaviate.v1.TextArray +} +var file_v1_generative_proto_depIdxs = 
[]int32{ + 32, // 0: weaviate.v1.GenerativeSearch.single:type_name -> weaviate.v1.GenerativeSearch.Single + 33, // 1: weaviate.v1.GenerativeSearch.grouped:type_name -> weaviate.v1.GenerativeSearch.Grouped + 2, // 2: weaviate.v1.GenerativeProvider.anthropic:type_name -> weaviate.v1.GenerativeAnthropic + 3, // 3: weaviate.v1.GenerativeProvider.anyscale:type_name -> weaviate.v1.GenerativeAnyscale + 4, // 4: weaviate.v1.GenerativeProvider.aws:type_name -> weaviate.v1.GenerativeAWS + 5, // 5: weaviate.v1.GenerativeProvider.cohere:type_name -> weaviate.v1.GenerativeCohere + 6, // 6: weaviate.v1.GenerativeProvider.dummy:type_name -> weaviate.v1.GenerativeDummy + 7, // 7: weaviate.v1.GenerativeProvider.mistral:type_name -> weaviate.v1.GenerativeMistral + 8, // 8: weaviate.v1.GenerativeProvider.ollama:type_name -> weaviate.v1.GenerativeOllama + 9, // 9: weaviate.v1.GenerativeProvider.openai:type_name -> weaviate.v1.GenerativeOpenAI + 10, // 10: weaviate.v1.GenerativeProvider.google:type_name -> weaviate.v1.GenerativeGoogle + 11, // 11: weaviate.v1.GenerativeProvider.databricks:type_name -> weaviate.v1.GenerativeDatabricks + 12, // 12: weaviate.v1.GenerativeProvider.friendliai:type_name -> weaviate.v1.GenerativeFriendliAI + 13, // 13: weaviate.v1.GenerativeProvider.nvidia:type_name -> weaviate.v1.GenerativeNvidia + 14, // 14: weaviate.v1.GenerativeProvider.xai:type_name -> weaviate.v1.GenerativeXAI + 48, // 15: weaviate.v1.GenerativeAnthropic.stop_sequences:type_name -> weaviate.v1.TextArray + 48, // 16: weaviate.v1.GenerativeAnthropic.images:type_name -> weaviate.v1.TextArray + 48, // 17: weaviate.v1.GenerativeAnthropic.image_properties:type_name -> weaviate.v1.TextArray + 48, // 18: weaviate.v1.GenerativeAWS.images:type_name -> weaviate.v1.TextArray + 48, // 19: weaviate.v1.GenerativeAWS.image_properties:type_name -> weaviate.v1.TextArray + 48, // 20: weaviate.v1.GenerativeCohere.stop_sequences:type_name -> weaviate.v1.TextArray + 48, // 21: 
weaviate.v1.GenerativeOllama.images:type_name -> weaviate.v1.TextArray + 48, // 22: weaviate.v1.GenerativeOllama.image_properties:type_name -> weaviate.v1.TextArray + 48, // 23: weaviate.v1.GenerativeOpenAI.stop:type_name -> weaviate.v1.TextArray + 48, // 24: weaviate.v1.GenerativeOpenAI.images:type_name -> weaviate.v1.TextArray + 48, // 25: weaviate.v1.GenerativeOpenAI.image_properties:type_name -> weaviate.v1.TextArray + 48, // 26: weaviate.v1.GenerativeGoogle.stop_sequences:type_name -> weaviate.v1.TextArray + 48, // 27: weaviate.v1.GenerativeGoogle.images:type_name -> weaviate.v1.TextArray + 48, // 28: weaviate.v1.GenerativeGoogle.image_properties:type_name -> weaviate.v1.TextArray + 48, // 29: weaviate.v1.GenerativeDatabricks.stop:type_name -> weaviate.v1.TextArray + 48, // 30: weaviate.v1.GenerativeXAI.images:type_name -> weaviate.v1.TextArray + 48, // 31: weaviate.v1.GenerativeXAI.image_properties:type_name -> weaviate.v1.TextArray + 34, // 32: weaviate.v1.GenerativeAnthropicMetadata.usage:type_name -> weaviate.v1.GenerativeAnthropicMetadata.Usage + 35, // 33: weaviate.v1.GenerativeCohereMetadata.api_version:type_name -> weaviate.v1.GenerativeCohereMetadata.ApiVersion + 36, // 34: weaviate.v1.GenerativeCohereMetadata.billed_units:type_name -> weaviate.v1.GenerativeCohereMetadata.BilledUnits + 37, // 35: weaviate.v1.GenerativeCohereMetadata.tokens:type_name -> weaviate.v1.GenerativeCohereMetadata.Tokens + 48, // 36: weaviate.v1.GenerativeCohereMetadata.warnings:type_name -> weaviate.v1.TextArray + 38, // 37: weaviate.v1.GenerativeMistralMetadata.usage:type_name -> weaviate.v1.GenerativeMistralMetadata.Usage + 39, // 38: weaviate.v1.GenerativeOpenAIMetadata.usage:type_name -> weaviate.v1.GenerativeOpenAIMetadata.Usage + 42, // 39: weaviate.v1.GenerativeGoogleMetadata.metadata:type_name -> weaviate.v1.GenerativeGoogleMetadata.Metadata + 43, // 40: weaviate.v1.GenerativeGoogleMetadata.usage_metadata:type_name -> weaviate.v1.GenerativeGoogleMetadata.UsageMetadata 
+ 44, // 41: weaviate.v1.GenerativeDatabricksMetadata.usage:type_name -> weaviate.v1.GenerativeDatabricksMetadata.Usage + 45, // 42: weaviate.v1.GenerativeFriendliAIMetadata.usage:type_name -> weaviate.v1.GenerativeFriendliAIMetadata.Usage + 46, // 43: weaviate.v1.GenerativeNvidiaMetadata.usage:type_name -> weaviate.v1.GenerativeNvidiaMetadata.Usage + 47, // 44: weaviate.v1.GenerativeXAIMetadata.usage:type_name -> weaviate.v1.GenerativeXAIMetadata.Usage + 15, // 45: weaviate.v1.GenerativeMetadata.anthropic:type_name -> weaviate.v1.GenerativeAnthropicMetadata + 16, // 46: weaviate.v1.GenerativeMetadata.anyscale:type_name -> weaviate.v1.GenerativeAnyscaleMetadata + 17, // 47: weaviate.v1.GenerativeMetadata.aws:type_name -> weaviate.v1.GenerativeAWSMetadata + 18, // 48: weaviate.v1.GenerativeMetadata.cohere:type_name -> weaviate.v1.GenerativeCohereMetadata + 19, // 49: weaviate.v1.GenerativeMetadata.dummy:type_name -> weaviate.v1.GenerativeDummyMetadata + 20, // 50: weaviate.v1.GenerativeMetadata.mistral:type_name -> weaviate.v1.GenerativeMistralMetadata + 21, // 51: weaviate.v1.GenerativeMetadata.ollama:type_name -> weaviate.v1.GenerativeOllamaMetadata + 22, // 52: weaviate.v1.GenerativeMetadata.openai:type_name -> weaviate.v1.GenerativeOpenAIMetadata + 23, // 53: weaviate.v1.GenerativeMetadata.google:type_name -> weaviate.v1.GenerativeGoogleMetadata + 24, // 54: weaviate.v1.GenerativeMetadata.databricks:type_name -> weaviate.v1.GenerativeDatabricksMetadata + 25, // 55: weaviate.v1.GenerativeMetadata.friendliai:type_name -> weaviate.v1.GenerativeFriendliAIMetadata + 26, // 56: weaviate.v1.GenerativeMetadata.nvidia:type_name -> weaviate.v1.GenerativeNvidiaMetadata + 27, // 57: weaviate.v1.GenerativeMetadata.xai:type_name -> weaviate.v1.GenerativeXAIMetadata + 31, // 58: weaviate.v1.GenerativeReply.debug:type_name -> weaviate.v1.GenerativeDebug + 28, // 59: weaviate.v1.GenerativeReply.metadata:type_name -> weaviate.v1.GenerativeMetadata + 29, // 60: 
weaviate.v1.GenerativeResult.values:type_name -> weaviate.v1.GenerativeReply + 1, // 61: weaviate.v1.GenerativeSearch.Single.queries:type_name -> weaviate.v1.GenerativeProvider + 48, // 62: weaviate.v1.GenerativeSearch.Grouped.properties:type_name -> weaviate.v1.TextArray + 1, // 63: weaviate.v1.GenerativeSearch.Grouped.queries:type_name -> weaviate.v1.GenerativeProvider + 40, // 64: weaviate.v1.GenerativeGoogleMetadata.TokenMetadata.input_token_count:type_name -> weaviate.v1.GenerativeGoogleMetadata.TokenCount + 40, // 65: weaviate.v1.GenerativeGoogleMetadata.TokenMetadata.output_token_count:type_name -> weaviate.v1.GenerativeGoogleMetadata.TokenCount + 41, // 66: weaviate.v1.GenerativeGoogleMetadata.Metadata.token_metadata:type_name -> weaviate.v1.GenerativeGoogleMetadata.TokenMetadata + 67, // [67:67] is the sub-list for method output_type + 67, // [67:67] is the sub-list for method input_type + 67, // [67:67] is the sub-list for extension type_name + 67, // [67:67] is the sub-list for extension extendee + 0, // [0:67] is the sub-list for field type_name +} + +func init() { file_v1_generative_proto_init() } +func file_v1_generative_proto_init() { + if File_v1_generative_proto != nil { + return + } + file_v1_base_proto_init() + file_v1_generative_proto_msgTypes[1].OneofWrappers = []any{ + (*GenerativeProvider_Anthropic)(nil), + (*GenerativeProvider_Anyscale)(nil), + (*GenerativeProvider_Aws)(nil), + (*GenerativeProvider_Cohere)(nil), + (*GenerativeProvider_Dummy)(nil), + (*GenerativeProvider_Mistral)(nil), + (*GenerativeProvider_Ollama)(nil), + (*GenerativeProvider_Openai)(nil), + (*GenerativeProvider_Google)(nil), + (*GenerativeProvider_Databricks)(nil), + (*GenerativeProvider_Friendliai)(nil), + (*GenerativeProvider_Nvidia)(nil), + (*GenerativeProvider_Xai)(nil), + } + file_v1_generative_proto_msgTypes[2].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[3].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[4].OneofWrappers = []any{} + 
file_v1_generative_proto_msgTypes[5].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[7].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[8].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[9].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[10].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[11].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[12].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[13].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[14].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[18].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[20].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[22].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[23].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[24].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[25].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[26].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[27].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[28].OneofWrappers = []any{ + (*GenerativeMetadata_Anthropic)(nil), + (*GenerativeMetadata_Anyscale)(nil), + (*GenerativeMetadata_Aws)(nil), + (*GenerativeMetadata_Cohere)(nil), + (*GenerativeMetadata_Dummy)(nil), + (*GenerativeMetadata_Mistral)(nil), + (*GenerativeMetadata_Ollama)(nil), + (*GenerativeMetadata_Openai)(nil), + (*GenerativeMetadata_Google)(nil), + (*GenerativeMetadata_Databricks)(nil), + (*GenerativeMetadata_Friendliai)(nil), + (*GenerativeMetadata_Nvidia)(nil), + (*GenerativeMetadata_Xai)(nil), + } + file_v1_generative_proto_msgTypes[29].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[31].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[33].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[35].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[36].OneofWrappers = []any{} + 
file_v1_generative_proto_msgTypes[37].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[38].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[39].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[40].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[41].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[42].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[43].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[44].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[45].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[46].OneofWrappers = []any{} + file_v1_generative_proto_msgTypes[47].OneofWrappers = []any{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v1_generative_proto_rawDesc), len(file_v1_generative_proto_rawDesc)), + NumEnums: 0, + NumMessages: 48, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_v1_generative_proto_goTypes, + DependencyIndexes: file_v1_generative_proto_depIdxs, + MessageInfos: file_v1_generative_proto_msgTypes, + }.Build() + File_v1_generative_proto = out.File + file_v1_generative_proto_goTypes = nil + file_v1_generative_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/properties.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/properties.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..302641ba6ded4fa41e2b455ca21fa3cf65e4b557 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/properties.pb.go @@ -0,0 +1,1105 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+ +package protocol + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + structpb "google.golang.org/protobuf/types/known/structpb" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Properties struct { + state protoimpl.MessageState `protogen:"open.v1"` + Fields map[string]*Value `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Properties) Reset() { + *x = Properties{} + mi := &file_v1_properties_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Properties) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Properties) ProtoMessage() {} + +func (x *Properties) ProtoReflect() protoreflect.Message { + mi := &file_v1_properties_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Properties.ProtoReflect.Descriptor instead. 
+func (*Properties) Descriptor() ([]byte, []int) { + return file_v1_properties_proto_rawDescGZIP(), []int{0} +} + +func (x *Properties) GetFields() map[string]*Value { + if x != nil { + return x.Fields + } + return nil +} + +type Value struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Kind: + // + // *Value_NumberValue + // *Value_BoolValue + // *Value_ObjectValue + // *Value_ListValue + // *Value_DateValue + // *Value_UuidValue + // *Value_IntValue + // *Value_GeoValue + // *Value_BlobValue + // *Value_PhoneValue + // *Value_NullValue + // *Value_TextValue + Kind isValue_Kind `protobuf_oneof:"kind"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Value) Reset() { + *x = Value{} + mi := &file_v1_properties_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Value) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value) ProtoMessage() {} + +func (x *Value) ProtoReflect() protoreflect.Message { + mi := &file_v1_properties_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value.ProtoReflect.Descriptor instead. 
+func (*Value) Descriptor() ([]byte, []int) { + return file_v1_properties_proto_rawDescGZIP(), []int{1} +} + +func (x *Value) GetKind() isValue_Kind { + if x != nil { + return x.Kind + } + return nil +} + +func (x *Value) GetNumberValue() float64 { + if x != nil { + if x, ok := x.Kind.(*Value_NumberValue); ok { + return x.NumberValue + } + } + return 0 +} + +func (x *Value) GetBoolValue() bool { + if x != nil { + if x, ok := x.Kind.(*Value_BoolValue); ok { + return x.BoolValue + } + } + return false +} + +func (x *Value) GetObjectValue() *Properties { + if x != nil { + if x, ok := x.Kind.(*Value_ObjectValue); ok { + return x.ObjectValue + } + } + return nil +} + +func (x *Value) GetListValue() *ListValue { + if x != nil { + if x, ok := x.Kind.(*Value_ListValue); ok { + return x.ListValue + } + } + return nil +} + +func (x *Value) GetDateValue() string { + if x != nil { + if x, ok := x.Kind.(*Value_DateValue); ok { + return x.DateValue + } + } + return "" +} + +func (x *Value) GetUuidValue() string { + if x != nil { + if x, ok := x.Kind.(*Value_UuidValue); ok { + return x.UuidValue + } + } + return "" +} + +func (x *Value) GetIntValue() int64 { + if x != nil { + if x, ok := x.Kind.(*Value_IntValue); ok { + return x.IntValue + } + } + return 0 +} + +func (x *Value) GetGeoValue() *GeoCoordinate { + if x != nil { + if x, ok := x.Kind.(*Value_GeoValue); ok { + return x.GeoValue + } + } + return nil +} + +func (x *Value) GetBlobValue() string { + if x != nil { + if x, ok := x.Kind.(*Value_BlobValue); ok { + return x.BlobValue + } + } + return "" +} + +func (x *Value) GetPhoneValue() *PhoneNumber { + if x != nil { + if x, ok := x.Kind.(*Value_PhoneValue); ok { + return x.PhoneValue + } + } + return nil +} + +func (x *Value) GetNullValue() structpb.NullValue { + if x != nil { + if x, ok := x.Kind.(*Value_NullValue); ok { + return x.NullValue + } + } + return structpb.NullValue(0) +} + +func (x *Value) GetTextValue() string { + if x != nil { + if x, ok := 
x.Kind.(*Value_TextValue); ok { + return x.TextValue + } + } + return "" +} + +type isValue_Kind interface { + isValue_Kind() +} + +type Value_NumberValue struct { + NumberValue float64 `protobuf:"fixed64,1,opt,name=number_value,json=numberValue,proto3,oneof"` +} + +type Value_BoolValue struct { + // dont reuse 2, old field that has been removed; Was "string string_value = 2;" + BoolValue bool `protobuf:"varint,3,opt,name=bool_value,json=boolValue,proto3,oneof"` +} + +type Value_ObjectValue struct { + ObjectValue *Properties `protobuf:"bytes,4,opt,name=object_value,json=objectValue,proto3,oneof"` +} + +type Value_ListValue struct { + ListValue *ListValue `protobuf:"bytes,5,opt,name=list_value,json=listValue,proto3,oneof"` +} + +type Value_DateValue struct { + DateValue string `protobuf:"bytes,6,opt,name=date_value,json=dateValue,proto3,oneof"` +} + +type Value_UuidValue struct { + UuidValue string `protobuf:"bytes,7,opt,name=uuid_value,json=uuidValue,proto3,oneof"` +} + +type Value_IntValue struct { + IntValue int64 `protobuf:"varint,8,opt,name=int_value,json=intValue,proto3,oneof"` +} + +type Value_GeoValue struct { + GeoValue *GeoCoordinate `protobuf:"bytes,9,opt,name=geo_value,json=geoValue,proto3,oneof"` +} + +type Value_BlobValue struct { + BlobValue string `protobuf:"bytes,10,opt,name=blob_value,json=blobValue,proto3,oneof"` +} + +type Value_PhoneValue struct { + PhoneValue *PhoneNumber `protobuf:"bytes,11,opt,name=phone_value,json=phoneValue,proto3,oneof"` +} + +type Value_NullValue struct { + NullValue structpb.NullValue `protobuf:"varint,12,opt,name=null_value,json=nullValue,proto3,enum=google.protobuf.NullValue,oneof"` +} + +type Value_TextValue struct { + TextValue string `protobuf:"bytes,13,opt,name=text_value,json=textValue,proto3,oneof"` +} + +func (*Value_NumberValue) isValue_Kind() {} + +func (*Value_BoolValue) isValue_Kind() {} + +func (*Value_ObjectValue) isValue_Kind() {} + +func (*Value_ListValue) isValue_Kind() {} + +func (*Value_DateValue) 
isValue_Kind() {} + +func (*Value_UuidValue) isValue_Kind() {} + +func (*Value_IntValue) isValue_Kind() {} + +func (*Value_GeoValue) isValue_Kind() {} + +func (*Value_BlobValue) isValue_Kind() {} + +func (*Value_PhoneValue) isValue_Kind() {} + +func (*Value_NullValue) isValue_Kind() {} + +func (*Value_TextValue) isValue_Kind() {} + +type ListValue struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Kind: + // + // *ListValue_NumberValues + // *ListValue_BoolValues + // *ListValue_ObjectValues + // *ListValue_DateValues + // *ListValue_UuidValues + // *ListValue_IntValues + // *ListValue_TextValues + Kind isListValue_Kind `protobuf_oneof:"kind"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListValue) Reset() { + *x = ListValue{} + mi := &file_v1_properties_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListValue) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListValue) ProtoMessage() {} + +func (x *ListValue) ProtoReflect() protoreflect.Message { + mi := &file_v1_properties_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListValue.ProtoReflect.Descriptor instead. 
+func (*ListValue) Descriptor() ([]byte, []int) { + return file_v1_properties_proto_rawDescGZIP(), []int{2} +} + +func (x *ListValue) GetKind() isListValue_Kind { + if x != nil { + return x.Kind + } + return nil +} + +func (x *ListValue) GetNumberValues() *NumberValues { + if x != nil { + if x, ok := x.Kind.(*ListValue_NumberValues); ok { + return x.NumberValues + } + } + return nil +} + +func (x *ListValue) GetBoolValues() *BoolValues { + if x != nil { + if x, ok := x.Kind.(*ListValue_BoolValues); ok { + return x.BoolValues + } + } + return nil +} + +func (x *ListValue) GetObjectValues() *ObjectValues { + if x != nil { + if x, ok := x.Kind.(*ListValue_ObjectValues); ok { + return x.ObjectValues + } + } + return nil +} + +func (x *ListValue) GetDateValues() *DateValues { + if x != nil { + if x, ok := x.Kind.(*ListValue_DateValues); ok { + return x.DateValues + } + } + return nil +} + +func (x *ListValue) GetUuidValues() *UuidValues { + if x != nil { + if x, ok := x.Kind.(*ListValue_UuidValues); ok { + return x.UuidValues + } + } + return nil +} + +func (x *ListValue) GetIntValues() *IntValues { + if x != nil { + if x, ok := x.Kind.(*ListValue_IntValues); ok { + return x.IntValues + } + } + return nil +} + +func (x *ListValue) GetTextValues() *TextValues { + if x != nil { + if x, ok := x.Kind.(*ListValue_TextValues); ok { + return x.TextValues + } + } + return nil +} + +type isListValue_Kind interface { + isListValue_Kind() +} + +type ListValue_NumberValues struct { + NumberValues *NumberValues `protobuf:"bytes,2,opt,name=number_values,json=numberValues,proto3,oneof"` +} + +type ListValue_BoolValues struct { + BoolValues *BoolValues `protobuf:"bytes,3,opt,name=bool_values,json=boolValues,proto3,oneof"` +} + +type ListValue_ObjectValues struct { + ObjectValues *ObjectValues `protobuf:"bytes,4,opt,name=object_values,json=objectValues,proto3,oneof"` +} + +type ListValue_DateValues struct { + DateValues *DateValues 
`protobuf:"bytes,5,opt,name=date_values,json=dateValues,proto3,oneof"` +} + +type ListValue_UuidValues struct { + UuidValues *UuidValues `protobuf:"bytes,6,opt,name=uuid_values,json=uuidValues,proto3,oneof"` +} + +type ListValue_IntValues struct { + IntValues *IntValues `protobuf:"bytes,7,opt,name=int_values,json=intValues,proto3,oneof"` +} + +type ListValue_TextValues struct { + TextValues *TextValues `protobuf:"bytes,8,opt,name=text_values,json=textValues,proto3,oneof"` +} + +func (*ListValue_NumberValues) isListValue_Kind() {} + +func (*ListValue_BoolValues) isListValue_Kind() {} + +func (*ListValue_ObjectValues) isListValue_Kind() {} + +func (*ListValue_DateValues) isListValue_Kind() {} + +func (*ListValue_UuidValues) isListValue_Kind() {} + +func (*ListValue_IntValues) isListValue_Kind() {} + +func (*ListValue_TextValues) isListValue_Kind() {} + +type NumberValues struct { + state protoimpl.MessageState `protogen:"open.v1"` + // * + // The values are stored as a byte array, where each 8 bytes represent a single float64 value. + // The byte array is stored in little-endian order using uint64 encoding. + Values []byte `protobuf:"bytes,1,opt,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *NumberValues) Reset() { + *x = NumberValues{} + mi := &file_v1_properties_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NumberValues) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NumberValues) ProtoMessage() {} + +func (x *NumberValues) ProtoReflect() protoreflect.Message { + mi := &file_v1_properties_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NumberValues.ProtoReflect.Descriptor instead. 
+func (*NumberValues) Descriptor() ([]byte, []int) { + return file_v1_properties_proto_rawDescGZIP(), []int{3} +} + +func (x *NumberValues) GetValues() []byte { + if x != nil { + return x.Values + } + return nil +} + +type TextValues struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *TextValues) Reset() { + *x = TextValues{} + mi := &file_v1_properties_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *TextValues) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TextValues) ProtoMessage() {} + +func (x *TextValues) ProtoReflect() protoreflect.Message { + mi := &file_v1_properties_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TextValues.ProtoReflect.Descriptor instead. 
+func (*TextValues) Descriptor() ([]byte, []int) { + return file_v1_properties_proto_rawDescGZIP(), []int{4} +} + +func (x *TextValues) GetValues() []string { + if x != nil { + return x.Values + } + return nil +} + +type BoolValues struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []bool `protobuf:"varint,1,rep,packed,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *BoolValues) Reset() { + *x = BoolValues{} + mi := &file_v1_properties_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BoolValues) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BoolValues) ProtoMessage() {} + +func (x *BoolValues) ProtoReflect() protoreflect.Message { + mi := &file_v1_properties_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BoolValues.ProtoReflect.Descriptor instead. 
+func (*BoolValues) Descriptor() ([]byte, []int) { + return file_v1_properties_proto_rawDescGZIP(), []int{5} +} + +func (x *BoolValues) GetValues() []bool { + if x != nil { + return x.Values + } + return nil +} + +type ObjectValues struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []*Properties `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ObjectValues) Reset() { + *x = ObjectValues{} + mi := &file_v1_properties_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ObjectValues) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ObjectValues) ProtoMessage() {} + +func (x *ObjectValues) ProtoReflect() protoreflect.Message { + mi := &file_v1_properties_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ObjectValues.ProtoReflect.Descriptor instead. 
+func (*ObjectValues) Descriptor() ([]byte, []int) { + return file_v1_properties_proto_rawDescGZIP(), []int{6} +} + +func (x *ObjectValues) GetValues() []*Properties { + if x != nil { + return x.Values + } + return nil +} + +type DateValues struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *DateValues) Reset() { + *x = DateValues{} + mi := &file_v1_properties_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DateValues) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DateValues) ProtoMessage() {} + +func (x *DateValues) ProtoReflect() protoreflect.Message { + mi := &file_v1_properties_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DateValues.ProtoReflect.Descriptor instead. 
+func (*DateValues) Descriptor() ([]byte, []int) { + return file_v1_properties_proto_rawDescGZIP(), []int{7} +} + +func (x *DateValues) GetValues() []string { + if x != nil { + return x.Values + } + return nil +} + +type UuidValues struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *UuidValues) Reset() { + *x = UuidValues{} + mi := &file_v1_properties_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *UuidValues) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UuidValues) ProtoMessage() {} + +func (x *UuidValues) ProtoReflect() protoreflect.Message { + mi := &file_v1_properties_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UuidValues.ProtoReflect.Descriptor instead. +func (*UuidValues) Descriptor() ([]byte, []int) { + return file_v1_properties_proto_rawDescGZIP(), []int{8} +} + +func (x *UuidValues) GetValues() []string { + if x != nil { + return x.Values + } + return nil +} + +type IntValues struct { + state protoimpl.MessageState `protogen:"open.v1"` + // * + // The values are stored as a byte array, where each 8 bytes represent a single int64 value. + // The byte array is stored in little-endian order using uint64 encoding. 
+ Values []byte `protobuf:"bytes,1,opt,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *IntValues) Reset() { + *x = IntValues{} + mi := &file_v1_properties_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *IntValues) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*IntValues) ProtoMessage() {} + +func (x *IntValues) ProtoReflect() protoreflect.Message { + mi := &file_v1_properties_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use IntValues.ProtoReflect.Descriptor instead. +func (*IntValues) Descriptor() ([]byte, []int) { + return file_v1_properties_proto_rawDescGZIP(), []int{9} +} + +func (x *IntValues) GetValues() []byte { + if x != nil { + return x.Values + } + return nil +} + +type GeoCoordinate struct { + state protoimpl.MessageState `protogen:"open.v1"` + Longitude float32 `protobuf:"fixed32,1,opt,name=longitude,proto3" json:"longitude,omitempty"` + Latitude float32 `protobuf:"fixed32,2,opt,name=latitude,proto3" json:"latitude,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GeoCoordinate) Reset() { + *x = GeoCoordinate{} + mi := &file_v1_properties_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GeoCoordinate) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GeoCoordinate) ProtoMessage() {} + +func (x *GeoCoordinate) ProtoReflect() protoreflect.Message { + mi := &file_v1_properties_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// 
Deprecated: Use GeoCoordinate.ProtoReflect.Descriptor instead. +func (*GeoCoordinate) Descriptor() ([]byte, []int) { + return file_v1_properties_proto_rawDescGZIP(), []int{10} +} + +func (x *GeoCoordinate) GetLongitude() float32 { + if x != nil { + return x.Longitude + } + return 0 +} + +func (x *GeoCoordinate) GetLatitude() float32 { + if x != nil { + return x.Latitude + } + return 0 +} + +type PhoneNumber struct { + state protoimpl.MessageState `protogen:"open.v1"` + CountryCode uint64 `protobuf:"varint,1,opt,name=country_code,json=countryCode,proto3" json:"country_code,omitempty"` + DefaultCountry string `protobuf:"bytes,2,opt,name=default_country,json=defaultCountry,proto3" json:"default_country,omitempty"` + Input string `protobuf:"bytes,3,opt,name=input,proto3" json:"input,omitempty"` + InternationalFormatted string `protobuf:"bytes,4,opt,name=international_formatted,json=internationalFormatted,proto3" json:"international_formatted,omitempty"` + National uint64 `protobuf:"varint,5,opt,name=national,proto3" json:"national,omitempty"` + NationalFormatted string `protobuf:"bytes,6,opt,name=national_formatted,json=nationalFormatted,proto3" json:"national_formatted,omitempty"` + Valid bool `protobuf:"varint,7,opt,name=valid,proto3" json:"valid,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *PhoneNumber) Reset() { + *x = PhoneNumber{} + mi := &file_v1_properties_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PhoneNumber) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PhoneNumber) ProtoMessage() {} + +func (x *PhoneNumber) ProtoReflect() protoreflect.Message { + mi := &file_v1_properties_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
PhoneNumber.ProtoReflect.Descriptor instead. +func (*PhoneNumber) Descriptor() ([]byte, []int) { + return file_v1_properties_proto_rawDescGZIP(), []int{11} +} + +func (x *PhoneNumber) GetCountryCode() uint64 { + if x != nil { + return x.CountryCode + } + return 0 +} + +func (x *PhoneNumber) GetDefaultCountry() string { + if x != nil { + return x.DefaultCountry + } + return "" +} + +func (x *PhoneNumber) GetInput() string { + if x != nil { + return x.Input + } + return "" +} + +func (x *PhoneNumber) GetInternationalFormatted() string { + if x != nil { + return x.InternationalFormatted + } + return "" +} + +func (x *PhoneNumber) GetNational() uint64 { + if x != nil { + return x.National + } + return 0 +} + +func (x *PhoneNumber) GetNationalFormatted() string { + if x != nil { + return x.NationalFormatted + } + return "" +} + +func (x *PhoneNumber) GetValid() bool { + if x != nil { + return x.Valid + } + return false +} + +var File_v1_properties_proto protoreflect.FileDescriptor + +const file_v1_properties_proto_rawDesc = "" + + "\n" + + "\x13v1/properties.proto\x12\vweaviate.v1\x1a\x1cgoogle/protobuf/struct.proto\"\x98\x01\n" + + "\n" + + "Properties\x12;\n" + + "\x06fields\x18\x01 \x03(\v2#.weaviate.v1.Properties.FieldsEntryR\x06fields\x1aM\n" + + "\vFieldsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12(\n" + + "\x05value\x18\x02 \x01(\v2\x12.weaviate.v1.ValueR\x05value:\x028\x01\"\xa4\x04\n" + + "\x05Value\x12#\n" + + "\fnumber_value\x18\x01 \x01(\x01H\x00R\vnumberValue\x12\x1f\n" + + "\n" + + "bool_value\x18\x03 \x01(\bH\x00R\tboolValue\x12<\n" + + "\fobject_value\x18\x04 \x01(\v2\x17.weaviate.v1.PropertiesH\x00R\vobjectValue\x127\n" + + "\n" + + "list_value\x18\x05 \x01(\v2\x16.weaviate.v1.ListValueH\x00R\tlistValue\x12\x1f\n" + + "\n" + + "date_value\x18\x06 \x01(\tH\x00R\tdateValue\x12\x1f\n" + + "\n" + + "uuid_value\x18\a \x01(\tH\x00R\tuuidValue\x12\x1d\n" + + "\tint_value\x18\b \x01(\x03H\x00R\bintValue\x129\n" + + "\tgeo_value\x18\t 
\x01(\v2\x1a.weaviate.v1.GeoCoordinateH\x00R\bgeoValue\x12\x1f\n" + + "\n" + + "blob_value\x18\n" + + " \x01(\tH\x00R\tblobValue\x12;\n" + + "\vphone_value\x18\v \x01(\v2\x18.weaviate.v1.PhoneNumberH\x00R\n" + + "phoneValue\x12;\n" + + "\n" + + "null_value\x18\f \x01(\x0e2\x1a.google.protobuf.NullValueH\x00R\tnullValue\x12\x1f\n" + + "\n" + + "text_value\x18\r \x01(\tH\x00R\ttextValueB\x06\n" + + "\x04kind\"\xc6\x03\n" + + "\tListValue\x12@\n" + + "\rnumber_values\x18\x02 \x01(\v2\x19.weaviate.v1.NumberValuesH\x00R\fnumberValues\x12:\n" + + "\vbool_values\x18\x03 \x01(\v2\x17.weaviate.v1.BoolValuesH\x00R\n" + + "boolValues\x12@\n" + + "\robject_values\x18\x04 \x01(\v2\x19.weaviate.v1.ObjectValuesH\x00R\fobjectValues\x12:\n" + + "\vdate_values\x18\x05 \x01(\v2\x17.weaviate.v1.DateValuesH\x00R\n" + + "dateValues\x12:\n" + + "\vuuid_values\x18\x06 \x01(\v2\x17.weaviate.v1.UuidValuesH\x00R\n" + + "uuidValues\x127\n" + + "\n" + + "int_values\x18\a \x01(\v2\x16.weaviate.v1.IntValuesH\x00R\tintValues\x12:\n" + + "\vtext_values\x18\b \x01(\v2\x17.weaviate.v1.TextValuesH\x00R\n" + + "textValuesB\x06\n" + + "\x04kindJ\x04\b\x01\x10\x02\"&\n" + + "\fNumberValues\x12\x16\n" + + "\x06values\x18\x01 \x01(\fR\x06values\"$\n" + + "\n" + + "TextValues\x12\x16\n" + + "\x06values\x18\x01 \x03(\tR\x06values\"$\n" + + "\n" + + "BoolValues\x12\x16\n" + + "\x06values\x18\x01 \x03(\bR\x06values\"?\n" + + "\fObjectValues\x12/\n" + + "\x06values\x18\x01 \x03(\v2\x17.weaviate.v1.PropertiesR\x06values\"$\n" + + "\n" + + "DateValues\x12\x16\n" + + "\x06values\x18\x01 \x03(\tR\x06values\"$\n" + + "\n" + + "UuidValues\x12\x16\n" + + "\x06values\x18\x01 \x03(\tR\x06values\"#\n" + + "\tIntValues\x12\x16\n" + + "\x06values\x18\x01 \x01(\fR\x06values\"I\n" + + "\rGeoCoordinate\x12\x1c\n" + + "\tlongitude\x18\x01 \x01(\x02R\tlongitude\x12\x1a\n" + + "\blatitude\x18\x02 \x01(\x02R\blatitude\"\x89\x02\n" + + "\vPhoneNumber\x12!\n" + + "\fcountry_code\x18\x01 \x01(\x04R\vcountryCode\x12'\n" + + 
"\x0fdefault_country\x18\x02 \x01(\tR\x0edefaultCountry\x12\x14\n" + + "\x05input\x18\x03 \x01(\tR\x05input\x127\n" + + "\x17international_formatted\x18\x04 \x01(\tR\x16internationalFormatted\x12\x1a\n" + + "\bnational\x18\x05 \x01(\x04R\bnational\x12-\n" + + "\x12national_formatted\x18\x06 \x01(\tR\x11nationalFormatted\x12\x14\n" + + "\x05valid\x18\a \x01(\bR\x05validBt\n" + + "#io.weaviate.client.grpc.protocol.v1B\x17WeaviateProtoPropertiesZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var ( + file_v1_properties_proto_rawDescOnce sync.Once + file_v1_properties_proto_rawDescData []byte +) + +func file_v1_properties_proto_rawDescGZIP() []byte { + file_v1_properties_proto_rawDescOnce.Do(func() { + file_v1_properties_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_v1_properties_proto_rawDesc), len(file_v1_properties_proto_rawDesc))) + }) + return file_v1_properties_proto_rawDescData +} + +var file_v1_properties_proto_msgTypes = make([]protoimpl.MessageInfo, 13) +var file_v1_properties_proto_goTypes = []any{ + (*Properties)(nil), // 0: weaviate.v1.Properties + (*Value)(nil), // 1: weaviate.v1.Value + (*ListValue)(nil), // 2: weaviate.v1.ListValue + (*NumberValues)(nil), // 3: weaviate.v1.NumberValues + (*TextValues)(nil), // 4: weaviate.v1.TextValues + (*BoolValues)(nil), // 5: weaviate.v1.BoolValues + (*ObjectValues)(nil), // 6: weaviate.v1.ObjectValues + (*DateValues)(nil), // 7: weaviate.v1.DateValues + (*UuidValues)(nil), // 8: weaviate.v1.UuidValues + (*IntValues)(nil), // 9: weaviate.v1.IntValues + (*GeoCoordinate)(nil), // 10: weaviate.v1.GeoCoordinate + (*PhoneNumber)(nil), // 11: weaviate.v1.PhoneNumber + nil, // 12: weaviate.v1.Properties.FieldsEntry + (structpb.NullValue)(0), // 13: google.protobuf.NullValue +} +var file_v1_properties_proto_depIdxs = []int32{ + 12, // 0: weaviate.v1.Properties.fields:type_name -> weaviate.v1.Properties.FieldsEntry + 0, // 1: weaviate.v1.Value.object_value:type_name -> 
weaviate.v1.Properties + 2, // 2: weaviate.v1.Value.list_value:type_name -> weaviate.v1.ListValue + 10, // 3: weaviate.v1.Value.geo_value:type_name -> weaviate.v1.GeoCoordinate + 11, // 4: weaviate.v1.Value.phone_value:type_name -> weaviate.v1.PhoneNumber + 13, // 5: weaviate.v1.Value.null_value:type_name -> google.protobuf.NullValue + 3, // 6: weaviate.v1.ListValue.number_values:type_name -> weaviate.v1.NumberValues + 5, // 7: weaviate.v1.ListValue.bool_values:type_name -> weaviate.v1.BoolValues + 6, // 8: weaviate.v1.ListValue.object_values:type_name -> weaviate.v1.ObjectValues + 7, // 9: weaviate.v1.ListValue.date_values:type_name -> weaviate.v1.DateValues + 8, // 10: weaviate.v1.ListValue.uuid_values:type_name -> weaviate.v1.UuidValues + 9, // 11: weaviate.v1.ListValue.int_values:type_name -> weaviate.v1.IntValues + 4, // 12: weaviate.v1.ListValue.text_values:type_name -> weaviate.v1.TextValues + 0, // 13: weaviate.v1.ObjectValues.values:type_name -> weaviate.v1.Properties + 1, // 14: weaviate.v1.Properties.FieldsEntry.value:type_name -> weaviate.v1.Value + 15, // [15:15] is the sub-list for method output_type + 15, // [15:15] is the sub-list for method input_type + 15, // [15:15] is the sub-list for extension type_name + 15, // [15:15] is the sub-list for extension extendee + 0, // [0:15] is the sub-list for field type_name +} + +func init() { file_v1_properties_proto_init() } +func file_v1_properties_proto_init() { + if File_v1_properties_proto != nil { + return + } + file_v1_properties_proto_msgTypes[1].OneofWrappers = []any{ + (*Value_NumberValue)(nil), + (*Value_BoolValue)(nil), + (*Value_ObjectValue)(nil), + (*Value_ListValue)(nil), + (*Value_DateValue)(nil), + (*Value_UuidValue)(nil), + (*Value_IntValue)(nil), + (*Value_GeoValue)(nil), + (*Value_BlobValue)(nil), + (*Value_PhoneValue)(nil), + (*Value_NullValue)(nil), + (*Value_TextValue)(nil), + } + file_v1_properties_proto_msgTypes[2].OneofWrappers = []any{ + (*ListValue_NumberValues)(nil), + 
(*ListValue_BoolValues)(nil), + (*ListValue_ObjectValues)(nil), + (*ListValue_DateValues)(nil), + (*ListValue_UuidValues)(nil), + (*ListValue_IntValues)(nil), + (*ListValue_TextValues)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v1_properties_proto_rawDesc), len(file_v1_properties_proto_rawDesc)), + NumEnums: 0, + NumMessages: 13, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_v1_properties_proto_goTypes, + DependencyIndexes: file_v1_properties_proto_depIdxs, + MessageInfos: file_v1_properties_proto_msgTypes, + }.Build() + File_v1_properties_proto = out.File + file_v1_properties_proto_goTypes = nil + file_v1_properties_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/search_get.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/search_get.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..476f6840676e191afcafec053297491c70ab2317 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/search_get.pb.go @@ -0,0 +1,1728 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package protocol + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type SearchRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + // required + Collection string `protobuf:"bytes,1,opt,name=collection,proto3" json:"collection,omitempty"` + // parameters + Tenant string `protobuf:"bytes,10,opt,name=tenant,proto3" json:"tenant,omitempty"` + ConsistencyLevel *ConsistencyLevel `protobuf:"varint,11,opt,name=consistency_level,json=consistencyLevel,proto3,enum=weaviate.v1.ConsistencyLevel,oneof" json:"consistency_level,omitempty"` + // what is returned + Properties *PropertiesRequest `protobuf:"bytes,20,opt,name=properties,proto3,oneof" json:"properties,omitempty"` + Metadata *MetadataRequest `protobuf:"bytes,21,opt,name=metadata,proto3,oneof" json:"metadata,omitempty"` + GroupBy *GroupBy `protobuf:"bytes,22,opt,name=group_by,json=groupBy,proto3,oneof" json:"group_by,omitempty"` + // affects order and length of results. 0/empty (default value) means disabled + Limit uint32 `protobuf:"varint,30,opt,name=limit,proto3" json:"limit,omitempty"` + Offset uint32 `protobuf:"varint,31,opt,name=offset,proto3" json:"offset,omitempty"` + Autocut uint32 `protobuf:"varint,32,opt,name=autocut,proto3" json:"autocut,omitempty"` + After string `protobuf:"bytes,33,opt,name=after,proto3" json:"after,omitempty"` + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + SortBy []*SortBy `protobuf:"bytes,34,rep,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` + // matches/searches for objects + Filters *Filters `protobuf:"bytes,40,opt,name=filters,proto3,oneof" json:"filters,omitempty"` + HybridSearch *Hybrid `protobuf:"bytes,41,opt,name=hybrid_search,json=hybridSearch,proto3,oneof" json:"hybrid_search,omitempty"` + Bm25Search *BM25 `protobuf:"bytes,42,opt,name=bm25_search,json=bm25Search,proto3,oneof" json:"bm25_search,omitempty"` + NearVector *NearVector `protobuf:"bytes,43,opt,name=near_vector,json=nearVector,proto3,oneof" json:"near_vector,omitempty"` + 
NearObject *NearObject `protobuf:"bytes,44,opt,name=near_object,json=nearObject,proto3,oneof" json:"near_object,omitempty"` + NearText *NearTextSearch `protobuf:"bytes,45,opt,name=near_text,json=nearText,proto3,oneof" json:"near_text,omitempty"` + NearImage *NearImageSearch `protobuf:"bytes,46,opt,name=near_image,json=nearImage,proto3,oneof" json:"near_image,omitempty"` + NearAudio *NearAudioSearch `protobuf:"bytes,47,opt,name=near_audio,json=nearAudio,proto3,oneof" json:"near_audio,omitempty"` + NearVideo *NearVideoSearch `protobuf:"bytes,48,opt,name=near_video,json=nearVideo,proto3,oneof" json:"near_video,omitempty"` + NearDepth *NearDepthSearch `protobuf:"bytes,49,opt,name=near_depth,json=nearDepth,proto3,oneof" json:"near_depth,omitempty"` + NearThermal *NearThermalSearch `protobuf:"bytes,50,opt,name=near_thermal,json=nearThermal,proto3,oneof" json:"near_thermal,omitempty"` + NearImu *NearIMUSearch `protobuf:"bytes,51,opt,name=near_imu,json=nearImu,proto3,oneof" json:"near_imu,omitempty"` + Generative *GenerativeSearch `protobuf:"bytes,60,opt,name=generative,proto3,oneof" json:"generative,omitempty"` + Rerank *Rerank `protobuf:"bytes,61,opt,name=rerank,proto3,oneof" json:"rerank,omitempty"` + // Deprecated: Marked as deprecated in v1/search_get.proto. + Uses_123Api bool `protobuf:"varint,100,opt,name=uses_123_api,json=uses123Api,proto3" json:"uses_123_api,omitempty"` + // Deprecated: Marked as deprecated in v1/search_get.proto. 
+ Uses_125Api bool `protobuf:"varint,101,opt,name=uses_125_api,json=uses125Api,proto3" json:"uses_125_api,omitempty"` + Uses_127Api bool `protobuf:"varint,102,opt,name=uses_127_api,json=uses127Api,proto3" json:"uses_127_api,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SearchRequest) Reset() { + *x = SearchRequest{} + mi := &file_v1_search_get_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SearchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SearchRequest) ProtoMessage() {} + +func (x *SearchRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SearchRequest.ProtoReflect.Descriptor instead. +func (*SearchRequest) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{0} +} + +func (x *SearchRequest) GetCollection() string { + if x != nil { + return x.Collection + } + return "" +} + +func (x *SearchRequest) GetTenant() string { + if x != nil { + return x.Tenant + } + return "" +} + +func (x *SearchRequest) GetConsistencyLevel() ConsistencyLevel { + if x != nil && x.ConsistencyLevel != nil { + return *x.ConsistencyLevel + } + return ConsistencyLevel_CONSISTENCY_LEVEL_UNSPECIFIED +} + +func (x *SearchRequest) GetProperties() *PropertiesRequest { + if x != nil { + return x.Properties + } + return nil +} + +func (x *SearchRequest) GetMetadata() *MetadataRequest { + if x != nil { + return x.Metadata + } + return nil +} + +func (x *SearchRequest) GetGroupBy() *GroupBy { + if x != nil { + return x.GroupBy + } + return nil +} + +func (x *SearchRequest) GetLimit() uint32 { + if x != nil { + return x.Limit + } + return 0 +} + +func (x *SearchRequest) 
GetOffset() uint32 { + if x != nil { + return x.Offset + } + return 0 +} + +func (x *SearchRequest) GetAutocut() uint32 { + if x != nil { + return x.Autocut + } + return 0 +} + +func (x *SearchRequest) GetAfter() string { + if x != nil { + return x.After + } + return "" +} + +func (x *SearchRequest) GetSortBy() []*SortBy { + if x != nil { + return x.SortBy + } + return nil +} + +func (x *SearchRequest) GetFilters() *Filters { + if x != nil { + return x.Filters + } + return nil +} + +func (x *SearchRequest) GetHybridSearch() *Hybrid { + if x != nil { + return x.HybridSearch + } + return nil +} + +func (x *SearchRequest) GetBm25Search() *BM25 { + if x != nil { + return x.Bm25Search + } + return nil +} + +func (x *SearchRequest) GetNearVector() *NearVector { + if x != nil { + return x.NearVector + } + return nil +} + +func (x *SearchRequest) GetNearObject() *NearObject { + if x != nil { + return x.NearObject + } + return nil +} + +func (x *SearchRequest) GetNearText() *NearTextSearch { + if x != nil { + return x.NearText + } + return nil +} + +func (x *SearchRequest) GetNearImage() *NearImageSearch { + if x != nil { + return x.NearImage + } + return nil +} + +func (x *SearchRequest) GetNearAudio() *NearAudioSearch { + if x != nil { + return x.NearAudio + } + return nil +} + +func (x *SearchRequest) GetNearVideo() *NearVideoSearch { + if x != nil { + return x.NearVideo + } + return nil +} + +func (x *SearchRequest) GetNearDepth() *NearDepthSearch { + if x != nil { + return x.NearDepth + } + return nil +} + +func (x *SearchRequest) GetNearThermal() *NearThermalSearch { + if x != nil { + return x.NearThermal + } + return nil +} + +func (x *SearchRequest) GetNearImu() *NearIMUSearch { + if x != nil { + return x.NearImu + } + return nil +} + +func (x *SearchRequest) GetGenerative() *GenerativeSearch { + if x != nil { + return x.Generative + } + return nil +} + +func (x *SearchRequest) GetRerank() *Rerank { + if x != nil { + return x.Rerank + } + return nil +} + +// 
Deprecated: Marked as deprecated in v1/search_get.proto. +func (x *SearchRequest) GetUses_123Api() bool { + if x != nil { + return x.Uses_123Api + } + return false +} + +// Deprecated: Marked as deprecated in v1/search_get.proto. +func (x *SearchRequest) GetUses_125Api() bool { + if x != nil { + return x.Uses_125Api + } + return false +} + +func (x *SearchRequest) GetUses_127Api() bool { + if x != nil { + return x.Uses_127Api + } + return false +} + +type GroupBy struct { + state protoimpl.MessageState `protogen:"open.v1"` + // currently only supports one entry (eg just properties, no refs). But might + // be extended in the future. + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + Path []string `protobuf:"bytes,1,rep,name=path,proto3" json:"path,omitempty"` + NumberOfGroups int32 `protobuf:"varint,2,opt,name=number_of_groups,json=numberOfGroups,proto3" json:"number_of_groups,omitempty"` + ObjectsPerGroup int32 `protobuf:"varint,3,opt,name=objects_per_group,json=objectsPerGroup,proto3" json:"objects_per_group,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GroupBy) Reset() { + *x = GroupBy{} + mi := &file_v1_search_get_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GroupBy) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GroupBy) ProtoMessage() {} + +func (x *GroupBy) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GroupBy.ProtoReflect.Descriptor instead. 
+func (*GroupBy) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{1} +} + +func (x *GroupBy) GetPath() []string { + if x != nil { + return x.Path + } + return nil +} + +func (x *GroupBy) GetNumberOfGroups() int32 { + if x != nil { + return x.NumberOfGroups + } + return 0 +} + +func (x *GroupBy) GetObjectsPerGroup() int32 { + if x != nil { + return x.ObjectsPerGroup + } + return 0 +} + +type SortBy struct { + state protoimpl.MessageState `protogen:"open.v1"` + Ascending bool `protobuf:"varint,1,opt,name=ascending,proto3" json:"ascending,omitempty"` + // currently only supports one entry (eg just properties, no refs). But the + // weaviate datastructure already has paths in it and this makes it easily + // extendable in the future + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + Path []string `protobuf:"bytes,2,rep,name=path,proto3" json:"path,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SortBy) Reset() { + *x = SortBy{} + mi := &file_v1_search_get_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SortBy) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SortBy) ProtoMessage() {} + +func (x *SortBy) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SortBy.ProtoReflect.Descriptor instead. 
+func (*SortBy) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{2} +} + +func (x *SortBy) GetAscending() bool { + if x != nil { + return x.Ascending + } + return false +} + +func (x *SortBy) GetPath() []string { + if x != nil { + return x.Path + } + return nil +} + +type MetadataRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Uuid bool `protobuf:"varint,1,opt,name=uuid,proto3" json:"uuid,omitempty"` + Vector bool `protobuf:"varint,2,opt,name=vector,proto3" json:"vector,omitempty"` + CreationTimeUnix bool `protobuf:"varint,3,opt,name=creation_time_unix,json=creationTimeUnix,proto3" json:"creation_time_unix,omitempty"` + LastUpdateTimeUnix bool `protobuf:"varint,4,opt,name=last_update_time_unix,json=lastUpdateTimeUnix,proto3" json:"last_update_time_unix,omitempty"` + Distance bool `protobuf:"varint,5,opt,name=distance,proto3" json:"distance,omitempty"` + Certainty bool `protobuf:"varint,6,opt,name=certainty,proto3" json:"certainty,omitempty"` + Score bool `protobuf:"varint,7,opt,name=score,proto3" json:"score,omitempty"` + ExplainScore bool `protobuf:"varint,8,opt,name=explain_score,json=explainScore,proto3" json:"explain_score,omitempty"` + IsConsistent bool `protobuf:"varint,9,opt,name=is_consistent,json=isConsistent,proto3" json:"is_consistent,omitempty"` + Vectors []string `protobuf:"bytes,10,rep,name=vectors,proto3" json:"vectors,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *MetadataRequest) Reset() { + *x = MetadataRequest{} + mi := &file_v1_search_get_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *MetadataRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MetadataRequest) ProtoMessage() {} + +func (x *MetadataRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[3] + if x != nil { + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MetadataRequest.ProtoReflect.Descriptor instead. +func (*MetadataRequest) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{3} +} + +func (x *MetadataRequest) GetUuid() bool { + if x != nil { + return x.Uuid + } + return false +} + +func (x *MetadataRequest) GetVector() bool { + if x != nil { + return x.Vector + } + return false +} + +func (x *MetadataRequest) GetCreationTimeUnix() bool { + if x != nil { + return x.CreationTimeUnix + } + return false +} + +func (x *MetadataRequest) GetLastUpdateTimeUnix() bool { + if x != nil { + return x.LastUpdateTimeUnix + } + return false +} + +func (x *MetadataRequest) GetDistance() bool { + if x != nil { + return x.Distance + } + return false +} + +func (x *MetadataRequest) GetCertainty() bool { + if x != nil { + return x.Certainty + } + return false +} + +func (x *MetadataRequest) GetScore() bool { + if x != nil { + return x.Score + } + return false +} + +func (x *MetadataRequest) GetExplainScore() bool { + if x != nil { + return x.ExplainScore + } + return false +} + +func (x *MetadataRequest) GetIsConsistent() bool { + if x != nil { + return x.IsConsistent + } + return false +} + +func (x *MetadataRequest) GetVectors() []string { + if x != nil { + return x.Vectors + } + return nil +} + +type PropertiesRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + NonRefProperties []string `protobuf:"bytes,1,rep,name=non_ref_properties,json=nonRefProperties,proto3" json:"non_ref_properties,omitempty"` + RefProperties []*RefPropertiesRequest `protobuf:"bytes,2,rep,name=ref_properties,json=refProperties,proto3" json:"ref_properties,omitempty"` + ObjectProperties []*ObjectPropertiesRequest `protobuf:"bytes,3,rep,name=object_properties,json=objectProperties,proto3" json:"object_properties,omitempty"` + 
ReturnAllNonrefProperties bool `protobuf:"varint,11,opt,name=return_all_nonref_properties,json=returnAllNonrefProperties,proto3" json:"return_all_nonref_properties,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *PropertiesRequest) Reset() { + *x = PropertiesRequest{} + mi := &file_v1_search_get_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PropertiesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PropertiesRequest) ProtoMessage() {} + +func (x *PropertiesRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PropertiesRequest.ProtoReflect.Descriptor instead. +func (*PropertiesRequest) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{4} +} + +func (x *PropertiesRequest) GetNonRefProperties() []string { + if x != nil { + return x.NonRefProperties + } + return nil +} + +func (x *PropertiesRequest) GetRefProperties() []*RefPropertiesRequest { + if x != nil { + return x.RefProperties + } + return nil +} + +func (x *PropertiesRequest) GetObjectProperties() []*ObjectPropertiesRequest { + if x != nil { + return x.ObjectProperties + } + return nil +} + +func (x *PropertiesRequest) GetReturnAllNonrefProperties() bool { + if x != nil { + return x.ReturnAllNonrefProperties + } + return false +} + +type ObjectPropertiesRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + PropName string `protobuf:"bytes,1,opt,name=prop_name,json=propName,proto3" json:"prop_name,omitempty"` + PrimitiveProperties []string `protobuf:"bytes,2,rep,name=primitive_properties,json=primitiveProperties,proto3" json:"primitive_properties,omitempty"` + 
ObjectProperties []*ObjectPropertiesRequest `protobuf:"bytes,3,rep,name=object_properties,json=objectProperties,proto3" json:"object_properties,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ObjectPropertiesRequest) Reset() { + *x = ObjectPropertiesRequest{} + mi := &file_v1_search_get_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ObjectPropertiesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ObjectPropertiesRequest) ProtoMessage() {} + +func (x *ObjectPropertiesRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ObjectPropertiesRequest.ProtoReflect.Descriptor instead. +func (*ObjectPropertiesRequest) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{5} +} + +func (x *ObjectPropertiesRequest) GetPropName() string { + if x != nil { + return x.PropName + } + return "" +} + +func (x *ObjectPropertiesRequest) GetPrimitiveProperties() []string { + if x != nil { + return x.PrimitiveProperties + } + return nil +} + +func (x *ObjectPropertiesRequest) GetObjectProperties() []*ObjectPropertiesRequest { + if x != nil { + return x.ObjectProperties + } + return nil +} + +type RefPropertiesRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + ReferenceProperty string `protobuf:"bytes,1,opt,name=reference_property,json=referenceProperty,proto3" json:"reference_property,omitempty"` + Properties *PropertiesRequest `protobuf:"bytes,2,opt,name=properties,proto3" json:"properties,omitempty"` + Metadata *MetadataRequest `protobuf:"bytes,3,opt,name=metadata,proto3" json:"metadata,omitempty"` + TargetCollection string 
`protobuf:"bytes,4,opt,name=target_collection,json=targetCollection,proto3" json:"target_collection,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *RefPropertiesRequest) Reset() { + *x = RefPropertiesRequest{} + mi := &file_v1_search_get_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *RefPropertiesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RefPropertiesRequest) ProtoMessage() {} + +func (x *RefPropertiesRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RefPropertiesRequest.ProtoReflect.Descriptor instead. +func (*RefPropertiesRequest) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{6} +} + +func (x *RefPropertiesRequest) GetReferenceProperty() string { + if x != nil { + return x.ReferenceProperty + } + return "" +} + +func (x *RefPropertiesRequest) GetProperties() *PropertiesRequest { + if x != nil { + return x.Properties + } + return nil +} + +func (x *RefPropertiesRequest) GetMetadata() *MetadataRequest { + if x != nil { + return x.Metadata + } + return nil +} + +func (x *RefPropertiesRequest) GetTargetCollection() string { + if x != nil { + return x.TargetCollection + } + return "" +} + +type Rerank struct { + state protoimpl.MessageState `protogen:"open.v1"` + Property string `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + Query *string `protobuf:"bytes,2,opt,name=query,proto3,oneof" json:"query,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Rerank) Reset() { + *x = Rerank{} + mi := &file_v1_search_get_proto_msgTypes[7] + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Rerank) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Rerank) ProtoMessage() {} + +func (x *Rerank) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Rerank.ProtoReflect.Descriptor instead. +func (*Rerank) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{7} +} + +func (x *Rerank) GetProperty() string { + if x != nil { + return x.Property + } + return "" +} + +func (x *Rerank) GetQuery() string { + if x != nil && x.Query != nil { + return *x.Query + } + return "" +} + +type SearchReply struct { + state protoimpl.MessageState `protogen:"open.v1"` + Took float32 `protobuf:"fixed32,1,opt,name=took,proto3" json:"took,omitempty"` + Results []*SearchResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + // Deprecated: Marked as deprecated in v1/search_get.proto. 
+ GenerativeGroupedResult *string `protobuf:"bytes,3,opt,name=generative_grouped_result,json=generativeGroupedResult,proto3,oneof" json:"generative_grouped_result,omitempty"` + GroupByResults []*GroupByResult `protobuf:"bytes,4,rep,name=group_by_results,json=groupByResults,proto3" json:"group_by_results,omitempty"` + GenerativeGroupedResults *GenerativeResult `protobuf:"bytes,5,opt,name=generative_grouped_results,json=generativeGroupedResults,proto3,oneof" json:"generative_grouped_results,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SearchReply) Reset() { + *x = SearchReply{} + mi := &file_v1_search_get_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SearchReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SearchReply) ProtoMessage() {} + +func (x *SearchReply) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SearchReply.ProtoReflect.Descriptor instead. +func (*SearchReply) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{8} +} + +func (x *SearchReply) GetTook() float32 { + if x != nil { + return x.Took + } + return 0 +} + +func (x *SearchReply) GetResults() []*SearchResult { + if x != nil { + return x.Results + } + return nil +} + +// Deprecated: Marked as deprecated in v1/search_get.proto. 
+func (x *SearchReply) GetGenerativeGroupedResult() string { + if x != nil && x.GenerativeGroupedResult != nil { + return *x.GenerativeGroupedResult + } + return "" +} + +func (x *SearchReply) GetGroupByResults() []*GroupByResult { + if x != nil { + return x.GroupByResults + } + return nil +} + +func (x *SearchReply) GetGenerativeGroupedResults() *GenerativeResult { + if x != nil { + return x.GenerativeGroupedResults + } + return nil +} + +type RerankReply struct { + state protoimpl.MessageState `protogen:"open.v1"` + Score float64 `protobuf:"fixed64,1,opt,name=score,proto3" json:"score,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *RerankReply) Reset() { + *x = RerankReply{} + mi := &file_v1_search_get_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *RerankReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RerankReply) ProtoMessage() {} + +func (x *RerankReply) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RerankReply.ProtoReflect.Descriptor instead. 
+func (*RerankReply) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{9} +} + +func (x *RerankReply) GetScore() float64 { + if x != nil { + return x.Score + } + return 0 +} + +type GroupByResult struct { + state protoimpl.MessageState `protogen:"open.v1"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + MinDistance float32 `protobuf:"fixed32,2,opt,name=min_distance,json=minDistance,proto3" json:"min_distance,omitempty"` + MaxDistance float32 `protobuf:"fixed32,3,opt,name=max_distance,json=maxDistance,proto3" json:"max_distance,omitempty"` + NumberOfObjects int64 `protobuf:"varint,4,opt,name=number_of_objects,json=numberOfObjects,proto3" json:"number_of_objects,omitempty"` + Objects []*SearchResult `protobuf:"bytes,5,rep,name=objects,proto3" json:"objects,omitempty"` + Rerank *RerankReply `protobuf:"bytes,6,opt,name=rerank,proto3,oneof" json:"rerank,omitempty"` + // Deprecated: Marked as deprecated in v1/search_get.proto. 
+ Generative *GenerativeReply `protobuf:"bytes,7,opt,name=generative,proto3,oneof" json:"generative,omitempty"` + GenerativeResult *GenerativeResult `protobuf:"bytes,8,opt,name=generative_result,json=generativeResult,proto3,oneof" json:"generative_result,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GroupByResult) Reset() { + *x = GroupByResult{} + mi := &file_v1_search_get_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GroupByResult) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GroupByResult) ProtoMessage() {} + +func (x *GroupByResult) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GroupByResult.ProtoReflect.Descriptor instead. +func (*GroupByResult) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{10} +} + +func (x *GroupByResult) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *GroupByResult) GetMinDistance() float32 { + if x != nil { + return x.MinDistance + } + return 0 +} + +func (x *GroupByResult) GetMaxDistance() float32 { + if x != nil { + return x.MaxDistance + } + return 0 +} + +func (x *GroupByResult) GetNumberOfObjects() int64 { + if x != nil { + return x.NumberOfObjects + } + return 0 +} + +func (x *GroupByResult) GetObjects() []*SearchResult { + if x != nil { + return x.Objects + } + return nil +} + +func (x *GroupByResult) GetRerank() *RerankReply { + if x != nil { + return x.Rerank + } + return nil +} + +// Deprecated: Marked as deprecated in v1/search_get.proto. 
+func (x *GroupByResult) GetGenerative() *GenerativeReply { + if x != nil { + return x.Generative + } + return nil +} + +func (x *GroupByResult) GetGenerativeResult() *GenerativeResult { + if x != nil { + return x.GenerativeResult + } + return nil +} + +type SearchResult struct { + state protoimpl.MessageState `protogen:"open.v1"` + Properties *PropertiesResult `protobuf:"bytes,1,opt,name=properties,proto3" json:"properties,omitempty"` + Metadata *MetadataResult `protobuf:"bytes,2,opt,name=metadata,proto3" json:"metadata,omitempty"` + Generative *GenerativeResult `protobuf:"bytes,3,opt,name=generative,proto3,oneof" json:"generative,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SearchResult) Reset() { + *x = SearchResult{} + mi := &file_v1_search_get_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SearchResult) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SearchResult) ProtoMessage() {} + +func (x *SearchResult) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SearchResult.ProtoReflect.Descriptor instead. 
+func (*SearchResult) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{11} +} + +func (x *SearchResult) GetProperties() *PropertiesResult { + if x != nil { + return x.Properties + } + return nil +} + +func (x *SearchResult) GetMetadata() *MetadataResult { + if x != nil { + return x.Metadata + } + return nil +} + +func (x *SearchResult) GetGenerative() *GenerativeResult { + if x != nil { + return x.Generative + } + return nil +} + +type MetadataResult struct { + state protoimpl.MessageState `protogen:"open.v1"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + // + // Deprecated: Marked as deprecated in v1/search_get.proto. + Vector []float32 `protobuf:"fixed32,2,rep,packed,name=vector,proto3" json:"vector,omitempty"` + CreationTimeUnix int64 `protobuf:"varint,3,opt,name=creation_time_unix,json=creationTimeUnix,proto3" json:"creation_time_unix,omitempty"` + CreationTimeUnixPresent bool `protobuf:"varint,4,opt,name=creation_time_unix_present,json=creationTimeUnixPresent,proto3" json:"creation_time_unix_present,omitempty"` + LastUpdateTimeUnix int64 `protobuf:"varint,5,opt,name=last_update_time_unix,json=lastUpdateTimeUnix,proto3" json:"last_update_time_unix,omitempty"` + LastUpdateTimeUnixPresent bool `protobuf:"varint,6,opt,name=last_update_time_unix_present,json=lastUpdateTimeUnixPresent,proto3" json:"last_update_time_unix_present,omitempty"` + Distance float32 `protobuf:"fixed32,7,opt,name=distance,proto3" json:"distance,omitempty"` + DistancePresent bool `protobuf:"varint,8,opt,name=distance_present,json=distancePresent,proto3" json:"distance_present,omitempty"` + Certainty float32 `protobuf:"fixed32,9,opt,name=certainty,proto3" json:"certainty,omitempty"` + CertaintyPresent bool `protobuf:"varint,10,opt,name=certainty_present,json=certaintyPresent,proto3" json:"certainty_present,omitempty"` + Score float32 
`protobuf:"fixed32,11,opt,name=score,proto3" json:"score,omitempty"` + ScorePresent bool `protobuf:"varint,12,opt,name=score_present,json=scorePresent,proto3" json:"score_present,omitempty"` + ExplainScore string `protobuf:"bytes,13,opt,name=explain_score,json=explainScore,proto3" json:"explain_score,omitempty"` + ExplainScorePresent bool `protobuf:"varint,14,opt,name=explain_score_present,json=explainScorePresent,proto3" json:"explain_score_present,omitempty"` + IsConsistent *bool `protobuf:"varint,15,opt,name=is_consistent,json=isConsistent,proto3,oneof" json:"is_consistent,omitempty"` + // Deprecated: Marked as deprecated in v1/search_get.proto. + Generative string `protobuf:"bytes,16,opt,name=generative,proto3" json:"generative,omitempty"` + // Deprecated: Marked as deprecated in v1/search_get.proto. + GenerativePresent bool `protobuf:"varint,17,opt,name=generative_present,json=generativePresent,proto3" json:"generative_present,omitempty"` + IsConsistentPresent bool `protobuf:"varint,18,opt,name=is_consistent_present,json=isConsistentPresent,proto3" json:"is_consistent_present,omitempty"` + VectorBytes []byte `protobuf:"bytes,19,opt,name=vector_bytes,json=vectorBytes,proto3" json:"vector_bytes,omitempty"` + IdAsBytes []byte `protobuf:"bytes,20,opt,name=id_as_bytes,json=idAsBytes,proto3" json:"id_as_bytes,omitempty"` + RerankScore float64 `protobuf:"fixed64,21,opt,name=rerank_score,json=rerankScore,proto3" json:"rerank_score,omitempty"` + RerankScorePresent bool `protobuf:"varint,22,opt,name=rerank_score_present,json=rerankScorePresent,proto3" json:"rerank_score_present,omitempty"` + Vectors []*Vectors `protobuf:"bytes,23,rep,name=vectors,proto3" json:"vectors,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *MetadataResult) Reset() { + *x = MetadataResult{} + mi := &file_v1_search_get_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x 
*MetadataResult) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MetadataResult) ProtoMessage() {} + +func (x *MetadataResult) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MetadataResult.ProtoReflect.Descriptor instead. +func (*MetadataResult) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{12} +} + +func (x *MetadataResult) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +// Deprecated: Marked as deprecated in v1/search_get.proto. +func (x *MetadataResult) GetVector() []float32 { + if x != nil { + return x.Vector + } + return nil +} + +func (x *MetadataResult) GetCreationTimeUnix() int64 { + if x != nil { + return x.CreationTimeUnix + } + return 0 +} + +func (x *MetadataResult) GetCreationTimeUnixPresent() bool { + if x != nil { + return x.CreationTimeUnixPresent + } + return false +} + +func (x *MetadataResult) GetLastUpdateTimeUnix() int64 { + if x != nil { + return x.LastUpdateTimeUnix + } + return 0 +} + +func (x *MetadataResult) GetLastUpdateTimeUnixPresent() bool { + if x != nil { + return x.LastUpdateTimeUnixPresent + } + return false +} + +func (x *MetadataResult) GetDistance() float32 { + if x != nil { + return x.Distance + } + return 0 +} + +func (x *MetadataResult) GetDistancePresent() bool { + if x != nil { + return x.DistancePresent + } + return false +} + +func (x *MetadataResult) GetCertainty() float32 { + if x != nil { + return x.Certainty + } + return 0 +} + +func (x *MetadataResult) GetCertaintyPresent() bool { + if x != nil { + return x.CertaintyPresent + } + return false +} + +func (x *MetadataResult) GetScore() float32 { + if x != nil { + return x.Score + } + return 0 +} + +func (x *MetadataResult) GetScorePresent() bool { + 
if x != nil { + return x.ScorePresent + } + return false +} + +func (x *MetadataResult) GetExplainScore() string { + if x != nil { + return x.ExplainScore + } + return "" +} + +func (x *MetadataResult) GetExplainScorePresent() bool { + if x != nil { + return x.ExplainScorePresent + } + return false +} + +func (x *MetadataResult) GetIsConsistent() bool { + if x != nil && x.IsConsistent != nil { + return *x.IsConsistent + } + return false +} + +// Deprecated: Marked as deprecated in v1/search_get.proto. +func (x *MetadataResult) GetGenerative() string { + if x != nil { + return x.Generative + } + return "" +} + +// Deprecated: Marked as deprecated in v1/search_get.proto. +func (x *MetadataResult) GetGenerativePresent() bool { + if x != nil { + return x.GenerativePresent + } + return false +} + +func (x *MetadataResult) GetIsConsistentPresent() bool { + if x != nil { + return x.IsConsistentPresent + } + return false +} + +func (x *MetadataResult) GetVectorBytes() []byte { + if x != nil { + return x.VectorBytes + } + return nil +} + +func (x *MetadataResult) GetIdAsBytes() []byte { + if x != nil { + return x.IdAsBytes + } + return nil +} + +func (x *MetadataResult) GetRerankScore() float64 { + if x != nil { + return x.RerankScore + } + return 0 +} + +func (x *MetadataResult) GetRerankScorePresent() bool { + if x != nil { + return x.RerankScorePresent + } + return false +} + +func (x *MetadataResult) GetVectors() []*Vectors { + if x != nil { + return x.Vectors + } + return nil +} + +type PropertiesResult struct { + state protoimpl.MessageState `protogen:"open.v1"` + RefProps []*RefPropertiesResult `protobuf:"bytes,2,rep,name=ref_props,json=refProps,proto3" json:"ref_props,omitempty"` + TargetCollection string `protobuf:"bytes,3,opt,name=target_collection,json=targetCollection,proto3" json:"target_collection,omitempty"` + Metadata *MetadataResult `protobuf:"bytes,4,opt,name=metadata,proto3" json:"metadata,omitempty"` + NonRefProps *Properties 
`protobuf:"bytes,11,opt,name=non_ref_props,json=nonRefProps,proto3" json:"non_ref_props,omitempty"` + RefPropsRequested bool `protobuf:"varint,12,opt,name=ref_props_requested,json=refPropsRequested,proto3" json:"ref_props_requested,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *PropertiesResult) Reset() { + *x = PropertiesResult{} + mi := &file_v1_search_get_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PropertiesResult) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PropertiesResult) ProtoMessage() {} + +func (x *PropertiesResult) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[13] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PropertiesResult.ProtoReflect.Descriptor instead. 
+func (*PropertiesResult) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{13} +} + +func (x *PropertiesResult) GetRefProps() []*RefPropertiesResult { + if x != nil { + return x.RefProps + } + return nil +} + +func (x *PropertiesResult) GetTargetCollection() string { + if x != nil { + return x.TargetCollection + } + return "" +} + +func (x *PropertiesResult) GetMetadata() *MetadataResult { + if x != nil { + return x.Metadata + } + return nil +} + +func (x *PropertiesResult) GetNonRefProps() *Properties { + if x != nil { + return x.NonRefProps + } + return nil +} + +func (x *PropertiesResult) GetRefPropsRequested() bool { + if x != nil { + return x.RefPropsRequested + } + return false +} + +type RefPropertiesResult struct { + state protoimpl.MessageState `protogen:"open.v1"` + Properties []*PropertiesResult `protobuf:"bytes,1,rep,name=properties,proto3" json:"properties,omitempty"` + PropName string `protobuf:"bytes,2,opt,name=prop_name,json=propName,proto3" json:"prop_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *RefPropertiesResult) Reset() { + *x = RefPropertiesResult{} + mi := &file_v1_search_get_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *RefPropertiesResult) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RefPropertiesResult) ProtoMessage() {} + +func (x *RefPropertiesResult) ProtoReflect() protoreflect.Message { + mi := &file_v1_search_get_proto_msgTypes[14] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RefPropertiesResult.ProtoReflect.Descriptor instead. 
+func (*RefPropertiesResult) Descriptor() ([]byte, []int) { + return file_v1_search_get_proto_rawDescGZIP(), []int{14} +} + +func (x *RefPropertiesResult) GetProperties() []*PropertiesResult { + if x != nil { + return x.Properties + } + return nil +} + +func (x *RefPropertiesResult) GetPropName() string { + if x != nil { + return x.PropName + } + return "" +} + +var File_v1_search_get_proto protoreflect.FileDescriptor + +const file_v1_search_get_proto_rawDesc = "" + + "\n" + + "\x13v1/search_get.proto\x12\vweaviate.v1\x1a\rv1/base.proto\x1a\x14v1/base_search.proto\x1a\x13v1/generative.proto\x1a\x13v1/properties.proto\"\xc7\r\n" + + "\rSearchRequest\x12\x1e\n" + + "\n" + + "collection\x18\x01 \x01(\tR\n" + + "collection\x12\x16\n" + + "\x06tenant\x18\n" + + " \x01(\tR\x06tenant\x12O\n" + + "\x11consistency_level\x18\v \x01(\x0e2\x1d.weaviate.v1.ConsistencyLevelH\x00R\x10consistencyLevel\x88\x01\x01\x12C\n" + + "\n" + + "properties\x18\x14 \x01(\v2\x1e.weaviate.v1.PropertiesRequestH\x01R\n" + + "properties\x88\x01\x01\x12=\n" + + "\bmetadata\x18\x15 \x01(\v2\x1c.weaviate.v1.MetadataRequestH\x02R\bmetadata\x88\x01\x01\x124\n" + + "\bgroup_by\x18\x16 \x01(\v2\x14.weaviate.v1.GroupByH\x03R\agroupBy\x88\x01\x01\x12\x14\n" + + "\x05limit\x18\x1e \x01(\rR\x05limit\x12\x16\n" + + "\x06offset\x18\x1f \x01(\rR\x06offset\x12\x18\n" + + "\aautocut\x18 \x01(\rR\aautocut\x12\x14\n" + + "\x05after\x18! 
\x01(\tR\x05after\x12,\n" + + "\asort_by\x18\" \x03(\v2\x13.weaviate.v1.SortByR\x06sortBy\x123\n" + + "\afilters\x18( \x01(\v2\x14.weaviate.v1.FiltersH\x04R\afilters\x88\x01\x01\x12=\n" + + "\rhybrid_search\x18) \x01(\v2\x13.weaviate.v1.HybridH\x05R\fhybridSearch\x88\x01\x01\x127\n" + + "\vbm25_search\x18* \x01(\v2\x11.weaviate.v1.BM25H\x06R\n" + + "bm25Search\x88\x01\x01\x12=\n" + + "\vnear_vector\x18+ \x01(\v2\x17.weaviate.v1.NearVectorH\aR\n" + + "nearVector\x88\x01\x01\x12=\n" + + "\vnear_object\x18, \x01(\v2\x17.weaviate.v1.NearObjectH\bR\n" + + "nearObject\x88\x01\x01\x12=\n" + + "\tnear_text\x18- \x01(\v2\x1b.weaviate.v1.NearTextSearchH\tR\bnearText\x88\x01\x01\x12@\n" + + "\n" + + "near_image\x18. \x01(\v2\x1c.weaviate.v1.NearImageSearchH\n" + + "R\tnearImage\x88\x01\x01\x12@\n" + + "\n" + + "near_audio\x18/ \x01(\v2\x1c.weaviate.v1.NearAudioSearchH\vR\tnearAudio\x88\x01\x01\x12@\n" + + "\n" + + "near_video\x180 \x01(\v2\x1c.weaviate.v1.NearVideoSearchH\fR\tnearVideo\x88\x01\x01\x12@\n" + + "\n" + + "near_depth\x181 \x01(\v2\x1c.weaviate.v1.NearDepthSearchH\rR\tnearDepth\x88\x01\x01\x12F\n" + + "\fnear_thermal\x182 \x01(\v2\x1e.weaviate.v1.NearThermalSearchH\x0eR\vnearThermal\x88\x01\x01\x12:\n" + + "\bnear_imu\x183 \x01(\v2\x1a.weaviate.v1.NearIMUSearchH\x0fR\anearImu\x88\x01\x01\x12B\n" + + "\n" + + "generative\x18< \x01(\v2\x1d.weaviate.v1.GenerativeSearchH\x10R\n" + + "generative\x88\x01\x01\x120\n" + + "\x06rerank\x18= \x01(\v2\x13.weaviate.v1.RerankH\x11R\x06rerank\x88\x01\x01\x12$\n" + + "\fuses_123_api\x18d \x01(\bB\x02\x18\x01R\n" + + "uses123Api\x12$\n" + + "\fuses_125_api\x18e \x01(\bB\x02\x18\x01R\n" + + "uses125Api\x12 \n" + + "\fuses_127_api\x18f \x01(\bR\n" + + "uses127ApiB\x14\n" + + "\x12_consistency_levelB\r\n" + + "\v_propertiesB\v\n" + + "\t_metadataB\v\n" + + "\t_group_byB\n" + + "\n" + + "\b_filtersB\x10\n" + + "\x0e_hybrid_searchB\x0e\n" + + "\f_bm25_searchB\x0e\n" + + "\f_near_vectorB\x0e\n" + + "\f_near_objectB\f\n" + + "\n" + + 
"_near_textB\r\n" + + "\v_near_imageB\r\n" + + "\v_near_audioB\r\n" + + "\v_near_videoB\r\n" + + "\v_near_depthB\x0f\n" + + "\r_near_thermalB\v\n" + + "\t_near_imuB\r\n" + + "\v_generativeB\t\n" + + "\a_rerank\"s\n" + + "\aGroupBy\x12\x12\n" + + "\x04path\x18\x01 \x03(\tR\x04path\x12(\n" + + "\x10number_of_groups\x18\x02 \x01(\x05R\x0enumberOfGroups\x12*\n" + + "\x11objects_per_group\x18\x03 \x01(\x05R\x0fobjectsPerGroup\":\n" + + "\x06SortBy\x12\x1c\n" + + "\tascending\x18\x01 \x01(\bR\tascending\x12\x12\n" + + "\x04path\x18\x02 \x03(\tR\x04path\"\xd2\x02\n" + + "\x0fMetadataRequest\x12\x12\n" + + "\x04uuid\x18\x01 \x01(\bR\x04uuid\x12\x16\n" + + "\x06vector\x18\x02 \x01(\bR\x06vector\x12,\n" + + "\x12creation_time_unix\x18\x03 \x01(\bR\x10creationTimeUnix\x121\n" + + "\x15last_update_time_unix\x18\x04 \x01(\bR\x12lastUpdateTimeUnix\x12\x1a\n" + + "\bdistance\x18\x05 \x01(\bR\bdistance\x12\x1c\n" + + "\tcertainty\x18\x06 \x01(\bR\tcertainty\x12\x14\n" + + "\x05score\x18\a \x01(\bR\x05score\x12#\n" + + "\rexplain_score\x18\b \x01(\bR\fexplainScore\x12#\n" + + "\ris_consistent\x18\t \x01(\bR\fisConsistent\x12\x18\n" + + "\avectors\x18\n" + + " \x03(\tR\avectors\"\x9f\x02\n" + + "\x11PropertiesRequest\x12,\n" + + "\x12non_ref_properties\x18\x01 \x03(\tR\x10nonRefProperties\x12H\n" + + "\x0eref_properties\x18\x02 \x03(\v2!.weaviate.v1.RefPropertiesRequestR\rrefProperties\x12Q\n" + + "\x11object_properties\x18\x03 \x03(\v2$.weaviate.v1.ObjectPropertiesRequestR\x10objectProperties\x12?\n" + + "\x1creturn_all_nonref_properties\x18\v \x01(\bR\x19returnAllNonrefProperties\"\xbc\x01\n" + + "\x17ObjectPropertiesRequest\x12\x1b\n" + + "\tprop_name\x18\x01 \x01(\tR\bpropName\x121\n" + + "\x14primitive_properties\x18\x02 \x03(\tR\x13primitiveProperties\x12Q\n" + + "\x11object_properties\x18\x03 \x03(\v2$.weaviate.v1.ObjectPropertiesRequestR\x10objectProperties\"\xec\x01\n" + + "\x14RefPropertiesRequest\x12-\n" + + "\x12reference_property\x18\x01 
\x01(\tR\x11referenceProperty\x12>\n" + + "\n" + + "properties\x18\x02 \x01(\v2\x1e.weaviate.v1.PropertiesRequestR\n" + + "properties\x128\n" + + "\bmetadata\x18\x03 \x01(\v2\x1c.weaviate.v1.MetadataRequestR\bmetadata\x12+\n" + + "\x11target_collection\x18\x04 \x01(\tR\x10targetCollection\"I\n" + + "\x06Rerank\x12\x1a\n" + + "\bproperty\x18\x01 \x01(\tR\bproperty\x12\x19\n" + + "\x05query\x18\x02 \x01(\tH\x00R\x05query\x88\x01\x01B\b\n" + + "\x06_query\"\x80\x03\n" + + "\vSearchReply\x12\x12\n" + + "\x04took\x18\x01 \x01(\x02R\x04took\x123\n" + + "\aresults\x18\x02 \x03(\v2\x19.weaviate.v1.SearchResultR\aresults\x12C\n" + + "\x19generative_grouped_result\x18\x03 \x01(\tB\x02\x18\x01H\x00R\x17generativeGroupedResult\x88\x01\x01\x12D\n" + + "\x10group_by_results\x18\x04 \x03(\v2\x1a.weaviate.v1.GroupByResultR\x0egroupByResults\x12`\n" + + "\x1agenerative_grouped_results\x18\x05 \x01(\v2\x1d.weaviate.v1.GenerativeResultH\x01R\x18generativeGroupedResults\x88\x01\x01B\x1c\n" + + "\x1a_generative_grouped_resultB\x1d\n" + + "\x1b_generative_grouped_results\"#\n" + + "\vRerankReply\x12\x14\n" + + "\x05score\x18\x01 \x01(\x01R\x05score\"\xc9\x03\n" + + "\rGroupByResult\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12!\n" + + "\fmin_distance\x18\x02 \x01(\x02R\vminDistance\x12!\n" + + "\fmax_distance\x18\x03 \x01(\x02R\vmaxDistance\x12*\n" + + "\x11number_of_objects\x18\x04 \x01(\x03R\x0fnumberOfObjects\x123\n" + + "\aobjects\x18\x05 \x03(\v2\x19.weaviate.v1.SearchResultR\aobjects\x125\n" + + "\x06rerank\x18\x06 \x01(\v2\x18.weaviate.v1.RerankReplyH\x00R\x06rerank\x88\x01\x01\x12E\n" + + "\n" + + "generative\x18\a \x01(\v2\x1c.weaviate.v1.GenerativeReplyB\x02\x18\x01H\x01R\n" + + "generative\x88\x01\x01\x12O\n" + + "\x11generative_result\x18\b \x01(\v2\x1d.weaviate.v1.GenerativeResultH\x02R\x10generativeResult\x88\x01\x01B\t\n" + + "\a_rerankB\r\n" + + "\v_generativeB\x14\n" + + "\x12_generative_result\"\xd9\x01\n" + + "\fSearchResult\x12=\n" + + "\n" + + 
"properties\x18\x01 \x01(\v2\x1d.weaviate.v1.PropertiesResultR\n" + + "properties\x127\n" + + "\bmetadata\x18\x02 \x01(\v2\x1b.weaviate.v1.MetadataResultR\bmetadata\x12B\n" + + "\n" + + "generative\x18\x03 \x01(\v2\x1d.weaviate.v1.GenerativeResultH\x00R\n" + + "generative\x88\x01\x01B\r\n" + + "\v_generative\"\xd1\a\n" + + "\x0eMetadataResult\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\x12\x1a\n" + + "\x06vector\x18\x02 \x03(\x02B\x02\x18\x01R\x06vector\x12,\n" + + "\x12creation_time_unix\x18\x03 \x01(\x03R\x10creationTimeUnix\x12;\n" + + "\x1acreation_time_unix_present\x18\x04 \x01(\bR\x17creationTimeUnixPresent\x121\n" + + "\x15last_update_time_unix\x18\x05 \x01(\x03R\x12lastUpdateTimeUnix\x12@\n" + + "\x1dlast_update_time_unix_present\x18\x06 \x01(\bR\x19lastUpdateTimeUnixPresent\x12\x1a\n" + + "\bdistance\x18\a \x01(\x02R\bdistance\x12)\n" + + "\x10distance_present\x18\b \x01(\bR\x0fdistancePresent\x12\x1c\n" + + "\tcertainty\x18\t \x01(\x02R\tcertainty\x12+\n" + + "\x11certainty_present\x18\n" + + " \x01(\bR\x10certaintyPresent\x12\x14\n" + + "\x05score\x18\v \x01(\x02R\x05score\x12#\n" + + "\rscore_present\x18\f \x01(\bR\fscorePresent\x12#\n" + + "\rexplain_score\x18\r \x01(\tR\fexplainScore\x122\n" + + "\x15explain_score_present\x18\x0e \x01(\bR\x13explainScorePresent\x12(\n" + + "\ris_consistent\x18\x0f \x01(\bH\x00R\fisConsistent\x88\x01\x01\x12\"\n" + + "\n" + + "generative\x18\x10 \x01(\tB\x02\x18\x01R\n" + + "generative\x121\n" + + "\x12generative_present\x18\x11 \x01(\bB\x02\x18\x01R\x11generativePresent\x122\n" + + "\x15is_consistent_present\x18\x12 \x01(\bR\x13isConsistentPresent\x12!\n" + + "\fvector_bytes\x18\x13 \x01(\fR\vvectorBytes\x12\x1e\n" + + "\vid_as_bytes\x18\x14 \x01(\fR\tidAsBytes\x12!\n" + + "\frerank_score\x18\x15 \x01(\x01R\vrerankScore\x120\n" + + "\x14rerank_score_present\x18\x16 \x01(\bR\x12rerankScorePresent\x12.\n" + + "\avectors\x18\x17 \x03(\v2\x14.weaviate.v1.VectorsR\avectorsB\x10\n" + + "\x0e_is_consistent\"\xce\x02\n" + + 
"\x10PropertiesResult\x12=\n" + + "\tref_props\x18\x02 \x03(\v2 .weaviate.v1.RefPropertiesResultR\brefProps\x12+\n" + + "\x11target_collection\x18\x03 \x01(\tR\x10targetCollection\x127\n" + + "\bmetadata\x18\x04 \x01(\v2\x1b.weaviate.v1.MetadataResultR\bmetadata\x12;\n" + + "\rnon_ref_props\x18\v \x01(\v2\x17.weaviate.v1.PropertiesR\vnonRefProps\x12.\n" + + "\x13ref_props_requested\x18\f \x01(\bR\x11refPropsRequestedJ\x04\b\x01\x10\x02J\x04\b\x05\x10\x06J\x04\b\x06\x10\aJ\x04\b\a\x10\bJ\x04\b\b\x10\tJ\x04\b\t\x10\n" + + "J\x04\b\n" + + "\x10\v\"q\n" + + "\x13RefPropertiesResult\x12=\n" + + "\n" + + "properties\x18\x01 \x03(\v2\x1d.weaviate.v1.PropertiesResultR\n" + + "properties\x12\x1b\n" + + "\tprop_name\x18\x02 \x01(\tR\bpropNameBs\n" + + "#io.weaviate.client.grpc.protocol.v1B\x16WeaviateProtoSearchGetZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var ( + file_v1_search_get_proto_rawDescOnce sync.Once + file_v1_search_get_proto_rawDescData []byte +) + +func file_v1_search_get_proto_rawDescGZIP() []byte { + file_v1_search_get_proto_rawDescOnce.Do(func() { + file_v1_search_get_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_v1_search_get_proto_rawDesc), len(file_v1_search_get_proto_rawDesc))) + }) + return file_v1_search_get_proto_rawDescData +} + +var file_v1_search_get_proto_msgTypes = make([]protoimpl.MessageInfo, 15) +var file_v1_search_get_proto_goTypes = []any{ + (*SearchRequest)(nil), // 0: weaviate.v1.SearchRequest + (*GroupBy)(nil), // 1: weaviate.v1.GroupBy + (*SortBy)(nil), // 2: weaviate.v1.SortBy + (*MetadataRequest)(nil), // 3: weaviate.v1.MetadataRequest + (*PropertiesRequest)(nil), // 4: weaviate.v1.PropertiesRequest + (*ObjectPropertiesRequest)(nil), // 5: weaviate.v1.ObjectPropertiesRequest + (*RefPropertiesRequest)(nil), // 6: weaviate.v1.RefPropertiesRequest + (*Rerank)(nil), // 7: weaviate.v1.Rerank + (*SearchReply)(nil), // 8: weaviate.v1.SearchReply + (*RerankReply)(nil), // 9: 
weaviate.v1.RerankReply + (*GroupByResult)(nil), // 10: weaviate.v1.GroupByResult + (*SearchResult)(nil), // 11: weaviate.v1.SearchResult + (*MetadataResult)(nil), // 12: weaviate.v1.MetadataResult + (*PropertiesResult)(nil), // 13: weaviate.v1.PropertiesResult + (*RefPropertiesResult)(nil), // 14: weaviate.v1.RefPropertiesResult + (ConsistencyLevel)(0), // 15: weaviate.v1.ConsistencyLevel + (*Filters)(nil), // 16: weaviate.v1.Filters + (*Hybrid)(nil), // 17: weaviate.v1.Hybrid + (*BM25)(nil), // 18: weaviate.v1.BM25 + (*NearVector)(nil), // 19: weaviate.v1.NearVector + (*NearObject)(nil), // 20: weaviate.v1.NearObject + (*NearTextSearch)(nil), // 21: weaviate.v1.NearTextSearch + (*NearImageSearch)(nil), // 22: weaviate.v1.NearImageSearch + (*NearAudioSearch)(nil), // 23: weaviate.v1.NearAudioSearch + (*NearVideoSearch)(nil), // 24: weaviate.v1.NearVideoSearch + (*NearDepthSearch)(nil), // 25: weaviate.v1.NearDepthSearch + (*NearThermalSearch)(nil), // 26: weaviate.v1.NearThermalSearch + (*NearIMUSearch)(nil), // 27: weaviate.v1.NearIMUSearch + (*GenerativeSearch)(nil), // 28: weaviate.v1.GenerativeSearch + (*GenerativeResult)(nil), // 29: weaviate.v1.GenerativeResult + (*GenerativeReply)(nil), // 30: weaviate.v1.GenerativeReply + (*Vectors)(nil), // 31: weaviate.v1.Vectors + (*Properties)(nil), // 32: weaviate.v1.Properties +} +var file_v1_search_get_proto_depIdxs = []int32{ + 15, // 0: weaviate.v1.SearchRequest.consistency_level:type_name -> weaviate.v1.ConsistencyLevel + 4, // 1: weaviate.v1.SearchRequest.properties:type_name -> weaviate.v1.PropertiesRequest + 3, // 2: weaviate.v1.SearchRequest.metadata:type_name -> weaviate.v1.MetadataRequest + 1, // 3: weaviate.v1.SearchRequest.group_by:type_name -> weaviate.v1.GroupBy + 2, // 4: weaviate.v1.SearchRequest.sort_by:type_name -> weaviate.v1.SortBy + 16, // 5: weaviate.v1.SearchRequest.filters:type_name -> weaviate.v1.Filters + 17, // 6: weaviate.v1.SearchRequest.hybrid_search:type_name -> weaviate.v1.Hybrid + 18, 
// 7: weaviate.v1.SearchRequest.bm25_search:type_name -> weaviate.v1.BM25 + 19, // 8: weaviate.v1.SearchRequest.near_vector:type_name -> weaviate.v1.NearVector + 20, // 9: weaviate.v1.SearchRequest.near_object:type_name -> weaviate.v1.NearObject + 21, // 10: weaviate.v1.SearchRequest.near_text:type_name -> weaviate.v1.NearTextSearch + 22, // 11: weaviate.v1.SearchRequest.near_image:type_name -> weaviate.v1.NearImageSearch + 23, // 12: weaviate.v1.SearchRequest.near_audio:type_name -> weaviate.v1.NearAudioSearch + 24, // 13: weaviate.v1.SearchRequest.near_video:type_name -> weaviate.v1.NearVideoSearch + 25, // 14: weaviate.v1.SearchRequest.near_depth:type_name -> weaviate.v1.NearDepthSearch + 26, // 15: weaviate.v1.SearchRequest.near_thermal:type_name -> weaviate.v1.NearThermalSearch + 27, // 16: weaviate.v1.SearchRequest.near_imu:type_name -> weaviate.v1.NearIMUSearch + 28, // 17: weaviate.v1.SearchRequest.generative:type_name -> weaviate.v1.GenerativeSearch + 7, // 18: weaviate.v1.SearchRequest.rerank:type_name -> weaviate.v1.Rerank + 6, // 19: weaviate.v1.PropertiesRequest.ref_properties:type_name -> weaviate.v1.RefPropertiesRequest + 5, // 20: weaviate.v1.PropertiesRequest.object_properties:type_name -> weaviate.v1.ObjectPropertiesRequest + 5, // 21: weaviate.v1.ObjectPropertiesRequest.object_properties:type_name -> weaviate.v1.ObjectPropertiesRequest + 4, // 22: weaviate.v1.RefPropertiesRequest.properties:type_name -> weaviate.v1.PropertiesRequest + 3, // 23: weaviate.v1.RefPropertiesRequest.metadata:type_name -> weaviate.v1.MetadataRequest + 11, // 24: weaviate.v1.SearchReply.results:type_name -> weaviate.v1.SearchResult + 10, // 25: weaviate.v1.SearchReply.group_by_results:type_name -> weaviate.v1.GroupByResult + 29, // 26: weaviate.v1.SearchReply.generative_grouped_results:type_name -> weaviate.v1.GenerativeResult + 11, // 27: weaviate.v1.GroupByResult.objects:type_name -> weaviate.v1.SearchResult + 9, // 28: weaviate.v1.GroupByResult.rerank:type_name -> 
weaviate.v1.RerankReply + 30, // 29: weaviate.v1.GroupByResult.generative:type_name -> weaviate.v1.GenerativeReply + 29, // 30: weaviate.v1.GroupByResult.generative_result:type_name -> weaviate.v1.GenerativeResult + 13, // 31: weaviate.v1.SearchResult.properties:type_name -> weaviate.v1.PropertiesResult + 12, // 32: weaviate.v1.SearchResult.metadata:type_name -> weaviate.v1.MetadataResult + 29, // 33: weaviate.v1.SearchResult.generative:type_name -> weaviate.v1.GenerativeResult + 31, // 34: weaviate.v1.MetadataResult.vectors:type_name -> weaviate.v1.Vectors + 14, // 35: weaviate.v1.PropertiesResult.ref_props:type_name -> weaviate.v1.RefPropertiesResult + 12, // 36: weaviate.v1.PropertiesResult.metadata:type_name -> weaviate.v1.MetadataResult + 32, // 37: weaviate.v1.PropertiesResult.non_ref_props:type_name -> weaviate.v1.Properties + 13, // 38: weaviate.v1.RefPropertiesResult.properties:type_name -> weaviate.v1.PropertiesResult + 39, // [39:39] is the sub-list for method output_type + 39, // [39:39] is the sub-list for method input_type + 39, // [39:39] is the sub-list for extension type_name + 39, // [39:39] is the sub-list for extension extendee + 0, // [0:39] is the sub-list for field type_name +} + +func init() { file_v1_search_get_proto_init() } +func file_v1_search_get_proto_init() { + if File_v1_search_get_proto != nil { + return + } + file_v1_base_proto_init() + file_v1_base_search_proto_init() + file_v1_generative_proto_init() + file_v1_properties_proto_init() + file_v1_search_get_proto_msgTypes[0].OneofWrappers = []any{} + file_v1_search_get_proto_msgTypes[7].OneofWrappers = []any{} + file_v1_search_get_proto_msgTypes[8].OneofWrappers = []any{} + file_v1_search_get_proto_msgTypes[10].OneofWrappers = []any{} + file_v1_search_get_proto_msgTypes[11].OneofWrappers = []any{} + file_v1_search_get_proto_msgTypes[12].OneofWrappers = []any{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: 
reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v1_search_get_proto_rawDesc), len(file_v1_search_get_proto_rawDesc)), + NumEnums: 0, + NumMessages: 15, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_v1_search_get_proto_goTypes, + DependencyIndexes: file_v1_search_get_proto_depIdxs, + MessageInfos: file_v1_search_get_proto_msgTypes, + }.Build() + File_v1_search_get_proto = out.File + file_v1_search_get_proto_goTypes = nil + file_v1_search_get_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/tenants.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/tenants.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..41626597dba1a561cc35b7aa6e42c410cb8dbd1c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/tenants.pb.go @@ -0,0 +1,411 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package protocol + +import ( + reflect "reflect" + sync "sync" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type TenantActivityStatus int32 + +const ( + TenantActivityStatus_TENANT_ACTIVITY_STATUS_UNSPECIFIED TenantActivityStatus = 0 + TenantActivityStatus_TENANT_ACTIVITY_STATUS_HOT TenantActivityStatus = 1 + TenantActivityStatus_TENANT_ACTIVITY_STATUS_COLD TenantActivityStatus = 2 + TenantActivityStatus_TENANT_ACTIVITY_STATUS_FROZEN TenantActivityStatus = 4 + TenantActivityStatus_TENANT_ACTIVITY_STATUS_UNFREEZING TenantActivityStatus = 5 + TenantActivityStatus_TENANT_ACTIVITY_STATUS_FREEZING TenantActivityStatus = 6 + // not used yet - added to let the clients already add code to handle this in the future + TenantActivityStatus_TENANT_ACTIVITY_STATUS_ACTIVE TenantActivityStatus = 7 + TenantActivityStatus_TENANT_ACTIVITY_STATUS_INACTIVE TenantActivityStatus = 8 + TenantActivityStatus_TENANT_ACTIVITY_STATUS_OFFLOADED TenantActivityStatus = 9 + TenantActivityStatus_TENANT_ACTIVITY_STATUS_OFFLOADING TenantActivityStatus = 10 + TenantActivityStatus_TENANT_ACTIVITY_STATUS_ONLOADING TenantActivityStatus = 11 +) + +// Enum value maps for TenantActivityStatus. 
+var ( + TenantActivityStatus_name = map[int32]string{ + 0: "TENANT_ACTIVITY_STATUS_UNSPECIFIED", + 1: "TENANT_ACTIVITY_STATUS_HOT", + 2: "TENANT_ACTIVITY_STATUS_COLD", + 4: "TENANT_ACTIVITY_STATUS_FROZEN", + 5: "TENANT_ACTIVITY_STATUS_UNFREEZING", + 6: "TENANT_ACTIVITY_STATUS_FREEZING", + 7: "TENANT_ACTIVITY_STATUS_ACTIVE", + 8: "TENANT_ACTIVITY_STATUS_INACTIVE", + 9: "TENANT_ACTIVITY_STATUS_OFFLOADED", + 10: "TENANT_ACTIVITY_STATUS_OFFLOADING", + 11: "TENANT_ACTIVITY_STATUS_ONLOADING", + } + TenantActivityStatus_value = map[string]int32{ + "TENANT_ACTIVITY_STATUS_UNSPECIFIED": 0, + "TENANT_ACTIVITY_STATUS_HOT": 1, + "TENANT_ACTIVITY_STATUS_COLD": 2, + "TENANT_ACTIVITY_STATUS_FROZEN": 4, + "TENANT_ACTIVITY_STATUS_UNFREEZING": 5, + "TENANT_ACTIVITY_STATUS_FREEZING": 6, + "TENANT_ACTIVITY_STATUS_ACTIVE": 7, + "TENANT_ACTIVITY_STATUS_INACTIVE": 8, + "TENANT_ACTIVITY_STATUS_OFFLOADED": 9, + "TENANT_ACTIVITY_STATUS_OFFLOADING": 10, + "TENANT_ACTIVITY_STATUS_ONLOADING": 11, + } +) + +func (x TenantActivityStatus) Enum() *TenantActivityStatus { + p := new(TenantActivityStatus) + *p = x + return p +} + +func (x TenantActivityStatus) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (TenantActivityStatus) Descriptor() protoreflect.EnumDescriptor { + return file_v1_tenants_proto_enumTypes[0].Descriptor() +} + +func (TenantActivityStatus) Type() protoreflect.EnumType { + return &file_v1_tenants_proto_enumTypes[0] +} + +func (x TenantActivityStatus) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use TenantActivityStatus.Descriptor instead. 
+func (TenantActivityStatus) EnumDescriptor() ([]byte, []int) { + return file_v1_tenants_proto_rawDescGZIP(), []int{0} +} + +type TenantsGetRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Collection string `protobuf:"bytes,1,opt,name=collection,proto3" json:"collection,omitempty"` + // we might need to add a tenant-cursor api at some point, make this easily extendable + // + // Types that are valid to be assigned to Params: + // + // *TenantsGetRequest_Names + Params isTenantsGetRequest_Params `protobuf_oneof:"params"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *TenantsGetRequest) Reset() { + *x = TenantsGetRequest{} + mi := &file_v1_tenants_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *TenantsGetRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TenantsGetRequest) ProtoMessage() {} + +func (x *TenantsGetRequest) ProtoReflect() protoreflect.Message { + mi := &file_v1_tenants_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TenantsGetRequest.ProtoReflect.Descriptor instead. 
+func (*TenantsGetRequest) Descriptor() ([]byte, []int) { + return file_v1_tenants_proto_rawDescGZIP(), []int{0} +} + +func (x *TenantsGetRequest) GetCollection() string { + if x != nil { + return x.Collection + } + return "" +} + +func (x *TenantsGetRequest) GetParams() isTenantsGetRequest_Params { + if x != nil { + return x.Params + } + return nil +} + +func (x *TenantsGetRequest) GetNames() *TenantNames { + if x != nil { + if x, ok := x.Params.(*TenantsGetRequest_Names); ok { + return x.Names + } + } + return nil +} + +type isTenantsGetRequest_Params interface { + isTenantsGetRequest_Params() +} + +type TenantsGetRequest_Names struct { + Names *TenantNames `protobuf:"bytes,2,opt,name=names,proto3,oneof"` +} + +func (*TenantsGetRequest_Names) isTenantsGetRequest_Params() {} + +type TenantNames struct { + state protoimpl.MessageState `protogen:"open.v1"` + Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *TenantNames) Reset() { + *x = TenantNames{} + mi := &file_v1_tenants_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *TenantNames) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TenantNames) ProtoMessage() {} + +func (x *TenantNames) ProtoReflect() protoreflect.Message { + mi := &file_v1_tenants_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TenantNames.ProtoReflect.Descriptor instead. 
+func (*TenantNames) Descriptor() ([]byte, []int) { + return file_v1_tenants_proto_rawDescGZIP(), []int{1} +} + +func (x *TenantNames) GetValues() []string { + if x != nil { + return x.Values + } + return nil +} + +type TenantsGetReply struct { + state protoimpl.MessageState `protogen:"open.v1"` + Took float32 `protobuf:"fixed32,1,opt,name=took,proto3" json:"took,omitempty"` + Tenants []*Tenant `protobuf:"bytes,2,rep,name=tenants,proto3" json:"tenants,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *TenantsGetReply) Reset() { + *x = TenantsGetReply{} + mi := &file_v1_tenants_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *TenantsGetReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TenantsGetReply) ProtoMessage() {} + +func (x *TenantsGetReply) ProtoReflect() protoreflect.Message { + mi := &file_v1_tenants_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TenantsGetReply.ProtoReflect.Descriptor instead. 
+func (*TenantsGetReply) Descriptor() ([]byte, []int) { + return file_v1_tenants_proto_rawDescGZIP(), []int{2} +} + +func (x *TenantsGetReply) GetTook() float32 { + if x != nil { + return x.Took + } + return 0 +} + +func (x *TenantsGetReply) GetTenants() []*Tenant { + if x != nil { + return x.Tenants + } + return nil +} + +type Tenant struct { + state protoimpl.MessageState `protogen:"open.v1"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + ActivityStatus TenantActivityStatus `protobuf:"varint,2,opt,name=activity_status,json=activityStatus,proto3,enum=weaviate.v1.TenantActivityStatus" json:"activity_status,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Tenant) Reset() { + *x = Tenant{} + mi := &file_v1_tenants_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Tenant) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Tenant) ProtoMessage() {} + +func (x *Tenant) ProtoReflect() protoreflect.Message { + mi := &file_v1_tenants_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Tenant.ProtoReflect.Descriptor instead. 
+func (*Tenant) Descriptor() ([]byte, []int) { + return file_v1_tenants_proto_rawDescGZIP(), []int{3} +} + +func (x *Tenant) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Tenant) GetActivityStatus() TenantActivityStatus { + if x != nil { + return x.ActivityStatus + } + return TenantActivityStatus_TENANT_ACTIVITY_STATUS_UNSPECIFIED +} + +var File_v1_tenants_proto protoreflect.FileDescriptor + +const file_v1_tenants_proto_rawDesc = "" + + "\n" + + "\x10v1/tenants.proto\x12\vweaviate.v1\"o\n" + + "\x11TenantsGetRequest\x12\x1e\n" + + "\n" + + "collection\x18\x01 \x01(\tR\n" + + "collection\x120\n" + + "\x05names\x18\x02 \x01(\v2\x18.weaviate.v1.TenantNamesH\x00R\x05namesB\b\n" + + "\x06params\"%\n" + + "\vTenantNames\x12\x16\n" + + "\x06values\x18\x01 \x03(\tR\x06values\"T\n" + + "\x0fTenantsGetReply\x12\x12\n" + + "\x04took\x18\x01 \x01(\x02R\x04took\x12-\n" + + "\atenants\x18\x02 \x03(\v2\x13.weaviate.v1.TenantR\atenants\"h\n" + + "\x06Tenant\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12J\n" + + "\x0factivity_status\x18\x02 \x01(\x0e2!.weaviate.v1.TenantActivityStatusR\x0eactivityStatus*\xaf\x03\n" + + "\x14TenantActivityStatus\x12&\n" + + "\"TENANT_ACTIVITY_STATUS_UNSPECIFIED\x10\x00\x12\x1e\n" + + "\x1aTENANT_ACTIVITY_STATUS_HOT\x10\x01\x12\x1f\n" + + "\x1bTENANT_ACTIVITY_STATUS_COLD\x10\x02\x12!\n" + + "\x1dTENANT_ACTIVITY_STATUS_FROZEN\x10\x04\x12%\n" + + "!TENANT_ACTIVITY_STATUS_UNFREEZING\x10\x05\x12#\n" + + "\x1fTENANT_ACTIVITY_STATUS_FREEZING\x10\x06\x12!\n" + + "\x1dTENANT_ACTIVITY_STATUS_ACTIVE\x10\a\x12#\n" + + "\x1fTENANT_ACTIVITY_STATUS_INACTIVE\x10\b\x12$\n" + + " TENANT_ACTIVITY_STATUS_OFFLOADED\x10\t\x12%\n" + + "!TENANT_ACTIVITY_STATUS_OFFLOADING\x10\n" + + "\x12$\n" + + " TENANT_ACTIVITY_STATUS_ONLOADING\x10\v\"\x04\b\x03\x10\x03Bq\n" + + "#io.weaviate.client.grpc.protocol.v1B\x14WeaviateProtoTenantsZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var ( + 
file_v1_tenants_proto_rawDescOnce sync.Once + file_v1_tenants_proto_rawDescData []byte +) + +func file_v1_tenants_proto_rawDescGZIP() []byte { + file_v1_tenants_proto_rawDescOnce.Do(func() { + file_v1_tenants_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_v1_tenants_proto_rawDesc), len(file_v1_tenants_proto_rawDesc))) + }) + return file_v1_tenants_proto_rawDescData +} + +var file_v1_tenants_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_v1_tenants_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_v1_tenants_proto_goTypes = []any{ + (TenantActivityStatus)(0), // 0: weaviate.v1.TenantActivityStatus + (*TenantsGetRequest)(nil), // 1: weaviate.v1.TenantsGetRequest + (*TenantNames)(nil), // 2: weaviate.v1.TenantNames + (*TenantsGetReply)(nil), // 3: weaviate.v1.TenantsGetReply + (*Tenant)(nil), // 4: weaviate.v1.Tenant +} +var file_v1_tenants_proto_depIdxs = []int32{ + 2, // 0: weaviate.v1.TenantsGetRequest.names:type_name -> weaviate.v1.TenantNames + 4, // 1: weaviate.v1.TenantsGetReply.tenants:type_name -> weaviate.v1.Tenant + 0, // 2: weaviate.v1.Tenant.activity_status:type_name -> weaviate.v1.TenantActivityStatus + 3, // [3:3] is the sub-list for method output_type + 3, // [3:3] is the sub-list for method input_type + 3, // [3:3] is the sub-list for extension type_name + 3, // [3:3] is the sub-list for extension extendee + 0, // [0:3] is the sub-list for field type_name +} + +func init() { file_v1_tenants_proto_init() } +func file_v1_tenants_proto_init() { + if File_v1_tenants_proto != nil { + return + } + file_v1_tenants_proto_msgTypes[0].OneofWrappers = []any{ + (*TenantsGetRequest_Names)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v1_tenants_proto_rawDesc), len(file_v1_tenants_proto_rawDesc)), + NumEnums: 1, + NumMessages: 4, + NumExtensions: 0, + NumServices: 0, 
+ }, + GoTypes: file_v1_tenants_proto_goTypes, + DependencyIndexes: file_v1_tenants_proto_depIdxs, + EnumInfos: file_v1_tenants_proto_enumTypes, + MessageInfos: file_v1_tenants_proto_msgTypes, + }.Build() + File_v1_tenants_proto = out.File + file_v1_tenants_proto_goTypes = nil + file_v1_tenants_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/weaviate.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/weaviate.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..9b01c357735afc264e3d4ca0308abdc46e2c9fa4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/weaviate.pb.go @@ -0,0 +1,105 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package protocol + +import ( + reflect "reflect" + unsafe "unsafe" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +var File_v1_weaviate_proto protoreflect.FileDescriptor + +const file_v1_weaviate_proto_rawDesc = "" + + "\n" + + "\x11v1/weaviate.proto\x12\vweaviate.v1\x1a\x12v1/aggregate.proto\x1a\x0ev1/batch.proto\x1a\x15v1/batch_delete.proto\x1a\x13v1/search_get.proto\x1a\x10v1/tenants.proto2\x87\x05\n" + + "\bWeaviate\x12@\n" + + "\x06Search\x12\x1a.weaviate.v1.SearchRequest\x1a\x18.weaviate.v1.SearchReply\"\x00\x12R\n" + + "\fBatchObjects\x12 .weaviate.v1.BatchObjectsRequest\x1a\x1e.weaviate.v1.BatchObjectsReply\"\x00\x12[\n" + + "\x0fBatchReferences\x12#.weaviate.v1.BatchReferencesRequest\x1a!.weaviate.v1.BatchReferencesReply\"\x00\x12O\n" + + "\vBatchDelete\x12\x1f.weaviate.v1.BatchDeleteRequest\x1a\x1d.weaviate.v1.BatchDeleteReply\"\x00\x12L\n" + + "\n" + + "TenantsGet\x12\x1e.weaviate.v1.TenantsGetRequest\x1a\x1c.weaviate.v1.TenantsGetReply\"\x00\x12I\n" + + "\tAggregate\x12\x1d.weaviate.v1.AggregateRequest\x1a\x1b.weaviate.v1.AggregateReply\"\x00\x12I\n" + + "\tBatchSend\x12\x1d.weaviate.v1.BatchSendRequest\x1a\x1b.weaviate.v1.BatchSendReply\"\x00\x12S\n" + + "\vBatchStream\x12\x1f.weaviate.v1.BatchStreamRequest\x1a\x1f.weaviate.v1.BatchStreamMessage\"\x000\x01Bj\n" + + "#io.weaviate.client.grpc.protocol.v1B\rWeaviateProtoZ4github.com/weaviate/weaviate/grpc/generated;protocolb\x06proto3" + +var file_v1_weaviate_proto_goTypes = []any{ + (*SearchRequest)(nil), // 0: weaviate.v1.SearchRequest + (*BatchObjectsRequest)(nil), // 1: weaviate.v1.BatchObjectsRequest + (*BatchReferencesRequest)(nil), // 2: weaviate.v1.BatchReferencesRequest + (*BatchDeleteRequest)(nil), // 3: weaviate.v1.BatchDeleteRequest + (*TenantsGetRequest)(nil), // 4: weaviate.v1.TenantsGetRequest + (*AggregateRequest)(nil), // 5: weaviate.v1.AggregateRequest + (*BatchSendRequest)(nil), // 6: weaviate.v1.BatchSendRequest + (*BatchStreamRequest)(nil), // 7: weaviate.v1.BatchStreamRequest + (*SearchReply)(nil), // 8: weaviate.v1.SearchReply + 
(*BatchObjectsReply)(nil), // 9: weaviate.v1.BatchObjectsReply + (*BatchReferencesReply)(nil), // 10: weaviate.v1.BatchReferencesReply + (*BatchDeleteReply)(nil), // 11: weaviate.v1.BatchDeleteReply + (*TenantsGetReply)(nil), // 12: weaviate.v1.TenantsGetReply + (*AggregateReply)(nil), // 13: weaviate.v1.AggregateReply + (*BatchSendReply)(nil), // 14: weaviate.v1.BatchSendReply + (*BatchStreamMessage)(nil), // 15: weaviate.v1.BatchStreamMessage +} +var file_v1_weaviate_proto_depIdxs = []int32{ + 0, // 0: weaviate.v1.Weaviate.Search:input_type -> weaviate.v1.SearchRequest + 1, // 1: weaviate.v1.Weaviate.BatchObjects:input_type -> weaviate.v1.BatchObjectsRequest + 2, // 2: weaviate.v1.Weaviate.BatchReferences:input_type -> weaviate.v1.BatchReferencesRequest + 3, // 3: weaviate.v1.Weaviate.BatchDelete:input_type -> weaviate.v1.BatchDeleteRequest + 4, // 4: weaviate.v1.Weaviate.TenantsGet:input_type -> weaviate.v1.TenantsGetRequest + 5, // 5: weaviate.v1.Weaviate.Aggregate:input_type -> weaviate.v1.AggregateRequest + 6, // 6: weaviate.v1.Weaviate.BatchSend:input_type -> weaviate.v1.BatchSendRequest + 7, // 7: weaviate.v1.Weaviate.BatchStream:input_type -> weaviate.v1.BatchStreamRequest + 8, // 8: weaviate.v1.Weaviate.Search:output_type -> weaviate.v1.SearchReply + 9, // 9: weaviate.v1.Weaviate.BatchObjects:output_type -> weaviate.v1.BatchObjectsReply + 10, // 10: weaviate.v1.Weaviate.BatchReferences:output_type -> weaviate.v1.BatchReferencesReply + 11, // 11: weaviate.v1.Weaviate.BatchDelete:output_type -> weaviate.v1.BatchDeleteReply + 12, // 12: weaviate.v1.Weaviate.TenantsGet:output_type -> weaviate.v1.TenantsGetReply + 13, // 13: weaviate.v1.Weaviate.Aggregate:output_type -> weaviate.v1.AggregateReply + 14, // 14: weaviate.v1.Weaviate.BatchSend:output_type -> weaviate.v1.BatchSendReply + 15, // 15: weaviate.v1.Weaviate.BatchStream:output_type -> weaviate.v1.BatchStreamMessage + 8, // [8:16] is the sub-list for method output_type + 0, // [0:8] is the sub-list for 
method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_v1_weaviate_proto_init() } +func file_v1_weaviate_proto_init() { + if File_v1_weaviate_proto != nil { + return + } + file_v1_aggregate_proto_init() + file_v1_batch_proto_init() + file_v1_batch_delete_proto_init() + file_v1_search_get_proto_init() + file_v1_tenants_proto_init() + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_v1_weaviate_proto_rawDesc), len(file_v1_weaviate_proto_rawDesc)), + NumEnums: 0, + NumMessages: 0, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_v1_weaviate_proto_goTypes, + DependencyIndexes: file_v1_weaviate_proto_depIdxs, + }.Build() + File_v1_weaviate_proto = out.File + file_v1_weaviate_proto_goTypes = nil + file_v1_weaviate_proto_depIdxs = nil +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/weaviate_grpc.pb.go b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/weaviate_grpc.pb.go new file mode 100644 index 0000000000000000000000000000000000000000..8ecac12947f5de5eae11f5ee6119b0266af7bfdc --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/generated/protocol/v1/weaviate_grpc.pb.go @@ -0,0 +1,388 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. + +package protocol + +import ( + context "context" + + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. 
+const _ = grpc.SupportPackageIsVersion9 + +const ( + Weaviate_Search_FullMethodName = "/weaviate.v1.Weaviate/Search" + Weaviate_BatchObjects_FullMethodName = "/weaviate.v1.Weaviate/BatchObjects" + Weaviate_BatchReferences_FullMethodName = "/weaviate.v1.Weaviate/BatchReferences" + Weaviate_BatchDelete_FullMethodName = "/weaviate.v1.Weaviate/BatchDelete" + Weaviate_TenantsGet_FullMethodName = "/weaviate.v1.Weaviate/TenantsGet" + Weaviate_Aggregate_FullMethodName = "/weaviate.v1.Weaviate/Aggregate" + Weaviate_BatchSend_FullMethodName = "/weaviate.v1.Weaviate/BatchSend" + Weaviate_BatchStream_FullMethodName = "/weaviate.v1.Weaviate/BatchStream" +) + +// WeaviateClient is the client API for Weaviate service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type WeaviateClient interface { + Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*SearchReply, error) + BatchObjects(ctx context.Context, in *BatchObjectsRequest, opts ...grpc.CallOption) (*BatchObjectsReply, error) + BatchReferences(ctx context.Context, in *BatchReferencesRequest, opts ...grpc.CallOption) (*BatchReferencesReply, error) + BatchDelete(ctx context.Context, in *BatchDeleteRequest, opts ...grpc.CallOption) (*BatchDeleteReply, error) + TenantsGet(ctx context.Context, in *TenantsGetRequest, opts ...grpc.CallOption) (*TenantsGetReply, error) + Aggregate(ctx context.Context, in *AggregateRequest, opts ...grpc.CallOption) (*AggregateReply, error) + BatchSend(ctx context.Context, in *BatchSendRequest, opts ...grpc.CallOption) (*BatchSendReply, error) + BatchStream(ctx context.Context, in *BatchStreamRequest, opts ...grpc.CallOption) (grpc.ServerStreamingClient[BatchStreamMessage], error) +} + +type weaviateClient struct { + cc grpc.ClientConnInterface +} + +func NewWeaviateClient(cc grpc.ClientConnInterface) WeaviateClient { + return &weaviateClient{cc} +} + +func (c 
*weaviateClient) Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*SearchReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(SearchReply) + err := c.cc.Invoke(ctx, Weaviate_Search_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *weaviateClient) BatchObjects(ctx context.Context, in *BatchObjectsRequest, opts ...grpc.CallOption) (*BatchObjectsReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(BatchObjectsReply) + err := c.cc.Invoke(ctx, Weaviate_BatchObjects_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *weaviateClient) BatchReferences(ctx context.Context, in *BatchReferencesRequest, opts ...grpc.CallOption) (*BatchReferencesReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(BatchReferencesReply) + err := c.cc.Invoke(ctx, Weaviate_BatchReferences_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *weaviateClient) BatchDelete(ctx context.Context, in *BatchDeleteRequest, opts ...grpc.CallOption) (*BatchDeleteReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(BatchDeleteReply) + err := c.cc.Invoke(ctx, Weaviate_BatchDelete_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *weaviateClient) TenantsGet(ctx context.Context, in *TenantsGetRequest, opts ...grpc.CallOption) (*TenantsGetReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(TenantsGetReply) + err := c.cc.Invoke(ctx, Weaviate_TenantsGet_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *weaviateClient) Aggregate(ctx context.Context, in *AggregateRequest, opts ...grpc.CallOption) (*AggregateReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(AggregateReply) + err := c.cc.Invoke(ctx, Weaviate_Aggregate_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *weaviateClient) BatchSend(ctx context.Context, in *BatchSendRequest, opts ...grpc.CallOption) (*BatchSendReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(BatchSendReply) + err := c.cc.Invoke(ctx, Weaviate_BatchSend_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *weaviateClient) BatchStream(ctx context.Context, in *BatchStreamRequest, opts ...grpc.CallOption) (grpc.ServerStreamingClient[BatchStreamMessage], error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + stream, err := c.cc.NewStream(ctx, &Weaviate_ServiceDesc.Streams[0], Weaviate_BatchStream_FullMethodName, cOpts...) + if err != nil { + return nil, err + } + x := &grpc.GenericClientStream[BatchStreamRequest, BatchStreamMessage]{ClientStream: stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type Weaviate_BatchStreamClient = grpc.ServerStreamingClient[BatchStreamMessage] + +// WeaviateServer is the server API for Weaviate service. +// All implementations must embed UnimplementedWeaviateServer +// for forward compatibility. 
+type WeaviateServer interface { + Search(context.Context, *SearchRequest) (*SearchReply, error) + BatchObjects(context.Context, *BatchObjectsRequest) (*BatchObjectsReply, error) + BatchReferences(context.Context, *BatchReferencesRequest) (*BatchReferencesReply, error) + BatchDelete(context.Context, *BatchDeleteRequest) (*BatchDeleteReply, error) + TenantsGet(context.Context, *TenantsGetRequest) (*TenantsGetReply, error) + Aggregate(context.Context, *AggregateRequest) (*AggregateReply, error) + BatchSend(context.Context, *BatchSendRequest) (*BatchSendReply, error) + BatchStream(*BatchStreamRequest, grpc.ServerStreamingServer[BatchStreamMessage]) error + mustEmbedUnimplementedWeaviateServer() +} + +// UnimplementedWeaviateServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedWeaviateServer struct{} + +func (UnimplementedWeaviateServer) Search(context.Context, *SearchRequest) (*SearchReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method Search not implemented") +} +func (UnimplementedWeaviateServer) BatchObjects(context.Context, *BatchObjectsRequest) (*BatchObjectsReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method BatchObjects not implemented") +} +func (UnimplementedWeaviateServer) BatchReferences(context.Context, *BatchReferencesRequest) (*BatchReferencesReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method BatchReferences not implemented") +} +func (UnimplementedWeaviateServer) BatchDelete(context.Context, *BatchDeleteRequest) (*BatchDeleteReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method BatchDelete not implemented") +} +func (UnimplementedWeaviateServer) TenantsGet(context.Context, *TenantsGetRequest) (*TenantsGetReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method TenantsGet not implemented") +} 
+func (UnimplementedWeaviateServer) Aggregate(context.Context, *AggregateRequest) (*AggregateReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method Aggregate not implemented") +} +func (UnimplementedWeaviateServer) BatchSend(context.Context, *BatchSendRequest) (*BatchSendReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method BatchSend not implemented") +} +func (UnimplementedWeaviateServer) BatchStream(*BatchStreamRequest, grpc.ServerStreamingServer[BatchStreamMessage]) error { + return status.Errorf(codes.Unimplemented, "method BatchStream not implemented") +} +func (UnimplementedWeaviateServer) mustEmbedUnimplementedWeaviateServer() {} +func (UnimplementedWeaviateServer) testEmbeddedByValue() {} + +// UnsafeWeaviateServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to WeaviateServer will +// result in compilation errors. +type UnsafeWeaviateServer interface { + mustEmbedUnimplementedWeaviateServer() +} + +func RegisterWeaviateServer(s grpc.ServiceRegistrar, srv WeaviateServer) { + // If the following call pancis, it indicates UnimplementedWeaviateServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&Weaviate_ServiceDesc, srv) +} + +func _Weaviate_Search_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WeaviateServer).Search(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Weaviate_Search_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WeaviateServer).Search(ctx, req.(*SearchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Weaviate_BatchObjects_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchObjectsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WeaviateServer).BatchObjects(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Weaviate_BatchObjects_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WeaviateServer).BatchObjects(ctx, req.(*BatchObjectsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Weaviate_BatchReferences_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchReferencesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WeaviateServer).BatchReferences(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Weaviate_BatchReferences_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WeaviateServer).BatchReferences(ctx, 
req.(*BatchReferencesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Weaviate_BatchDelete_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchDeleteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WeaviateServer).BatchDelete(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Weaviate_BatchDelete_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WeaviateServer).BatchDelete(ctx, req.(*BatchDeleteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Weaviate_TenantsGet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TenantsGetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WeaviateServer).TenantsGet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Weaviate_TenantsGet_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WeaviateServer).TenantsGet(ctx, req.(*TenantsGetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Weaviate_Aggregate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AggregateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WeaviateServer).Aggregate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Weaviate_Aggregate_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WeaviateServer).Aggregate(ctx, req.(*AggregateRequest)) + } + return interceptor(ctx, in, info, handler) 
+} + +func _Weaviate_BatchSend_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchSendRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WeaviateServer).BatchSend(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Weaviate_BatchSend_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WeaviateServer).BatchSend(ctx, req.(*BatchSendRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Weaviate_BatchStream_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(BatchStreamRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(WeaviateServer).BatchStream(m, &grpc.GenericServerStream[BatchStreamRequest, BatchStreamMessage]{ServerStream: stream}) +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type Weaviate_BatchStreamServer = grpc.ServerStreamingServer[BatchStreamMessage] + +// Weaviate_ServiceDesc is the grpc.ServiceDesc for Weaviate service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Weaviate_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "weaviate.v1.Weaviate", + HandlerType: (*WeaviateServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Search", + Handler: _Weaviate_Search_Handler, + }, + { + MethodName: "BatchObjects", + Handler: _Weaviate_BatchObjects_Handler, + }, + { + MethodName: "BatchReferences", + Handler: _Weaviate_BatchReferences_Handler, + }, + { + MethodName: "BatchDelete", + Handler: _Weaviate_BatchDelete_Handler, + }, + { + MethodName: "TenantsGet", + Handler: _Weaviate_TenantsGet_Handler, + }, + { + MethodName: "Aggregate", + Handler: _Weaviate_Aggregate_Handler, + }, + { + MethodName: "BatchSend", + Handler: _Weaviate_BatchSend_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "BatchStream", + Handler: _Weaviate_BatchStream_Handler, + ServerStreams: true, + }, + }, + Metadata: "v1/weaviate.proto", +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v0/batch.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v0/batch.proto new file mode 100644 index 0000000000000000000000000000000000000000..8b57f95e6a162480f737fdb85f108ba8bef9f542 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v0/batch.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package weaviategrpc; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v0"; +option java_outer_classname = "WeaviateProtoBatch"; + +message BatchObjectsRequest { +} +message BatchObjectsReply { +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v0/search_get.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v0/search_get.proto new file mode 100644 index 0000000000000000000000000000000000000000..86b6c1d44976a75f34b53d2d73a968adaa57a0fd --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/grpc/proto/v0/search_get.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package weaviategrpc; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v0"; +option java_outer_classname = "WeaviateProtoSearchGet"; + +message SearchRequest { +} +message SearchReply { +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v0/weaviate.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v0/weaviate.proto new file mode 100644 index 0000000000000000000000000000000000000000..8797ccb7872689fe23cd9422e247a7a9bd219797 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v0/weaviate.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package weaviategrpc; + +import "v0/batch.proto"; +import "v0/search_get.proto"; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v0"; +option java_outer_classname = "WeaviateProto"; + +service Weaviate { + rpc Search(SearchRequest) returns (SearchReply) {}; + rpc BatchObjects(BatchObjectsRequest) returns (BatchObjectsReply) {}; +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v1/aggregate.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/aggregate.proto new file mode 100644 index 0000000000000000000000000000000000000000..49943935279487e66ef8c3786ce630b695591e7b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/aggregate.proto @@ -0,0 +1,205 @@ +syntax = "proto3"; + +package weaviate.v1; + +import "v1/base.proto"; +import "v1/base_search.proto"; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v1"; +option java_outer_classname = "WeaviateProtoAggregate"; + +message AggregateRequest { + message Aggregation { + message Integer { + bool count = 1; + bool type = 2; + bool sum = 3; + bool mean = 4; + 
bool mode = 5; + bool median = 6; + bool maximum = 7; + bool minimum = 8; + } + message Number { + bool count = 1; + bool type = 2; + bool sum = 3; + bool mean = 4; + bool mode = 5; + bool median = 6; + bool maximum = 7; + bool minimum = 8; + } + message Text { + bool count = 1; + bool type = 2; + bool top_occurences = 3; + optional uint32 top_occurences_limit = 4; + } + message Boolean { + bool count = 1; + bool type = 2; + bool total_true = 3; + bool total_false = 4; + bool percentage_true = 5; + bool percentage_false = 6; + } + message Date { + bool count = 1; + bool type = 2; + bool median = 3; + bool mode = 4; + bool maximum = 5; + bool minimum = 6; + } + message Reference { + bool type = 1; + bool pointing_to = 2; + } + string property = 1; + oneof aggregation { + Integer int = 2; + Number number = 3; + Text text = 4; + Boolean boolean = 5; + Date date = 6; + Reference reference = 7; + } + } + message GroupBy { + string collection = 1; + string property = 2; + } + // required + string collection = 1; + + // parameters + string tenant = 10; + + // what is returned + bool objects_count = 20; + repeated Aggregation aggregations = 21; + + // affects aggregation results + optional uint32 object_limit = 30; + optional GroupBy group_by = 31; + optional uint32 limit = 32; + + // matches/searches for objects + optional Filters filters = 40; + oneof search { + Hybrid hybrid = 41; + NearVector near_vector = 42; + NearObject near_object = 43; + NearTextSearch near_text = 44; + NearImageSearch near_image = 45; + NearAudioSearch near_audio = 46; + NearVideoSearch near_video = 47; + NearDepthSearch near_depth = 48; + NearThermalSearch near_thermal = 49; + NearIMUSearch near_imu = 50; + } +} + +message AggregateReply { + message Aggregations { + message Aggregation { + message Integer { + optional int64 count = 1; + optional string type = 2; + optional double mean = 3; + optional double median = 4; + optional int64 mode = 5; + optional int64 maximum = 6; + optional int64 
minimum = 7; + optional int64 sum = 8; + } + message Number { + optional int64 count = 1; + optional string type = 2; + optional double mean = 3; + optional double median = 4; + optional double mode = 5; + optional double maximum = 6; + optional double minimum = 7; + optional double sum = 8; + } + message Text { + message TopOccurrences { + message TopOccurrence { + string value = 1; + int64 occurs = 2; + } + repeated TopOccurrence items = 1; + } + optional int64 count = 1; + optional string type = 2; + optional TopOccurrences top_occurences = 3; + } + message Boolean { + optional int64 count = 1; + optional string type = 2; + optional int64 total_true = 3; + optional int64 total_false = 4; + optional double percentage_true = 5; + optional double percentage_false = 6; + } + message Date { + optional int64 count = 1; + optional string type = 2; + optional string median = 3; + optional string mode = 4; + optional string maximum = 5; + optional string minimum = 6; + } + message Reference { + optional string type = 1; + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + repeated string pointing_to = 2; + } + string property = 1; + oneof aggregation { + Integer int = 2; + Number number = 3; + Text text = 4; + Boolean boolean = 5; + Date date = 6; + Reference reference = 7; + } + } + repeated Aggregation aggregations = 1; + } + message Single { + optional int64 objects_count = 1; + optional Aggregations aggregations = 2; + } + message Group { + message GroupedBy { + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + repeated string path = 1; + oneof value { + string text = 2; + int64 int = 3; + bool boolean = 4; + double number = 5; + TextArray texts = 6; + IntArray ints = 7; + BooleanArray booleans = 8; + NumberArray numbers = 9; + GeoCoordinatesFilter geo = 10; + }; + } + optional int64 objects_count = 1; + optional Aggregations aggregations = 2; + optional GroupedBy grouped_by = 3; + } + message Grouped { + repeated Group groups = 1; + } + float took 
= 1; + oneof result { + Single single_result = 2; + Grouped grouped_results = 3; + }; +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v1/base.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/base.proto new file mode 100644 index 0000000000000000000000000000000000000000..85b4ca42ba6c91439a36abe124489b073031a449 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/base.proto @@ -0,0 +1,156 @@ +syntax = "proto3"; + +package weaviate.v1; +import "google/protobuf/struct.proto"; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v1"; +option java_outer_classname = "WeaviateProtoBase"; + +enum ConsistencyLevel { + CONSISTENCY_LEVEL_UNSPECIFIED = 0; + CONSISTENCY_LEVEL_ONE = 1; + CONSISTENCY_LEVEL_QUORUM = 2; + CONSISTENCY_LEVEL_ALL = 3; +} + +message NumberArrayProperties { + // will be removed in the future, use vector_bytes + // go client 5.4.1 depends on this field. 
Only remove after go client is deprecated + repeated double values = 1 [deprecated = true]; + string prop_name = 2; + bytes values_bytes = 3; +} + +message IntArrayProperties { + repeated int64 values = 1; + string prop_name = 2; +} + +message TextArrayProperties { + repeated string values = 1; + string prop_name = 2; +} + +message BooleanArrayProperties { + repeated bool values = 1; + string prop_name = 2; +} + +message ObjectPropertiesValue { + google.protobuf.Struct non_ref_properties = 1; + repeated NumberArrayProperties number_array_properties = 2; + repeated IntArrayProperties int_array_properties = 3; + repeated TextArrayProperties text_array_properties = 4; + repeated BooleanArrayProperties boolean_array_properties = 5; + repeated ObjectProperties object_properties = 6; + repeated ObjectArrayProperties object_array_properties = 7; + repeated string empty_list_props = 10; +} + +message ObjectArrayProperties { + repeated ObjectPropertiesValue values = 1; + string prop_name = 2; +} + +message ObjectProperties { + ObjectPropertiesValue value = 1; + string prop_name = 2; +} + + +message TextArray { + repeated string values = 1; +} + +message IntArray { + repeated int64 values = 1; +} + +message NumberArray { + repeated double values = 1; +} + +message BooleanArray { + repeated bool values = 1; +} + +message Filters { + enum Operator { + OPERATOR_UNSPECIFIED = 0; + OPERATOR_EQUAL = 1; + OPERATOR_NOT_EQUAL = 2; + OPERATOR_GREATER_THAN = 3; + OPERATOR_GREATER_THAN_EQUAL = 4; + OPERATOR_LESS_THAN = 5; + OPERATOR_LESS_THAN_EQUAL = 6; + OPERATOR_AND = 7; + OPERATOR_OR = 8; + OPERATOR_WITHIN_GEO_RANGE = 9; + OPERATOR_LIKE = 10; + OPERATOR_IS_NULL = 11; + OPERATOR_CONTAINS_ANY = 12; + OPERATOR_CONTAINS_ALL = 13; + OPERATOR_CONTAINS_NONE = 14; + OPERATOR_NOT = 15; + } + + Operator operator = 1; + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + repeated string on = 2 [deprecated = true]; // will be removed in the future, use path + repeated Filters filters = 
3; + oneof test_value { + string value_text = 4; + int64 value_int = 5; + bool value_boolean = 6; + double value_number = 7; + TextArray value_text_array = 9; + IntArray value_int_array = 10; + BooleanArray value_boolean_array = 11; + NumberArray value_number_array = 12; + GeoCoordinatesFilter value_geo = 13; + }; + FilterTarget target = 20; // leave space for more filter values +} + +message FilterReferenceSingleTarget { + string on = 1; + FilterTarget target = 2; +} + +message FilterReferenceMultiTarget { + string on = 1; + FilterTarget target = 2; + string target_collection = 3; +} + +message FilterReferenceCount { + string on = 1; +} + +message FilterTarget { + oneof target{ + string property = 1; + FilterReferenceSingleTarget single_target = 2; + FilterReferenceMultiTarget multi_target = 3; + FilterReferenceCount count = 4; + }; +} + +message GeoCoordinatesFilter { + float latitude = 1; + float longitude = 2; + float distance = 3; +} + +message Vectors { + enum VectorType { + VECTOR_TYPE_UNSPECIFIED = 0; + VECTOR_TYPE_SINGLE_FP32 = 1; + VECTOR_TYPE_MULTI_FP32 = 2; + } + string name = 1; + uint64 index = 2 [deprecated = true]; // for multi-vec + bytes vector_bytes = 3; + VectorType type = 4; +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v1/base_search.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/base_search.proto new file mode 100644 index 0000000000000000000000000000000000000000..55732017607b80d35c77242748a2714805ca8deb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/base_search.proto @@ -0,0 +1,165 @@ +syntax = "proto3"; + +package weaviate.v1; + +import "v1/base.proto"; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v1"; +option java_outer_classname = "WeaviateProtoBaseSearch"; + +enum CombinationMethod { + COMBINATION_METHOD_UNSPECIFIED = 0; + COMBINATION_METHOD_TYPE_SUM = 1; + COMBINATION_METHOD_TYPE_MIN = 2; + 
COMBINATION_METHOD_TYPE_AVERAGE = 3; + COMBINATION_METHOD_TYPE_RELATIVE_SCORE = 4; + COMBINATION_METHOD_TYPE_MANUAL = 5; +} + +message WeightsForTarget { + string target = 1; + float weight = 2; +} + +message Targets { + repeated string target_vectors = 1; + CombinationMethod combination = 2; + reserved 3; // was weights + repeated WeightsForTarget weights_for_targets = 4; +} + +message VectorForTarget { + string name = 1; + bytes vector_bytes = 2 [deprecated = true]; // deprecated in 1.29.0 - use vectors + repeated Vectors vectors = 3; +} + +message SearchOperatorOptions { + enum Operator { + OPERATOR_UNSPECIFIED = 0; + OPERATOR_OR = 1; + OPERATOR_AND = 2; + } + Operator operator = 1; + optional int32 minimum_or_tokens_match = 2; +} + +message Hybrid { + string query = 1; + repeated string properties = 2; + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + repeated float vector = 3 [deprecated = true]; // will be removed in the future, use vectors + float alpha = 4; + enum FusionType { + FUSION_TYPE_UNSPECIFIED = 0; + FUSION_TYPE_RANKED = 1; + FUSION_TYPE_RELATIVE_SCORE = 2; + } + FusionType fusion_type = 5; + bytes vector_bytes = 6 [deprecated = true]; // deprecated in 1.29.0 - use vectors + repeated string target_vectors = 7 [deprecated = true]; // deprecated in 1.26 - use targets + NearTextSearch near_text = 8; // targets in msg is ignored and should not be set for hybrid + NearVector near_vector = 9; // same as above. 
Use the target vector in the hybrid message + Targets targets = 10; + optional SearchOperatorOptions bm25_search_operator = 11; + + // only vector distance, but keep it extendable + oneof threshold { + float vector_distance = 20; + }; + + repeated Vectors vectors = 21; +} + +message NearVector { + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + repeated float vector = 1 [deprecated = true]; // will be removed in the future, use vectors + optional double certainty = 2; + optional double distance = 3; + bytes vector_bytes = 4 [deprecated = true]; // deprecated in 1.29.0 - use vectors + repeated string target_vectors = 5 [deprecated = true]; // deprecated in 1.26 - use targets + Targets targets = 6; + map vector_per_target = 7 [deprecated = true]; // deprecated in 1.26.2 - use vector_for_targets + repeated VectorForTarget vector_for_targets = 8; + repeated Vectors vectors = 9; +} + +message NearObject { + string id = 1; + optional double certainty = 2; + optional double distance = 3; + repeated string target_vectors = 4 [deprecated = true]; // deprecated in 1.26 - use targets + Targets targets = 5; +} + +message NearTextSearch { + message Move { + float force = 1; + repeated string concepts = 2; + repeated string uuids = 3; + } + + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + repeated string query = 1; + optional double certainty = 2; + optional double distance = 3; + optional Move move_to = 4; + optional Move move_away = 5; + repeated string target_vectors = 6 [deprecated = true]; // deprecated in 1.26 - use targets + Targets targets = 7; +}; + +message NearImageSearch { + string image = 1; + optional double certainty = 2; + optional double distance = 3; + repeated string target_vectors = 4 [deprecated = true]; // deprecated in 1.26 - use targets + Targets targets = 5; +}; + +message NearAudioSearch { + string audio = 1; + optional double certainty = 2; + optional double distance = 3; + repeated string target_vectors = 4 [deprecated = 
true]; // deprecated in 1.26 - use targets + Targets targets = 5; +}; + +message NearVideoSearch { + string video = 1; + optional double certainty = 2; + optional double distance = 3; + repeated string target_vectors = 4 [deprecated = true]; // deprecated in 1.26 - use targets + Targets targets = 5; +}; + +message NearDepthSearch { + string depth = 1; + optional double certainty = 2; + optional double distance = 3; + repeated string target_vectors = 4 [deprecated = true]; // deprecated in 1.26 - use targets + Targets targets = 5; +} + +message NearThermalSearch { + string thermal = 1; + optional double certainty = 2; + optional double distance = 3; + repeated string target_vectors = 4 [deprecated = true]; // deprecated in 1.26 - use targets + Targets targets = 5; +} + +message NearIMUSearch { + string imu = 1; + optional double certainty = 2; + optional double distance = 3; + repeated string target_vectors = 4 [deprecated = true]; // deprecated in 1.26 - use targets + Targets targets = 5; +} + +message BM25 { + string query = 1; + repeated string properties = 2; + optional SearchOperatorOptions search_operator = 3; +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v1/batch.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/batch.proto new file mode 100644 index 0000000000000000000000000000000000000000..72204577f675f6e423b5154e2a85d4e216621688 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/batch.proto @@ -0,0 +1,141 @@ +syntax = "proto3"; + +package weaviate.v1; + +import "google/protobuf/struct.proto"; +import "v1/base.proto"; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v1"; +option java_outer_classname = "WeaviateProtoBatch"; + +message BatchObjectsRequest { + repeated BatchObject objects = 1; + optional ConsistencyLevel consistency_level = 2; +} + +message BatchReferencesRequest { + repeated BatchReference references = 1; + 
optional ConsistencyLevel consistency_level = 2; +} + +message BatchSendRequest { + message Stop { + } + message Objects { + repeated BatchObject values = 1; + } + message References { + repeated BatchReference values = 1; + } + string stream_id = 1; + oneof message { + Objects objects = 2; + References references = 3; + Stop stop = 4; + } +} + +message BatchSendReply { + int32 next_batch_size = 1; + float backoff_seconds = 2; +} + +message BatchStreamRequest { + optional ConsistencyLevel consistency_level = 1; + optional int32 object_index = 2; + optional int32 reference_index = 3; +} + +message BatchStreamMessage { + message Start { + } + message Stop { + } + message Shutdown { + } + message ShuttingDown { + } + message Error { + string error = 1; + int32 index = 2; + bool is_retriable = 3; + bool is_object = 4; + bool is_reference = 5; + } + string stream_id = 1; + oneof message { + Error error = 2; + Start start = 3; + Stop stop = 4; + Shutdown shutdown = 5; + ShuttingDown shutting_down = 6; + } +} + +message BatchObject { + message Properties { + google.protobuf.Struct non_ref_properties = 1; + repeated SingleTargetRefProps single_target_ref_props = 2; + repeated MultiTargetRefProps multi_target_ref_props = 3; + repeated NumberArrayProperties number_array_properties = 4; + repeated IntArrayProperties int_array_properties = 5; + repeated TextArrayProperties text_array_properties = 6; + repeated BooleanArrayProperties boolean_array_properties = 7; + repeated ObjectProperties object_properties = 8; + repeated ObjectArrayProperties object_array_properties = 9; + // empty lists do not have a type in many languages and clients do not know which datatype the property has. 
+ // Weaviate can get the datatype from its schema + repeated string empty_list_props = 10; + } + + message SingleTargetRefProps { + repeated string uuids = 1; + string prop_name = 2; + } + + message MultiTargetRefProps { + repeated string uuids = 1; + string prop_name = 2; + string target_collection = 3; + } + + string uuid = 1; + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + repeated float vector = 2 [deprecated = true]; // deprecated, will be removed + Properties properties = 3; + string collection = 4; + string tenant = 5; + bytes vector_bytes = 6; + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + repeated Vectors vectors = 23; +} + +message BatchReference { + string name = 1; + string from_collection = 2; + string from_uuid = 3; + optional string to_collection = 4; + string to_uuid = 5; + string tenant = 6; +} + +message BatchObjectsReply { + message BatchError { + int32 index = 1; + string error = 2; + } + + float took = 1; + repeated BatchError errors = 2; +} + +message BatchReferencesReply { + message BatchError { + int32 index = 1; + string error = 2; + } + + float took = 1; + repeated BatchError errors = 2; +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v1/batch_delete.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/batch_delete.proto new file mode 100644 index 0000000000000000000000000000000000000000..d0a9f8e827c1dc621c10fbb54f82a2ee5146f761 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/batch_delete.proto @@ -0,0 +1,32 @@ +syntax = "proto3"; + +package weaviate.v1; + +import "v1/base.proto"; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v1"; +option java_outer_classname = "WeaviateProtoBatchDelete"; + +message BatchDeleteRequest { + string collection = 1; + Filters filters = 2; + bool verbose = 3; + bool dry_run = 4; + optional ConsistencyLevel consistency_level = 5; + optional string 
tenant = 6; +} + +message BatchDeleteReply { + float took = 1; + int64 failed = 2; + int64 matches = 3; + int64 successful = 4; + repeated BatchDeleteObject objects = 5; +} + +message BatchDeleteObject { + bytes uuid = 1; + bool successful = 2; + optional string error = 3; // empty string means no error +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v1/file_replication.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/file_replication.proto new file mode 100644 index 0000000000000000000000000000000000000000..9ae2e2cb6d5cbd915be803c23ca4abb516ce76e9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/file_replication.proto @@ -0,0 +1,85 @@ +syntax = "proto3"; + +package weaviate.v1; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v1"; +option java_outer_classname = "WeaviateProto"; + +enum CompressionType { + COMPRESSION_TYPE_UNSPECIFIED = 0; // No compression + COMPRESSION_TYPE_GZIP = 1; // gzip (compress/gzip) + COMPRESSION_TYPE_ZLIB = 2; // zlib (compress/zlib) + COMPRESSION_TYPE_DEFLATE = 3; // raw DEFLATE (compress/flate) +} + +service FileReplicationService { + rpc PauseFileActivity (PauseFileActivityRequest) returns (PauseFileActivityResponse); + + rpc ResumeFileActivity (ResumeFileActivityRequest) returns (ResumeFileActivityResponse); + + rpc ListFiles (ListFilesRequest) returns (ListFilesResponse); + + rpc GetFileMetadata (stream GetFileMetadataRequest) returns (stream FileMetadata); + + rpc GetFile (stream GetFileRequest) returns (stream FileChunk); +} + +message PauseFileActivityRequest { + string index_name = 1; + string shard_name = 2; + uint64 schema_version = 3; +} + +message PauseFileActivityResponse { + string index_name = 1; + string shard_name = 2; +} + +message ResumeFileActivityRequest { + string index_name = 1; + string shard_name = 2; +} + +message ResumeFileActivityResponse { + string index_name = 1; + string 
shard_name = 2; +} + +message ListFilesRequest { + string index_name = 1; + string shard_name = 2; +} + +message ListFilesResponse { + string index_name = 1; + string shard_name = 2; + repeated string file_names = 3; +} + +message GetFileMetadataRequest { + string index_name = 1; + string shard_name = 2; + string file_name = 3; +} + +message FileMetadata { + string index_name = 1; + string shard_name = 2; + string file_name = 3; + int64 size = 4; + uint32 crc32 = 5; +} + +message GetFileRequest { + string index_name = 1; + string shard_name = 2; + string file_name = 3; + CompressionType compression = 4; // Requested compression algorithm for streamed chunks +} + +message FileChunk { + int64 offset = 1; // Byte offset in the uncompressed file + bytes data = 2; // Compressed or raw chunk data + bool eof = 3; // Indicates final chunk +} \ No newline at end of file diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v1/generative.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/generative.proto new file mode 100644 index 0000000000000000000000000000000000000000..e2707ec8663d482227dd821eee32d53cafc13464 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/generative.proto @@ -0,0 +1,337 @@ +syntax = "proto3"; + +package weaviate.v1; + +import "v1/base.proto"; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v1"; +option java_outer_classname = "WeaviateProtoGenerative"; + +message GenerativeSearch { + message Single { + string prompt = 1; + bool debug = 2; + // only allow one at the beginning, but multiple in the future + repeated GenerativeProvider queries = 3; + } + + message Grouped { + string task = 1; + optional TextArray properties = 2; + // only allow one at the beginning, but multiple in the future + repeated GenerativeProvider queries = 3; + bool debug = 4; + } + + string single_response_prompt = 1 [deprecated = true]; + string 
grouped_response_task = 2 [deprecated = true]; + repeated string grouped_properties = 3 [deprecated = true]; + Single single = 4; + Grouped grouped = 5; +} + +message GenerativeProvider { + bool return_metadata = 1; + oneof kind { + GenerativeAnthropic anthropic = 2; + GenerativeAnyscale anyscale = 3; + GenerativeAWS aws = 4; + GenerativeCohere cohere = 5; + GenerativeDummy dummy = 6; + GenerativeMistral mistral = 7; + GenerativeOllama ollama = 8; + GenerativeOpenAI openai = 9; + GenerativeGoogle google = 10; + GenerativeDatabricks databricks = 11; + GenerativeFriendliAI friendliai = 12; + GenerativeNvidia nvidia = 13; + GenerativeXAI xai = 14; + } +} + +message GenerativeAnthropic{ + optional string base_url = 1; + optional int64 max_tokens = 2; + optional string model = 3; + optional double temperature = 4; + optional int64 top_k = 5; + optional double top_p = 6; + optional TextArray stop_sequences = 7; + optional TextArray images = 8; + optional TextArray image_properties = 9; +} + +message GenerativeAnyscale{ + optional string base_url = 1; + optional string model = 2; + optional double temperature = 3; +} + +message GenerativeAWS{ + optional string model = 3; + optional double temperature = 8; + optional string service = 9; + optional string region = 10; + optional string endpoint = 11; + optional string target_model = 12; + optional string target_variant = 13; + optional TextArray images = 14; + optional TextArray image_properties = 15; +} + +message GenerativeCohere{ + optional string base_url = 1; + optional double frequency_penalty = 2; + optional int64 max_tokens = 3; + optional string model = 4; + optional int64 k = 5; + optional double p = 6; + optional double presence_penalty = 7; + optional TextArray stop_sequences = 8; + optional double temperature = 9; +} + +message GenerativeDummy{ +} + +message GenerativeMistral{ + optional string base_url = 1; + optional int64 max_tokens = 2; + optional string model = 3; + optional double temperature = 4; + 
optional double top_p = 5; +} + +message GenerativeOllama{ + optional string api_endpoint = 1; + optional string model = 2; + optional double temperature = 3; + optional TextArray images = 4; + optional TextArray image_properties = 5; +} + +message GenerativeOpenAI{ + optional double frequency_penalty = 1; + optional int64 max_tokens = 2; + optional string model = 3; + optional int64 n = 4; + optional double presence_penalty = 5; + optional TextArray stop = 6; + optional double temperature = 7; + optional double top_p = 8; + optional string base_url = 9; + optional string api_version = 10; + optional string resource_name = 11; + optional string deployment_id = 12; + optional bool is_azure = 13; + optional TextArray images = 14; + optional TextArray image_properties = 15; +} + +message GenerativeGoogle{ + optional double frequency_penalty = 1; + optional int64 max_tokens = 2; + optional string model = 3; + optional double presence_penalty = 4; + optional double temperature = 5; + optional int64 top_k = 6; + optional double top_p = 7; + optional TextArray stop_sequences = 8; + optional string api_endpoint = 9; + optional string project_id = 10; + optional string endpoint_id = 11; + optional string region = 12; + optional TextArray images = 13; + optional TextArray image_properties = 14; +} + +message GenerativeDatabricks{ + optional string endpoint = 1; + optional string model = 2; + optional double frequency_penalty = 3; + optional bool log_probs = 4; + optional int64 top_log_probs = 5; + optional int64 max_tokens = 6; + optional int64 n = 7; + optional double presence_penalty = 8; + optional TextArray stop = 9; + optional double temperature = 10; + optional double top_p = 11; +} + +message GenerativeFriendliAI{ + optional string base_url = 1; + optional string model = 2; + optional int64 max_tokens = 3; + optional double temperature = 4; + optional int64 n = 5; + optional double top_p = 6; +} + +message GenerativeNvidia{ + optional string base_url = 1; + optional 
string model = 2; + optional double temperature = 3; + optional double top_p = 4; + optional int64 max_tokens = 5; +} + +message GenerativeXAI{ + optional string base_url = 1; + optional string model = 2; + optional double temperature = 3; + optional double top_p = 4; + optional int64 max_tokens = 5; + optional TextArray images = 6; + optional TextArray image_properties = 7; +} + +message GenerativeAnthropicMetadata { + message Usage { + int64 input_tokens = 1; + int64 output_tokens = 2; + } + Usage usage = 1; +} + +message GenerativeAnyscaleMetadata { +} + +message GenerativeAWSMetadata { +} + +message GenerativeCohereMetadata { + message ApiVersion { + optional string version = 1; + optional bool is_deprecated = 2; + optional bool is_experimental = 3; + } + message BilledUnits { + optional double input_tokens = 1; + optional double output_tokens = 2; + optional double search_units = 3; + optional double classifications = 4; + } + message Tokens { + optional double input_tokens = 1; + optional double output_tokens = 2; + } + optional ApiVersion api_version = 1; + optional BilledUnits billed_units = 2; + optional Tokens tokens = 3; + optional TextArray warnings = 4; +} + +message GenerativeDummyMetadata { +} + +message GenerativeMistralMetadata { + message Usage { + optional int64 prompt_tokens = 1; + optional int64 completion_tokens = 2; + optional int64 total_tokens = 3; + } + optional Usage usage = 1; +} + +message GenerativeOllamaMetadata { +} + +message GenerativeOpenAIMetadata { + message Usage { + optional int64 prompt_tokens = 1; + optional int64 completion_tokens = 2; + optional int64 total_tokens = 3; + } + optional Usage usage = 1; +} + +message GenerativeGoogleMetadata { + message TokenCount { + optional int64 total_billable_characters = 1; + optional int64 total_tokens = 2; + } + message TokenMetadata { + optional TokenCount input_token_count = 1; + optional TokenCount output_token_count = 2; + } + message Metadata { + optional TokenMetadata 
token_metadata = 1; + } + message UsageMetadata { + optional int64 prompt_token_count = 1; + optional int64 candidates_token_count = 2; + optional int64 total_token_count = 3; + } + optional Metadata metadata = 1; + optional UsageMetadata usage_metadata = 2; +} + +message GenerativeDatabricksMetadata { + message Usage { + optional int64 prompt_tokens = 1; + optional int64 completion_tokens = 2; + optional int64 total_tokens = 3; + } + optional Usage usage = 1; +} + +message GenerativeFriendliAIMetadata { + message Usage { + optional int64 prompt_tokens = 1; + optional int64 completion_tokens = 2; + optional int64 total_tokens = 3; + } + optional Usage usage = 1; +} + +message GenerativeNvidiaMetadata { + message Usage { + optional int64 prompt_tokens = 1; + optional int64 completion_tokens = 2; + optional int64 total_tokens = 3; + } + optional Usage usage = 1; +} + +message GenerativeXAIMetadata { + message Usage { + optional int64 prompt_tokens = 1; + optional int64 completion_tokens = 2; + optional int64 total_tokens = 3; + } + optional Usage usage = 1; +} + +message GenerativeMetadata { + oneof kind { + GenerativeAnthropicMetadata anthropic = 1; + GenerativeAnyscaleMetadata anyscale = 2; + GenerativeAWSMetadata aws = 3; + GenerativeCohereMetadata cohere = 4; + GenerativeDummyMetadata dummy = 5; + GenerativeMistralMetadata mistral = 6; + GenerativeOllamaMetadata ollama = 7; + GenerativeOpenAIMetadata openai = 8; + GenerativeGoogleMetadata google = 9; + GenerativeDatabricksMetadata databricks = 10; + GenerativeFriendliAIMetadata friendliai = 11; + GenerativeNvidiaMetadata nvidia = 12; + GenerativeXAIMetadata xai = 13; + } +} + +message GenerativeReply { + string result = 1; + optional GenerativeDebug debug = 2; + optional GenerativeMetadata metadata = 3; +} + +message GenerativeResult { + repeated GenerativeReply values = 1; +} + +message GenerativeDebug { + optional string full_prompt = 1; +} diff --git 
a/platform/dbops/binaries/weaviate-src/grpc/proto/v1/properties.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/properties.proto new file mode 100644 index 0000000000000000000000000000000000000000..ed7fa23aad556039ddedf7e4d71e9b7e707548b6 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/properties.proto @@ -0,0 +1,95 @@ +syntax = "proto3"; + +package weaviate.v1; + +import "google/protobuf/struct.proto"; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v1"; +option java_outer_classname = "WeaviateProtoProperties"; + +message Properties { + map fields = 1; +} + +message Value { + oneof kind { + double number_value = 1; + //dont reuse 2, old field that has been removed; Was "string string_value = 2;" + bool bool_value = 3; + Properties object_value = 4; + ListValue list_value = 5; + string date_value = 6; + string uuid_value = 7; + int64 int_value = 8; + GeoCoordinate geo_value = 9; + string blob_value = 10; + PhoneNumber phone_value = 11; + google.protobuf.NullValue null_value = 12; + string text_value = 13; + } +} + +message ListValue { + reserved 1; // was values + oneof kind { + NumberValues number_values = 2; + BoolValues bool_values = 3; + ObjectValues object_values = 4; + DateValues date_values = 5; + UuidValues uuid_values = 6; + IntValues int_values = 7; + TextValues text_values = 8; + } +} + +message NumberValues { + /** + * The values are stored as a byte array, where each 8 bytes represent a single float64 value. + * The byte array is stored in little-endian order using uint64 encoding. 
+ */ + bytes values = 1; +} + +message TextValues { + repeated string values = 1; +} + +message BoolValues { + repeated bool values = 1; +} + +message ObjectValues { + repeated Properties values = 1; +} + +message DateValues { + repeated string values = 1; +} + +message UuidValues { + repeated string values = 1; +} + +message IntValues { + /** + * The values are stored as a byte array, where each 8 bytes represent a single int64 value. + * The byte array is stored in little-endian order using uint64 encoding. + */ + bytes values = 1; +} + +message GeoCoordinate { + float longitude = 1; + float latitude = 2; +} + +message PhoneNumber { + uint64 country_code = 1; + string default_country = 2; + string input = 3; + string international_formatted = 4; + uint64 national = 5; + string national_formatted = 6; + bool valid = 7; +} \ No newline at end of file diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v1/search_get.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/search_get.proto new file mode 100644 index 0000000000000000000000000000000000000000..b19968af9ec17528626eb11724f0bc0a3be9079a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/search_get.proto @@ -0,0 +1,188 @@ +syntax = "proto3"; + +package weaviate.v1; + +import "v1/base.proto"; +import "v1/base_search.proto"; +import "v1/generative.proto"; +import "v1/properties.proto"; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v1"; +option java_outer_classname = "WeaviateProtoSearchGet"; + +message SearchRequest { + //required + string collection = 1; + + // parameters + string tenant = 10; + optional ConsistencyLevel consistency_level = 11; + + // what is returned + optional PropertiesRequest properties = 20; + optional MetadataRequest metadata = 21; + optional GroupBy group_by = 22; + + // affects order and length of results. 
0/empty (default value) means disabled + uint32 limit = 30; + uint32 offset = 31; + uint32 autocut = 32; + string after = 33; + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + repeated SortBy sort_by = 34; + + // matches/searches for objects + optional Filters filters = 40; + optional Hybrid hybrid_search = 41; + optional BM25 bm25_search = 42; + optional NearVector near_vector = 43; + optional NearObject near_object = 44; + optional NearTextSearch near_text = 45; + optional NearImageSearch near_image = 46; + optional NearAudioSearch near_audio = 47; + optional NearVideoSearch near_video = 48; + optional NearDepthSearch near_depth = 49; + optional NearThermalSearch near_thermal = 50; + optional NearIMUSearch near_imu = 51; + + optional GenerativeSearch generative = 60; + optional Rerank rerank = 61; + + bool uses_123_api = 100 [deprecated = true]; + bool uses_125_api = 101 [deprecated = true]; + bool uses_127_api = 102; +} + +message GroupBy { + // currently only supports one entry (eg just properties, no refs). But might + // be extended in the future. + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + repeated string path = 1; + int32 number_of_groups = 2; + int32 objects_per_group = 3; +} + +message SortBy { + bool ascending = 1; + // currently only supports one entry (eg just properties, no refs). 
But the + // weaviate datastructure already has paths in it and this makes it easily + // extendable in the future + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + repeated string path = 2; +} + +message MetadataRequest { + bool uuid = 1; + bool vector = 2; + bool creation_time_unix = 3; + bool last_update_time_unix = 4; + bool distance = 5; + bool certainty = 6; + bool score = 7; + bool explain_score = 8; + bool is_consistent = 9; + repeated string vectors = 10; +} + +message PropertiesRequest { + repeated string non_ref_properties = 1; + repeated RefPropertiesRequest ref_properties = 2; + repeated ObjectPropertiesRequest object_properties = 3; + bool return_all_nonref_properties = 11; +} + +message ObjectPropertiesRequest { + string prop_name = 1; + repeated string primitive_properties = 2; + repeated ObjectPropertiesRequest object_properties = 3; +} + +message RefPropertiesRequest { + string reference_property = 1; + PropertiesRequest properties = 2; + MetadataRequest metadata = 3; + string target_collection = 4; +} + +message Rerank { + string property = 1; + optional string query = 2; +} + +message SearchReply { + float took = 1; + repeated SearchResult results = 2; + optional string generative_grouped_result = 3 [deprecated = true]; + repeated GroupByResult group_by_results = 4; + optional GenerativeResult generative_grouped_results = 5; +} + +message RerankReply { + double score = 1; +} + +message GroupByResult { + string name = 1; + float min_distance = 2; + float max_distance = 3; + int64 number_of_objects = 4; + repeated SearchResult objects = 5; + optional RerankReply rerank = 6; + optional GenerativeReply generative = 7 [deprecated = true]; + optional GenerativeResult generative_result = 8; +} + +message SearchResult { + PropertiesResult properties = 1; + MetadataResult metadata = 2; + optional GenerativeResult generative = 3; +} + + +message MetadataResult { + string id = 1; + // protolint:disable:next REPEATED_FIELD_NAMES_PLURALIZED + 
repeated float vector = 2 [deprecated = true]; + int64 creation_time_unix = 3; + bool creation_time_unix_present = 4; + int64 last_update_time_unix = 5; + bool last_update_time_unix_present = 6; + float distance = 7; + bool distance_present = 8; + float certainty = 9; + bool certainty_present = 10; + float score = 11; + bool score_present = 12; + string explain_score = 13; + bool explain_score_present = 14; + optional bool is_consistent = 15; + string generative = 16 [deprecated = true]; + bool generative_present = 17 [deprecated = true]; + bool is_consistent_present = 18; + bytes vector_bytes = 19; + bytes id_as_bytes = 20; + double rerank_score = 21; + bool rerank_score_present = 22; + repeated Vectors vectors = 23; +} + +message PropertiesResult { + reserved 1; // was non_ref_properties + repeated RefPropertiesResult ref_props = 2; + string target_collection = 3; + MetadataResult metadata = 4; + reserved 5; // was number_array_properties + reserved 6; // was int_array_properties + reserved 7; // was text_array_properties + reserved 8; // was boolean_array_properties + reserved 9; // was object_properties + reserved 10; // was object_array_properties + Properties non_ref_props = 11; + bool ref_props_requested = 12; +} + +message RefPropertiesResult { + repeated PropertiesResult properties = 1; + string prop_name = 2; +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v1/tenants.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/tenants.proto new file mode 100644 index 0000000000000000000000000000000000000000..2d39c6c98db1395ee391b0a07e16bce9776a7a1b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/tenants.proto @@ -0,0 +1,47 @@ +syntax = "proto3"; + +package weaviate.v1; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v1"; +option java_outer_classname = "WeaviateProtoTenants"; + +enum TenantActivityStatus { + 
TENANT_ACTIVITY_STATUS_UNSPECIFIED = 0; + TENANT_ACTIVITY_STATUS_HOT = 1; + TENANT_ACTIVITY_STATUS_COLD = 2; + // Originally TENANT_ACTIVITY_STATUS_WARM = 3; + reserved 3; + TENANT_ACTIVITY_STATUS_FROZEN = 4; + TENANT_ACTIVITY_STATUS_UNFREEZING = 5; + TENANT_ACTIVITY_STATUS_FREEZING = 6; + // not used yet - added to let the clients already add code to handle this in the future + TENANT_ACTIVITY_STATUS_ACTIVE = 7; + TENANT_ACTIVITY_STATUS_INACTIVE = 8; + TENANT_ACTIVITY_STATUS_OFFLOADED = 9; + TENANT_ACTIVITY_STATUS_OFFLOADING = 10; + TENANT_ACTIVITY_STATUS_ONLOADING = 11; +} + +message TenantsGetRequest { + string collection = 1; + // we might need to add a tenant-cursor api at some point, make this easily extendable + oneof params { + TenantNames names = 2; + }; +} + +message TenantNames { + repeated string values = 1; + +} + +message TenantsGetReply { + float took = 1; + repeated Tenant tenants = 2; +} + +message Tenant { + string name = 1; + TenantActivityStatus activity_status = 2; +} diff --git a/platform/dbops/binaries/weaviate-src/grpc/proto/v1/weaviate.proto b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/weaviate.proto new file mode 100644 index 0000000000000000000000000000000000000000..1e074c4a05ab10cf25dbbf728ae42c202079f011 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/grpc/proto/v1/weaviate.proto @@ -0,0 +1,24 @@ +syntax = "proto3"; + +package weaviate.v1; + +import "v1/aggregate.proto"; +import "v1/batch.proto"; +import "v1/batch_delete.proto"; +import "v1/search_get.proto"; +import "v1/tenants.proto"; + +option go_package = "github.com/weaviate/weaviate/grpc/generated;protocol"; +option java_package = "io.weaviate.client.grpc.protocol.v1"; +option java_outer_classname = "WeaviateProto"; + +service Weaviate { + rpc Search(SearchRequest) returns (SearchReply) {}; + rpc BatchObjects(BatchObjectsRequest) returns (BatchObjectsReply) {}; + rpc BatchReferences(BatchReferencesRequest) returns (BatchReferencesReply) {}; + rpc 
BatchDelete(BatchDeleteRequest) returns (BatchDeleteReply) {}; + rpc TenantsGet(TenantsGetRequest) returns (TenantsGetReply) {}; + rpc Aggregate(AggregateRequest) returns (AggregateReply) {}; + rpc BatchSend(BatchSendRequest) returns (BatchSendReply) {}; + rpc BatchStream(BatchStreamRequest) returns (stream BatchStreamMessage) {}; +} diff --git a/platform/dbops/binaries/weaviate-src/modules/backup-azure/backup_test.go b/platform/dbops/binaries/weaviate-src/modules/backup-azure/backup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..315583ecc01e233de4756acb1d424d2565c4462e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/backup-azure/backup_test.go @@ -0,0 +1,168 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modstgazure + +import ( + "context" + "os" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/entities/moduletools" +) + +// Test user overrides +func TestUploadParams(t *testing.T) { + defaultBlockSize := int64(40 * 1024 * 1024) + defaultEnvironmentValue := int64(11) + defaultHeaderValue := int64(13) + testCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + azure := New() + os.Setenv("BACKUP_AZURE_CONTAINER", "test") + os.Setenv("AZURE_STORAGE_ACCOUNT", "test") + + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetLogger().Return(logrus.New()) + params.EXPECT().GetStorageProvider().Return(&fakeStorageProvider{dataPath: t.TempDir()}) + err := azure.Init(testCtx, params) + require.Nil(t, err) + + t.Run("getBlockSize with no inputs", func(t *testing.T) { + blockSize := azure.getBlockSize(testCtx) 
+ assert.Equal(t, defaultBlockSize, blockSize) + }) + + t.Run("getBlockSize with environment variable", func(t *testing.T) { + t.Setenv("AZURE_BLOCK_SIZE", "11") + azure := New() + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetLogger().Return(logrus.New()) + params.EXPECT().GetStorageProvider().Return(&fakeStorageProvider{dataPath: t.TempDir()}) + err := azure.Init(testCtx, params) + assert.Nil(t, err) + + blockSize := azure.getBlockSize(testCtx) + assert.Equal(t, defaultEnvironmentValue, blockSize) + }) + + t.Run("getBlockSize with invalid environment variable", func(t *testing.T) { + t.Setenv("AZURE_BLOCK_SIZE", "invalid") + azure := New() + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetLogger().Return(logrus.New()) + params.EXPECT().GetStorageProvider().Return(&fakeStorageProvider{dataPath: t.TempDir()}) + err := azure.Init(testCtx, params) + assert.Nil(t, err) + + blockSize := azure.getBlockSize(testCtx) + assert.Equal(t, defaultBlockSize, blockSize) + }) + + t.Run("getBlockSize with header", func(t *testing.T) { + ctxWithValue := context.WithValue(context.Background(), + "X-Azure-Block-Size", []string{"13"}) + + blockSize := azure.getBlockSize(ctxWithValue) + assert.Equal(t, defaultHeaderValue, blockSize) + }) + + t.Run("getBlockSize with invalid header", func(t *testing.T) { + ctxWithValue := context.WithValue(context.Background(), + "X-Azure-Block-Size", []string{"invalid"}) + + blockSize := azure.getBlockSize(ctxWithValue) + assert.Equal(t, defaultBlockSize, blockSize) + }) + + t.Run("getBlockSize with environment variable and header", func(t *testing.T) { + t.Setenv("AZURE_BLOCK_SIZE", "11") + ctxWithValue := context.WithValue(context.Background(), + "X-Azure-Block-Size", []string{"13"}) + + blockSize := azure.getBlockSize(ctxWithValue) + assert.Equal(t, defaultHeaderValue, blockSize) + }) + + t.Run("getConcurrency with no inputs", func(t *testing.T) { + concurrency := azure.getConcurrency(testCtx) + 
assert.Equal(t, 1, concurrency) + }) + + t.Run("getConcurrency with environment variable", func(t *testing.T) { + t.Setenv("AZURE_CONCURRENCY", "11") + azure := New() + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetLogger().Return(logrus.New()) + params.EXPECT().GetStorageProvider().Return(&fakeStorageProvider{dataPath: t.TempDir()}) + err := azure.Init(testCtx, params) + assert.Nil(t, err) + + concurrency := azure.getConcurrency(testCtx) + assert.Equal(t, defaultEnvironmentValue, int64(concurrency)) + }) + + t.Run("getConcurrency with invalid environment variable", func(t *testing.T) { + t.Setenv("AZURE_CONCURRENCY", "invalid") + azure := New() + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetLogger().Return(logrus.New()) + params.EXPECT().GetStorageProvider().Return(&fakeStorageProvider{dataPath: t.TempDir()}) + err := azure.Init(testCtx, params) + assert.Nil(t, err) + + concurrency := azure.getConcurrency(testCtx) + assert.Equal(t, 1, concurrency) + }) + + t.Run("getConcurrency with header", func(t *testing.T) { + ctxWithValue := context.WithValue(context.Background(), + "X-Azure-Concurrency", []string{"13"}) + + concurrency := azure.getConcurrency(ctxWithValue) + assert.Equal(t, defaultHeaderValue, int64(concurrency)) + }) + + t.Run("getConcurrency with invalid header", func(t *testing.T) { + ctxWithValue := context.WithValue(context.Background(), + "X-Azure-Concurrency", []string{"invalid"}) + + concurrency := azure.getConcurrency(ctxWithValue) + assert.Equal(t, 1, concurrency) + }) + + t.Run("getConcurrency with environment variable and header", func(t *testing.T) { + t.Setenv("AZURE_CONCURRENCY", "11") + ctxWithValue := context.WithValue(context.Background(), + "X-Azure-Concurrency", []string{"13"}) + + concurrency := azure.getConcurrency(ctxWithValue) + assert.Equal(t, defaultHeaderValue, int64(concurrency)) + }) +} + +type fakeStorageProvider struct { + dataPath string +} + +func (f *fakeStorageProvider) 
Storage(name string) (moduletools.Storage, error) { + return nil, nil +} + +func (f *fakeStorageProvider) DataPath() string { + return f.dataPath +} diff --git a/platform/dbops/binaries/weaviate-src/modules/backup-azure/client.go b/platform/dbops/binaries/weaviate-src/modules/backup-azure/client.go new file mode 100644 index 0000000000000000000000000000000000000000..d26270bbc0ca44d3044a723b46528024a17f2b1d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/backup-azure/client.go @@ -0,0 +1,382 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modstgazure + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "os" + "path" + "strconv" + "strings" + "time" + + "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" + "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob" + "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/bloberror" + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/backup" + ubak "github.com/weaviate/weaviate/usecases/backup" + "github.com/weaviate/weaviate/usecases/modulecomponents" +) + +const ( + defaultBlockSize = int64(40 * 1024 * 1024) + defaultConcurrency = 1 +) + +type azureClient struct { + client *azblob.Client + config clientConfig + serviceURL string + dataPath string +} + +func newClient(ctx context.Context, config *clientConfig, dataPath string) (*azureClient, error) { + connectionString := os.Getenv("AZURE_STORAGE_CONNECTION_STRING") + if connectionString != "" { + client, err := azblob.NewClientFromConnectionString(connectionString, nil) + if err != nil { + return nil, errors.Wrap(err, "create client using connection string") + } + serviceURL := "" + connectionStrings := 
strings.Split(connectionString, ";")
	for _, str := range connectionStrings {
		if strings.HasPrefix(str, "BlobEndpoint") {
			// Cut on the FIRST '=' only: the endpoint URL itself may contain
			// '=' characters (e.g. SAS token query parameters), which
			// strings.Split would truncate at.
			if _, value, ok := strings.Cut(str, "="); ok {
				serviceURL = value
				if !strings.HasSuffix(serviceURL, "/") {
					serviceURL += "/"
				}
			}
		}
	}
	return &azureClient{client, *config, serviceURL, dataPath}, nil
	}

	// Your account name and key can be obtained from the Azure Portal.
	accountName := os.Getenv("AZURE_STORAGE_ACCOUNT")
	accountKey := os.Getenv("AZURE_STORAGE_KEY")

	if accountName == "" {
		return nil, errors.New("AZURE_STORAGE_ACCOUNT must be set")
	}

	// The service URL for blob endpoints is usually in the form:
	// http(s)://<account>.blob.core.windows.net/
	serviceURL := fmt.Sprintf("https://%s.blob.core.windows.net/", accountName)

	if accountKey != "" {
		cred, err := azblob.NewSharedKeyCredential(accountName, accountKey)
		if err != nil {
			return nil, err
		}

		client, err := azblob.NewClientWithSharedKeyCredential(serviceURL, cred, nil)
		if err != nil {
			return nil, err
		}
		return &azureClient{client, *config, serviceURL, dataPath}, nil
	}

	// No key: fall back to anonymous access with a bounded retry policy.
	options := &azblob.ClientOptions{
		ClientOptions: policy.ClientOptions{
			Retry: policy.RetryOptions{
				MaxRetries:    3,
				RetryDelay:    4 * time.Second,
				MaxRetryDelay: 120 * time.Second,
			},
		},
	}

	client, err := azblob.NewClientWithNoCredential(serviceURL, options)
	if err != nil {
		return nil, err
	}
	return &azureClient{client, *config, serviceURL, dataPath}, nil
}

// HomeDir returns the destination URL for the given backup ID, honoring an
// optional container override.
func (a *azureClient) HomeDir(backupID, overrideBucket, overridePath string) string {
	if overrideBucket == "" {
		overrideBucket = a.config.Container
	}

	return a.serviceURL + path.Join(overrideBucket, a.makeObjectName(overridePath, []string{backupID}))
}

// makeObjectName builds the blob name from the configured backup path (or an
// explicit override) plus the given path parts.
func (g *azureClient) makeObjectName(overridePath string, parts []string) string {
	if overridePath != "" {
		base := path.Join(parts...)
+ return path.Join(overridePath, base) + } else { + base := path.Join(parts...) + return path.Join(g.config.BackupPath, base) + } +} + +func (a *azureClient) AllBackups(ctx context.Context) ([]*backup.DistributedBackupDescriptor, error) { + var meta []*backup.DistributedBackupDescriptor + + blobs := a.client.NewListBlobsFlatPager(a.config.Container, &azblob.ListBlobsFlatOptions{Prefix: to.Ptr(a.config.BackupPath)}) + for { + if !blobs.More() { + break + } + blob, err := blobs.NextPage(ctx) + if err != nil { + return nil, fmt.Errorf("get next blob: %w", err) + } + + if blob.ListBlobsFlatSegmentResponse.Segment != nil { + for _, item := range blob.ListBlobsFlatSegmentResponse.Segment.BlobItems { + if item.Name == nil || !strings.Contains(*item.Name, ubak.GlobalBackupFile) { + continue + } + + // now we have ubak.GlobalBackupFile + contents, err := a.getObject(ctx, a.config.Container, *item.Name) + if err != nil { + return nil, fmt.Errorf("get blob item %q: %w", *item.Name, err) + } + var desc backup.DistributedBackupDescriptor + if err := json.Unmarshal(contents, &desc); err != nil { + return nil, fmt.Errorf("unmarshal blob item %q: %w", *item.Name, err) + } + meta = append(meta, &desc) + } + } + } + + return meta, nil +} + +func (a *azureClient) GetObject(ctx context.Context, backupID, key, overrideBucket, overridePath string) ([]byte, error) { + objectName := a.makeObjectName(overridePath, []string{backupID, key}) + + containerName := a.config.Container + if overrideBucket != "" { + containerName = overrideBucket + } + + return a.getObject(ctx, containerName, objectName) +} + +func (a *azureClient) getObject(ctx context.Context, containerName, objectName string) ([]byte, error) { + blobDownloadResponse, err := a.client.DownloadStream(ctx, containerName, objectName, nil) + if err != nil { + if bloberror.HasCode(err, bloberror.BlobNotFound) { + return nil, backup.NewErrNotFound(errors.Wrapf(err, "get object %s", objectName)) + } + return nil, 
backup.NewErrInternal(errors.Wrapf(err, "download stream for object %s", objectName))
	}

	reader := blobDownloadResponse.Body
	downloadData, err := io.ReadAll(reader)
	errClose := reader.Close()
	if errClose != nil {
		return nil, backup.NewErrInternal(errors.Wrapf(errClose, "close stream for object %s", objectName))
	}
	if err != nil {
		return nil, backup.NewErrInternal(errors.Wrapf(err, "read stream for object %s", objectName))
	}

	return downloadData, nil
}

// PutObject uploads data as blob <backupID>/<key>, tagging it with the backup
// ID, honoring optional container and path overrides.
func (a *azureClient) PutObject(ctx context.Context, backupID, key, overrideBucket, overridePath string, data []byte) error {
	objectName := a.makeObjectName(overridePath, []string{backupID, key})

	containerName := a.config.Container
	if overrideBucket != "" {
		containerName = overrideBucket
	}

	reader := bytes.NewReader(data)
	_, err := a.client.UploadStream(ctx,
		containerName,
		objectName,
		reader,
		&azblob.UploadStreamOptions{
			Metadata:    map[string]*string{"backupid": to.Ptr(backupID)},
			Tags:        map[string]string{"backupid": backupID},
			BlockSize:   a.getBlockSize(ctx),
			Concurrency: a.getConcurrency(ctx),
		})
	if err != nil {
		return backup.NewErrInternal(errors.Wrapf(err, "upload stream for object %s", objectName))
	}

	return nil
}

// Initialize verifies the backend is writable by uploading and then deleting
// a small access-check object.
func (a *azureClient) Initialize(ctx context.Context, backupID, overrideBucket, overridePath string) error {
	key := "access-check"

	if err := a.PutObject(ctx, backupID, key, overrideBucket, overridePath, []byte("")); err != nil {
		return errors.Wrap(err, "failed to access-check Azure backup module")
	}

	containerName := a.config.Container
	if overrideBucket != "" {
		containerName = overrideBucket
	}

	objectName := a.makeObjectName(overridePath, []string{backupID, key})
	if _, err := a.client.DeleteBlob(ctx, containerName, objectName, nil); err != nil {
		// message previously read "…module at<objectName>" — space was missing
		return errors.Wrap(err, "failed to remove access-check Azure backup module at "+objectName)
	}

	return nil
}

func (a *azureClient) WriteToFile(ctx
context.Context, backupID, key, destPath, overrideBucket, overridePath string) error {
	dir := path.Dir(destPath)
	if err := os.MkdirAll(dir, os.ModePerm); err != nil {
		return errors.Wrapf(err, "make dir %s", dir)
	}

	file, err := os.Create(destPath)
	if err != nil {
		return backup.NewErrInternal(errors.Wrapf(err, "create file: %q", destPath))
	}
	defer file.Close()

	containerName := a.config.Container
	if overrideBucket != "" {
		containerName = overrideBucket
	}

	objectName := a.makeObjectName(overridePath, []string{backupID, key})
	if _, err = a.client.DownloadFile(ctx, containerName, objectName, file, nil); err != nil {
		if bloberror.HasCode(err, bloberror.BlobNotFound) {
			return backup.NewErrNotFound(errors.Wrapf(err, "get object %s", objectName))
		}
		return backup.NewErrInternal(errors.Wrapf(err, "download file for object %s", objectName))
	}

	return nil
}

// getBlockSize resolves the upload block size from the X-Azure-Block-Size
// request header, then the AZURE_BLOCK_SIZE env var; absence or a parse
// failure yields defaultBlockSize.
func (a *azureClient) getBlockSize(ctx context.Context) int64 {
	raw := modulecomponents.GetValueFromContext(ctx, "X-Azure-Block-Size")
	if raw == "" {
		raw = os.Getenv("AZURE_BLOCK_SIZE")
	}
	if raw == "" {
		return defaultBlockSize
	}
	parsed, err := strconv.ParseInt(raw, 10, 64)
	if err != nil {
		return defaultBlockSize
	}
	return parsed
}

// getConcurrency resolves the upload concurrency from the X-Azure-Concurrency
// request header, then the AZURE_CONCURRENCY env var; absence or a parse
// failure yields defaultConcurrency.
func (a *azureClient) getConcurrency(ctx context.Context) int {
	raw := modulecomponents.GetValueFromContext(ctx, "X-Azure-Concurrency")
	if raw == "" {
		raw = os.Getenv("AZURE_CONCURRENCY")
	}
	if raw == "" {
		return defaultConcurrency
	}
	parsed, err := strconv.Atoi(raw)
	if err != nil {
		return defaultConcurrency
	}
	return parsed
}

func (a *azureClient) Write(ctx context.Context, backupID, key, overrideBucket, overridePath string, r io.ReadCloser) (written int64, err error) {
	path := a.makeObjectName(overridePath, []string{backupID, key})
reader := &reader{src: r} + defer func() { + r.Close() + written = int64(reader.count) + }() + + containerName := a.config.Container + if overrideBucket != "" { + containerName = overrideBucket + } + + if _, err = a.client.UploadStream(ctx, + containerName, + path, + reader, + &azblob.UploadStreamOptions{ + Metadata: map[string]*string{"backupid": to.Ptr(backupID)}, + Tags: map[string]string{"backupid": backupID}, + BlockSize: a.getBlockSize(ctx), + Concurrency: a.getConcurrency(ctx), + }); err != nil { + err = fmt.Errorf("upload stream %q: %w", path, err) + } + + return +} + +func (a *azureClient) Read(ctx context.Context, backupID, key, overrideBucket, overridePath string, w io.WriteCloser) (int64, error) { + defer w.Close() + + containerName := a.config.Container + if overrideBucket != "" { + containerName = overrideBucket + } + + path := a.makeObjectName(overridePath, []string{backupID, key}) + resp, err := a.client.DownloadStream(ctx, containerName, path, nil) + if err != nil { + err = fmt.Errorf("find object %q: %w", path, err) + if bloberror.HasCode(err, bloberror.BlobNotFound) { + err = backup.NewErrNotFound(err) + } + return 0, err + } + defer resp.Body.Close() + + read, err := io.Copy(w, resp.Body) + if err != nil { + return read, fmt.Errorf("io.copy %q: %w", path, err) + } + + return read, nil +} + +func (a *azureClient) SourceDataPath() string { + return a.dataPath +} + +// reader is a wrapper used to count number of written bytes +// Unlike GCS and S3 Azure Interface does not provide this information +type reader struct { + src io.Reader + count int +} + +func (r *reader) Read(p []byte) (n int, err error) { + n, err = r.src.Read(p) + r.count += n + return +} diff --git a/platform/dbops/binaries/weaviate-src/modules/backup-azure/module.go b/platform/dbops/binaries/weaviate-src/modules/backup-azure/module.go new file mode 100644 index 0000000000000000000000000000000000000000..48696a54980cae28b87abe581671970c4665e585 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/backup-azure/module.go @@ -0,0 +1,110 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modstgazure + +import ( + "context" + "os" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" +) + +const ( + Name = "backup-azure" + AltName1 = "azure" + azureContainer = "BACKUP_AZURE_CONTAINER" + + // this is an optional value, allowing for + // the backup to be stored in a specific + // directory inside the provided container. + // + // if left unset, the backup files will + // be stored directly in the root of the + // container. + azurePath = "BACKUP_AZURE_PATH" +) + +type clientConfig struct { + Container string + + // this is an optional value, allowing for + // the backup to be stored in a specific + // directory inside the provided bucket + BackupPath string +} + +type Module struct { + logger logrus.FieldLogger + *azureClient + dataPath string +} + +func New() *Module { + return &Module{} +} + +func (m *Module) Name() string { + return Name +} + +func (m *Module) IsExternal() bool { + return true +} + +func (m *Module) AltNames() []string { + return []string{AltName1} +} + +func (m *Module) Type() modulecapabilities.ModuleType { + return modulecapabilities.Backup +} + +func (m *Module) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + m.dataPath = params.GetStorageProvider().DataPath() + + config := &clientConfig{ + Container: os.Getenv(azureContainer), + BackupPath: os.Getenv(azurePath), + } + if config.Container == "" { + return errors.Errorf("backup init: '%s' must be set", azureContainer) + } 
+ + client, err := newClient(ctx, config, m.dataPath) + if err != nil { + return errors.Wrap(err, "init Azure client") + } + m.azureClient = client + return nil +} + +func (m *Module) MetaInfo() (map[string]interface{}, error) { + metaInfo := make(map[string]interface{}) + metaInfo["containerName"] = m.config.Container + if root := m.config.BackupPath; root != "" { + metaInfo["rootName"] = root + } + return metaInfo, nil +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.BackupBackend(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/backup-filesystem/backup.go b/platform/dbops/binaries/weaviate-src/modules/backup-filesystem/backup.go new file mode 100644 index 0000000000000000000000000000000000000000..7092b5c59e0cccda7401f32f06068dce43e3653b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/backup-filesystem/backup.go @@ -0,0 +1,250 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modstgfs + +import ( + "context" + "fmt" + "io" + "os" + "path" + "path/filepath" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/backup" + "github.com/weaviate/weaviate/usecases/monitoring" +) + +func (m *Module) GetObject(ctx context.Context, backupID, key, overrideBucket, overridePath string) ([]byte, error) { + var metaPath string + var err error + if overridePath != "" { + metaPath, err = m.getObjectPath(ctx, overridePath, backupID, key) + } else { + metaPath, err = m.getObjectPath(ctx, m.backupsPath, backupID, key) + } + + if err != nil { + return nil, err + } + + contents, err := os.ReadFile(metaPath) + if err != nil { + return nil, backup.NewErrInternal(errors.Wrapf(err, "get object %s", metaPath)) + } + + metric, err := monitoring.GetMetrics().BackupRestoreDataTransferred.GetMetricWithLabelValues(m.Name(), "class") + if err == nil { + metric.Add(float64(len(contents))) + } + + return contents, nil +} + +func (m *Module) getObjectPath(ctx context.Context, path, backupID, key string) (string, error) { + metaPath := filepath.Join(path, backupID, key) + + if err := ctx.Err(); err != nil { + return "", backup.NewErrContextExpired(errors.Wrapf(err, "get object path expired %s", metaPath)) + } + + if _, err := os.Stat(metaPath); errors.Is(err, os.ErrNotExist) { + return "", backup.NewErrNotFound(errors.Wrapf(err, "get object path could not find %s", metaPath)) + } else if err != nil { + return "", backup.NewErrInternal(errors.Wrapf(err, "get object path %s", metaPath)) + } + + return metaPath, nil +} + +func (m *Module) copyFile(sourcePath, destinationPath string) (int64, error) { + source, err := os.Open(sourcePath) + defer func() error { + return source.Close() + }() + if err != nil { + return 0, errors.Wrapf(err, "open file %s", sourcePath) + } + + if _, err := os.Stat(destinationPath); err != nil { + if err := os.MkdirAll(path.Dir(destinationPath), os.ModePerm); err != nil { + return 0, 
errors.Wrapf(err, "make dir %s", destinationPath) + } + } + + destination, err := os.Create(destinationPath) + defer func() error { + return destination.Close() + }() + if err != nil { + return 0, errors.Wrapf(err, "create destination file %s", destinationPath) + } + + written, err := io.Copy(destination, source) + if err != nil { + return 0, errors.Wrapf(err, "copy file from %s to %s", sourcePath, destinationPath) + } + + return written, nil +} + +func (m *Module) PutObject(ctx context.Context, backupID, key, bucket, overridePath string, byes []byte) error { + if bucket != "" { + m.logger.Info("bucket parameter not supported for filesystem backup module!") + } + + backupPath := path.Join(m.makeBackupDirPath(m.backupsPath, backupID), key) + if overridePath != "" { + backupPath = path.Join(overridePath, backupID, key) + } + + dir := path.Dir(backupPath) + + if err := os.MkdirAll(dir, os.ModePerm); err != nil { + return errors.Wrapf(err, "make dir %s", dir) + } + + if err := os.WriteFile(backupPath, byes, os.ModePerm); err != nil { + return errors.Wrapf(err, "write file %s", backupPath) + } + + metric, err := monitoring.GetMetrics().BackupStoreDataTransferred.GetMetricWithLabelValues(m.Name(), "class") + if err == nil { + metric.Add(float64(len(byes))) + } + + return nil +} + +func (m *Module) Initialize(ctx context.Context, backupID, overrideBucket, overridePath string) error { + // TODO: does anything need to be done here? 
+ return nil +} + +func (m *Module) WriteToFile(ctx context.Context, backupID, key, destPath, overrideBucket, overridePath string) error { + var objectPath string + var err error + if overridePath != "" { + objectPath = filepath.Join(overridePath, backupID, key) + } else { + objectPath = filepath.Join(m.backupsPath, backupID, key) + } + + bytesWritten, err := m.copyFile(objectPath, destPath) + if err != nil { + return err + } + + metric, err := monitoring.GetMetrics().BackupRestoreDataTransferred.GetMetricWithLabelValues(m.Name(), "class") + if err == nil { + metric.Add(float64(bytesWritten)) + } + + return nil +} + +func (m *Module) Write(ctx context.Context, backupID, key, overrideBucket, overridePath string, r io.ReadCloser) (int64, error) { + defer r.Close() + + var backupPath string + var err error + if overridePath != "" { + backupPath = filepath.Join(overridePath, backupID, key) + } else { + backupPath = filepath.Join(m.backupsPath, backupID, key) + } + dir := path.Dir(backupPath) + if err := os.MkdirAll(dir, os.ModePerm); err != nil { + return 0, fmt.Errorf("make dir %q: %w", dir, err) + } + f, err := os.OpenFile(backupPath, os.O_RDWR|os.O_CREATE, os.ModePerm) + if err != nil { + return 0, fmt.Errorf("open file %q: %w", backupPath, err) + } + defer f.Close() + + written, err := io.Copy(f, r) + if err != nil { + return 0, fmt.Errorf("write file %q: %w", backupPath, err) + } + if metric, err := monitoring.GetMetrics().BackupStoreDataTransferred. 
+ GetMetricWithLabelValues(m.Name(), "class"); err == nil { + metric.Add(float64(written)) + } + + return written, err +} + +func (m *Module) Read(ctx context.Context, backupID, key, overrideBucket, overridePath string, w io.WriteCloser) (int64, error) { + defer w.Close() + + var sourcePath string + var err error + if overridePath != "" { + sourcePath, err = m.getObjectPath(ctx, overridePath, backupID, key) + } else { + sourcePath, err = m.getObjectPath(ctx, m.backupsPath, backupID, key) + } + if err != nil { + return 0, fmt.Errorf("source path %s/%s: %w", backupID, key, err) + } + + // open file + f, err := os.Open(sourcePath) + if err != nil { + return 0, fmt.Errorf("open file %q: %w", sourcePath, err) + } + defer f.Close() + + // copy file + read, err := io.Copy(w, f) + if err != nil { + return 0, fmt.Errorf("write : %w", err) + } + + if metric, err := monitoring.GetMetrics().BackupRestoreDataTransferred. + GetMetricWithLabelValues(m.Name(), "class"); err == nil { + metric.Add(float64(read)) + } + return read, err +} + +func (m *Module) SourceDataPath() string { + return m.dataPath +} + +func (m *Module) initBackupBackend(ctx context.Context, backupsPath string) error { + if backupsPath == "" { + return fmt.Errorf("empty backup path provided") + } + backupsPath = filepath.Clean(backupsPath) + if !filepath.IsAbs(backupsPath) { + return fmt.Errorf("relative backup path provided") + } + if err := m.createBackupsDir(backupsPath); err != nil { + return errors.Wrap(err, "invalid backup path provided") + } + m.backupsPath = backupsPath + + return nil +} + +func (m *Module) createBackupsDir(backupsPath string) error { + if err := os.MkdirAll(backupsPath, os.ModePerm); err != nil { + m.logger.WithField("module", m.Name()). + WithField("action", "create_backups_dir"). + WithError(err). 
+ Errorf("failed creating backups directory %v", backupsPath) + return backup.NewErrInternal(errors.Wrap(err, "make backups dir")) + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/backup-filesystem/backup_test.go b/platform/dbops/binaries/weaviate-src/modules/backup-filesystem/backup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..bd689a4a7af05cef65b048b026da0e613b4f264e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/backup-filesystem/backup_test.go @@ -0,0 +1,54 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modstgfs + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestBackend_StoreBackup(t *testing.T) { + backupRelativePath := filepath.Join("./backups", "some", "nested", "dir") + backupAbsolutePath := t.TempDir() + + ctx := context.Background() + + t.Run("fails init fs module with empty backup path", func(t *testing.T) { + module := New() + err := module.initBackupBackend(ctx, "") + + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "empty backup path provided") + }) + + t.Run("fails init fs module with relative backup path", func(t *testing.T) { + module := New() + err := module.initBackupBackend(ctx, backupRelativePath) + + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "relative backup path provided") + }) + + t.Run("inits backup module with absolute backup path", func(t *testing.T) { + module := New() + err := module.initBackupBackend(ctx, backupAbsolutePath) + + assert.Nil(t, err) + + _, err = os.Stat(backupAbsolutePath) + assert.Nil(t, err) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/backup-filesystem/module.go 
b/platform/dbops/binaries/weaviate-src/modules/backup-filesystem/module.go new file mode 100644 index 0000000000000000000000000000000000000000..c02ca9906ef3544930d0dc800a318175aa275ae7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/backup-filesystem/module.go @@ -0,0 +1,133 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modstgfs + +import ( + "context" + "encoding/json" + "fmt" + "os" + "path" + "path/filepath" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/backup" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + ubak "github.com/weaviate/weaviate/usecases/backup" +) + +const ( + Name = "backup-filesystem" + AltName1 = "filesystem" + backupsPathName = "BACKUP_FILESYSTEM_PATH" +) + +type Module struct { + logger logrus.FieldLogger + dataPath string // path to the current (operational) data + backupsPath string // complete(?) 
path to the directory that holds all the backups +} + +func New() *Module { + return &Module{} +} + +func (m *Module) Name() string { + return Name +} + +func (m *Module) IsExternal() bool { + return false +} + +func (m *Module) AltNames() []string { + return []string{AltName1} +} + +func (m *Module) Type() modulecapabilities.ModuleType { + return modulecapabilities.Backup +} + +func (m *Module) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + m.dataPath = params.GetStorageProvider().DataPath() + backupsPath := os.Getenv(backupsPathName) + if err := m.initBackupBackend(ctx, backupsPath); err != nil { + return errors.Wrap(err, "init backup backend") + } + + return nil +} + +func (m *Module) HomeDir(backupID, overrideBucket, overridePath string) string { + if overridePath != "" { + return path.Join(overridePath, backupID) + } else { + return path.Join(m.makeBackupDirPath(m.backupsPath, backupID)) + } +} + +func (m *Module) AllBackups(context.Context) ([]*backup.DistributedBackupDescriptor, error) { + var meta []*backup.DistributedBackupDescriptor + backups, err := os.ReadDir(m.backupsPath) + if err != nil { + return nil, fmt.Errorf("open backups path: %w", err) + } + for _, bak := range backups { + if !bak.IsDir() { + continue + } + backupPath := path.Join(m.backupsPath, bak.Name()) + contents, err := os.ReadDir(backupPath) + if err != nil { + return nil, fmt.Errorf("read backup contents: %w", err) + } + for _, file := range contents { + if file.Name() == ubak.GlobalBackupFile { + fileName := path.Join(backupPath, file.Name()) + bytes, err := os.ReadFile(fileName) + if err != nil { + return nil, fmt.Errorf("read backup meta file %q: %w", + fileName, err) + } + var desc backup.DistributedBackupDescriptor + if err := json.Unmarshal(bytes, &desc); err != nil { + return nil, fmt.Errorf("unmarshal backup meta file %q: %w", + path.Join(backupPath, file.Name()), err) + } + meta = append(meta, &desc) + } + } + } + 
return meta, nil +} + +func (m *Module) MetaInfo() (map[string]interface{}, error) { + metaInfo := make(map[string]interface{}) + metaInfo["backupsPath"] = m.backupsPath + return metaInfo, nil +} + +func (m *Module) makeBackupDirPath(path, id string) string { + return filepath.Join(path, id) +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.BackupBackend(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/backup-gcs/client.go b/platform/dbops/binaries/weaviate-src/modules/backup-gcs/client.go new file mode 100644 index 0000000000000000000000000000000000000000..1fa6fb5e5277b2b4427d179217ece4313c5068de --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/backup-gcs/client.go @@ -0,0 +1,388 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modstggcs + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "os" + "path" + "strings" + "time" + + "cloud.google.com/go/storage" + "github.com/googleapis/gax-go/v2" + "github.com/pkg/errors" + "golang.org/x/oauth2/google" + "google.golang.org/api/iterator" + "google.golang.org/api/option" + + "github.com/weaviate/weaviate/entities/backup" + ubak "github.com/weaviate/weaviate/usecases/backup" + "github.com/weaviate/weaviate/usecases/monitoring" +) + +type gcsClient struct { + client *storage.Client + config clientConfig + projectID string + dataPath string +} + +func newClient(ctx context.Context, config *clientConfig, dataPath string) (*gcsClient, error) { + options := []option.ClientOption{} + useAuth := strings.ToLower(os.Getenv("BACKUP_GCS_USE_AUTH")) != "false" + if useAuth { + scopes := []string{ + "https://www.googleapis.com/auth/devstorage.read_write", + } + creds, err := google.FindDefaultCredentials(ctx, scopes...) + if err != nil { + return nil, errors.Wrap(err, "find default credentials") + } + options = append(options, option.WithCredentials(creds)) + } else { + options = append(options, option.WithoutAuthentication()) + } + projectID := os.Getenv("GOOGLE_CLOUD_PROJECT") + if len(projectID) == 0 { + projectID = os.Getenv("GCLOUD_PROJECT") + if len(projectID) == 0 { + projectID = os.Getenv("GCP_PROJECT") + } + } + client, err := storage.NewClient(ctx, options...) 
+ if err != nil { + return nil, errors.Wrap(err, "create client") + } + + client.SetRetry(storage.WithBackoff(gax.Backoff{ + Initial: 2 * time.Second, // Note: the client uses a jitter internally + Max: 60 * time.Second, + Multiplier: 3, + }), + storage.WithPolicy(storage.RetryAlways), + ) + return &gcsClient{client, *config, projectID, dataPath}, nil +} + +func (g *gcsClient) getObject(ctx context.Context, bucket *storage.BucketHandle, + objectName string, +) ([]byte, error) { + // Create bucket reader + obj := bucket.Object(objectName) + reader, err := obj.NewReader(ctx) + if err != nil { + if errors.Is(err, storage.ErrObjectNotExist) { + return nil, err + } + return nil, errors.Wrapf(err, "new reader: %v", objectName) + } + defer reader.Close() + + // Read file contents using io.Copy for better memory management + var buf bytes.Buffer + _, err = io.Copy(&buf, reader) + if err != nil { + return nil, errors.Wrapf(err, "read object: %v", objectName) + } + + content := buf.Bytes() + metric, err := monitoring.GetMetrics().BackupRestoreDataTransferred.GetMetricWithLabelValues(Name, "class") + if err == nil { + metric.Add(float64(len(content))) + } + return content, nil +} + +func (g *gcsClient) HomeDir(backupID, overrideBucket, overridePath string) string { + if overridePath == "" && overrideBucket == "" { + return "gs://" + path.Join(g.config.Bucket, + g.makeObjectName("", []string{backupID})) + } else { + return "gs://" + path.Join(overrideBucket, + g.makeObjectName(overridePath, []string{backupID})) + } +} + +func (g *gcsClient) AllBackups(ctx context.Context) ([]*backup.DistributedBackupDescriptor, error) { + var meta []*backup.DistributedBackupDescriptor + bucket, err := g.findBucket(ctx, "") + if err != nil { + return nil, fmt.Errorf("find bucket: %w", err) + } + + iter := bucket.Objects(ctx, &storage.Query{Prefix: g.config.BackupPath, MatchGlob: "**/" + ubak.GlobalBackupFile}) + for { + // Check context before each iteration + if err := ctx.Err(); err != nil { 
+ return nil, err + } + + next, err := iter.Next() + if errors.Is(err, iterator.Done) { + break + } + if err != nil { + return nil, fmt.Errorf("get next object: %w", err) + } + + // mostly needed for testing on the emulator + if !strings.HasSuffix(next.Name, ubak.GlobalBackupFile) { + continue + } + + contents, err := g.getObject(ctx, bucket, next.Name) + if err != nil { + return nil, fmt.Errorf("read object %q: %w", next.Name, err) + } + var desc backup.DistributedBackupDescriptor + if err := json.Unmarshal(contents, &desc); err != nil { + return nil, fmt.Errorf("unmarshal object %q: %w", next.Name, err) + } + meta = append(meta, &desc) + } + + return meta, nil +} + +func (g *gcsClient) findBucket(ctx context.Context, bucketOverride string) (*storage.BucketHandle, error) { + b := g.config.Bucket + + if bucketOverride != "" { + b = bucketOverride + } + bucket := g.client.Bucket(b) + + if _, err := bucket.Attrs(ctx); err != nil { + return nil, fmt.Errorf("find bucket: %w", err) + } + + return bucket, nil +} + +func (g *gcsClient) makeObjectName(overridePath string, parts []string) string { + if overridePath != "" { + base := path.Join(parts...) + return path.Join(overridePath, base) + } else { + base := path.Join(parts...) 
+ return path.Join(g.config.BackupPath, base) + } +} + +func (g *gcsClient) GetObject(ctx context.Context, backupID, key, overrideBucket, overridePath string) ([]byte, error) { + objectName := g.makeObjectName(overridePath, []string{backupID, key}) + + if err := ctx.Err(); err != nil { + return nil, backup.NewErrContextExpired(errors.Wrapf(err, "get object %s", objectName)) + } + + bucket, err := g.findBucket(ctx, overrideBucket) + if err != nil { + if errors.Is(err, storage.ErrBucketNotExist) { + return nil, backup.NewErrNotFound(errors.Wrapf(err, "get object %s", objectName)) + } + return nil, backup.NewErrInternal(errors.Wrapf(err, "get object %s", objectName)) + } + + contents, err := g.getObject(ctx, bucket, objectName) + if err != nil { + if errors.Is(err, storage.ErrObjectNotExist) { + return nil, backup.NewErrNotFound(errors.Wrapf(err, "get object %s", objectName)) + } + return nil, backup.NewErrInternal(errors.Wrapf(err, "get object %s", objectName)) + } + + return contents, nil +} + +func (g *gcsClient) PutObject(ctx context.Context, backupID, key, overrideBucket, overridePath string, byes []byte) error { + bucket, err := g.findBucket(ctx, overrideBucket) + if err != nil { + return errors.Wrap(err, "find bucket") + } + + objectName := g.makeObjectName(overridePath, []string{backupID, key}) + obj := bucket.Object(objectName) + writer := obj.NewWriter(ctx) + writer.ContentType = "application/octet-stream" + writer.Metadata = map[string]string{ + "backup-id": backupID, + } + if _, err := writer.Write(byes); err != nil { + return errors.Wrapf(err, "write file: %v", objectName) + } + if err := writer.Close(); err != nil { + return errors.Wrapf(err, "close writer for file: %v", objectName) + } + + metric, err := monitoring.GetMetrics().BackupStoreDataTransferred.GetMetricWithLabelValues("backup-gcs", "class") + if err == nil { + metric.Add(float64(len(byes))) + } + + return nil +} + +func (g *gcsClient) Initialize(ctx context.Context, backupID, overrideBucket, 
overridePath string) error { + key := "access-check" + + if err := g.PutObject(ctx, backupID, key, overrideBucket, overridePath, []byte("")); err != nil { + return errors.Wrapf(err, "failed to access-check gcs backup module %v %v %v %v", overrideBucket, overridePath, backupID, key) + } + + bucket, err := g.findBucket(ctx, overrideBucket) + if err != nil { + return errors.Wrap(err, "find bucket") + } + + objectName := g.makeObjectName(overridePath, []string{backupID, key}) + if err := bucket.Object(objectName).Delete(ctx); err != nil { + return errors.Wrapf(err, "failed to remove access-check gcs backup module %v", objectName) + } + + return nil +} + +// WriteToFile downloads an object and store its content in destPath +// The file destPath will be created if it doesn't exit +func (g *gcsClient) WriteToFile(ctx context.Context, backupID, key, destPath, overrideBucket, overridePath string) (err error) { + bucket, err := g.findBucket(ctx, overrideBucket) + if err != nil { + return fmt.Errorf("writetofile: find bucket: %w ", err) + } + + // validate destination path + if st, err := os.Stat(destPath); err == nil { + if st.IsDir() { + return fmt.Errorf("file is a directory") + } + } else if !os.IsNotExist(err) { + return err + } + + // create empty file + dir := path.Dir(destPath) + if err := os.MkdirAll(dir, os.ModePerm); err != nil { + return fmt.Errorf("os.mkdir for writetofile %q: %w", dir, err) + } + file, err := os.Create(destPath) + if err != nil { + return fmt.Errorf("os.create for writetofile %q: %w", destPath, err) + } + + // make sure to close and delete in case we return early + closeAndRemove := true + defer func() { + if closeAndRemove { + file.Close() + os.Remove(destPath) + } + }() + + // create reader + object := g.makeObjectName(overridePath, []string{backupID, key}) + rc, err := bucket.Object(object).NewReader(ctx) + if err != nil { + return fmt.Errorf("create reader for writetofile %q: %w", object, err) + } + defer rc.Close() + + // transfer content 
to the file + if _, err := io.Copy(file, rc); err != nil { + return fmt.Errorf("io.Copy for writetofile:%q %q: %w", destPath, object, err) + } + closeAndRemove = false + if err = file.Close(); err != nil { + return fmt.Errorf("f.Close for writetofile %q: %w", destPath, err) + } + + return nil +} + +func (g *gcsClient) Write(ctx context.Context, backupID, key, overrideBucket, overridePath string, r io.ReadCloser) (int64, error) { + defer r.Close() + + bucket, err := g.findBucket(ctx, overrideBucket) + if err != nil { + return 0, fmt.Errorf("write: find bucket: %w", err) + } + + // create a new writer + path := g.makeObjectName(overridePath, []string{backupID, key}) + writer := bucket.Object(path).NewWriter(ctx) + writer.ContentType = "application/octet-stream" + writer.Metadata = map[string]string{"backup-id": backupID} + + // if we return early make sure writer is closed + defer func() { + writer.Close() + }() + + // copy + written, err := io.Copy(writer, r) + if err != nil { + return 0, fmt.Errorf("io.copy for gcs write %q: %w", path, err) + } + + if metric, err := monitoring.GetMetrics().BackupStoreDataTransferred. 
+ GetMetricWithLabelValues(Name, "class"); err == nil { + metric.Add(float64(written)) + } + return written, nil +} + +func (g *gcsClient) Read(ctx context.Context, backupID, key, overrideBucket, overridePath string, w io.WriteCloser) (int64, error) { + defer w.Close() + + bucket, err := g.findBucket(ctx, overrideBucket) + if err != nil { + err = fmt.Errorf("read: find bucker: %w", err) + if errors.Is(err, storage.ErrObjectNotExist) { + err = backup.NewErrNotFound(err) + } + return 0, err + } + + // create reader + path := g.makeObjectName(overridePath, []string{backupID, key}) + rc, err := bucket.Object(path).NewReader(ctx) + if err != nil { + err = fmt.Errorf("create reader %s: %w", path, err) + if errors.Is(err, storage.ErrObjectNotExist) { + err = backup.NewErrNotFound(err) + } + return 0, err + } + defer rc.Close() + + // copy + read, err := io.Copy(w, rc) + if err != nil { + return read, fmt.Errorf("io.copy for read %q: %w", path, err) + } + + if metric, err := monitoring.GetMetrics().BackupRestoreDataTransferred. + GetMetricWithLabelValues(Name, "class"); err == nil { + metric.Add(float64(float64(read))) + } + + return read, nil +} + +func (g *gcsClient) SourceDataPath() string { + return g.dataPath +} diff --git a/platform/dbops/binaries/weaviate-src/modules/backup-gcs/module.go b/platform/dbops/binaries/weaviate-src/modules/backup-gcs/module.go new file mode 100644 index 0000000000000000000000000000000000000000..1652e7ccaa95c793ed505cd7371ccaf034adf995 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/backup-gcs/module.go @@ -0,0 +1,110 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modstggcs + +import ( + "context" + "os" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" +) + +const ( + Name = "backup-gcs" + AltName1 = "gcs" + gcsBucket = "BACKUP_GCS_BUCKET" + + // this is an optional value, allowing for + // the backup to be stored in a specific + // directory inside the provided bucket. + // + // if left unset, the backup files will + // be stored directly in the root of the + // bucket. + gcsPath = "BACKUP_GCS_PATH" +) + +type clientConfig struct { + Bucket string + + // this is an optional value, allowing for + // the backup to be stored in a specific + // directory inside the provided bucket + BackupPath string +} + +type Module struct { + logger logrus.FieldLogger + *gcsClient + dataPath string +} + +func New() *Module { + return &Module{} +} + +func (m *Module) Name() string { + return Name +} + +func (m *Module) IsExternal() bool { + return true +} + +func (m *Module) AltNames() []string { + return []string{AltName1} +} + +func (m *Module) Type() modulecapabilities.ModuleType { + return modulecapabilities.Backup +} + +func (m *Module) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + m.dataPath = params.GetStorageProvider().DataPath() + + config := &clientConfig{ + Bucket: os.Getenv(gcsBucket), + BackupPath: os.Getenv(gcsPath), + } + if config.Bucket == "" { + return errors.Errorf("backup init: '%s' must be set", gcsBucket) + } + + client, err := newClient(ctx, config, m.dataPath) + if err != nil { + return errors.Wrap(err, "init gcs client") + } + m.gcsClient = client + return nil +} + +func (m *Module) MetaInfo() (map[string]interface{}, error) { + metaInfo := make(map[string]interface{}) + metaInfo["bucketName"] = m.config.Bucket + if root := m.config.BackupPath; root != "" { + metaInfo["rootName"] = 
root + } + return metaInfo, nil +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.BackupBackend(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/backup-s3/client.go b/platform/dbops/binaries/weaviate-src/modules/backup-s3/client.go new file mode 100644 index 0000000000000000000000000000000000000000..5408a0593482ee551e079dce53e610abce72b48a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/backup-s3/client.go @@ -0,0 +1,379 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modstgs3 + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "path" + "strings" + + "github.com/minio/minio-go/v7" + "github.com/minio/minio-go/v7/pkg/credentials" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/backup" + ubak "github.com/weaviate/weaviate/usecases/backup" + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/monitoring" +) + +const ( + // source : https://github.com/minio/minio-go/blob/master/api-put-object-common.go#L69 + // minio has min part size of 16MB + MINIO_MIN_PART_SIZE = 16 * 1024 * 1024 +) + +type s3Client struct { + client *minio.Client + config *clientConfig + logger logrus.FieldLogger + dataPath string + region string +} + +func newClient(config *clientConfig, logger logrus.FieldLogger, dataPath, bucket, path string) (*s3Client, error) { + region := os.Getenv("AWS_REGION") + if len(region) == 0 { + region = os.Getenv("AWS_DEFAULT_REGION") + } + + var creds *credentials.Credentials + if (os.Getenv("AWS_ACCESS_KEY_ID") != "" || 
os.Getenv("AWS_ACCESS_KEY") != "") && + (os.Getenv("AWS_SECRET_ACCESS_KEY") != "" || os.Getenv("AWS_SECRET_KEY") != "") { + creds = credentials.NewEnvAWS() + } else { + creds = credentials.NewIAM("") + // .Get() got deprecated with 7.0.83 + // and passing nil will use default context, + if _, err := creds.GetWithContext(nil); err != nil { + // can be anonymous access + creds = credentials.NewEnvAWS() + } + } + + client, err := minio.New(config.Endpoint, &minio.Options{ + Creds: creds, + Region: region, + Secure: config.UseSSL, + }) + if err != nil { + return nil, errors.Wrap(err, "create client") + } + return &s3Client{client, config, logger, dataPath, region}, nil +} + +func (s *s3Client) getClient(ctx context.Context) (*minio.Client, error) { + xAwsAccessKey := modulecomponents.GetValueFromContext(ctx, "X-AWS-ACCESS-KEY") + xAwsSecretKey := modulecomponents.GetValueFromContext(ctx, "X-AWS-SECRET-KEY") + xAwsSessionToken := modulecomponents.GetValueFromContext(ctx, "X-AWS-SESSION-TOKEN") + if xAwsAccessKey != "" && xAwsSecretKey != "" && xAwsSessionToken != "" { + return minio.New(s.config.Endpoint, &minio.Options{ + Creds: credentials.NewStaticV4(xAwsAccessKey, xAwsSecretKey, xAwsSessionToken), + Region: s.region, + Secure: s.config.UseSSL, + }) + } + return s.client, nil +} + +func (s *s3Client) makeObjectName(parts ...string) string { + base := path.Join(parts...) 
+ return path.Join(s.config.BackupPath, base) +} + +func (s *s3Client) HomeDir(backupID, overrideBucket, overridePath string) string { + remoteBucket := s.config.Bucket + remotePath := s.config.BackupPath + + if overridePath != "" { + remotePath = path.Join(overridePath) + } + + if overrideBucket != "" { + remoteBucket = overrideBucket + } + + return "s3://" + path.Join(remoteBucket, remotePath, s.makeObjectName(backupID)) +} + +func (s *s3Client) AllBackups(ctx context.Context, +) ([]*backup.DistributedBackupDescriptor, error) { + var meta []*backup.DistributedBackupDescriptor + objectsInfo := s.client.ListObjects(ctx, + s.config.Bucket, + minio.ListObjectsOptions{ + Recursive: true, + Prefix: s.config.BackupPath, + }, + ) + + for info := range objectsInfo { + if err := ctx.Err(); err != nil { + return nil, err + } + + // Only process global backup files - this is the key filter + // This filters out all other files and only processes backup_config.json + if !strings.HasSuffix(info.Key, ubak.GlobalBackupFile) { + continue + } + + // Get the backup object + obj, err := s.client.GetObject(ctx, + s.config.Bucket, info.Key, minio.GetObjectOptions{}) + if err != nil { + return nil, fmt.Errorf("get object %q: %w", info.Key, err) + } + + // Ensure object is closed to prevent connection leaks + defer obj.Close() + + // Use a buffer to limit memory usage + var buf bytes.Buffer + _, err = io.Copy(&buf, obj) + if err != nil { + return nil, fmt.Errorf("read object %q: %w", info.Key, err) + } + + // Unmarshal the backup metadata + var desc backup.DistributedBackupDescriptor + if err := json.Unmarshal(buf.Bytes(), &desc); err != nil { + return nil, fmt.Errorf("unmarshal object %q: %w", info.Key, err) + } + + meta = append(meta, &desc) + } + + return meta, nil +} + +func (s *s3Client) GetObject(ctx context.Context, backupID, key, overrideBucket, overridePath string) ([]byte, error) { + client, err := s.getClient(ctx) + if err != nil { + return nil, errors.Wrap(err, "get object: 
failed to get client") + } + remotePath := s.makeObjectName(backupID, key) + + if overridePath != "" { + remotePath = path.Join(overridePath, backupID, key) + } + + bucket := s.config.Bucket + if overrideBucket != "" { + bucket = overrideBucket + } + + if err := ctx.Err(); err != nil { + return nil, backup.NewErrContextExpired(errors.Wrapf(err, "context expired in get object %s", remotePath)) + } + + obj, err := client.GetObject(ctx, bucket, remotePath, minio.GetObjectOptions{}) + if err != nil { + return nil, backup.NewErrInternal(errors.Wrapf(err, "get object %s", remotePath)) + } + + // Ensure object is closed to prevent connection leaks + defer obj.Close() + + // Use a buffer to limit memory usage + var buf bytes.Buffer + _, err = io.Copy(&buf, obj) + if err != nil { + var s3Err minio.ErrorResponse + if errors.As(err, &s3Err) && s3Err.StatusCode == http.StatusNotFound { + return nil, backup.NewErrNotFound(errors.Wrapf(err, "get object contents from %s:%s not found %s", bucket, remotePath, remotePath)) + } + return nil, backup.NewErrInternal(errors.Wrapf(err, "get object contents from %s:%s %s", bucket, remotePath, remotePath)) + } + + contents := buf.Bytes() + metric, err := monitoring.GetMetrics().BackupRestoreDataTransferred.GetMetricWithLabelValues(Name, "class") + if err == nil { + metric.Add(float64(len(contents))) + } + + return contents, nil +} + +func (s *s3Client) PutObject(ctx context.Context, backupID, key, overrideBucket, overridePath string, byes []byte) error { + client, err := s.getClient(ctx) + if err != nil { + return errors.Wrap(err, "put object: failed to get client") + } + + remotePath := s.makeObjectName(backupID, key) + opt := minio.PutObjectOptions{ + ContentType: "application/octet-stream", + PartSize: MINIO_MIN_PART_SIZE, + SendContentMd5: true, + } + reader := bytes.NewReader(byes) + objectSize := int64(len(byes)) + + if overridePath != "" { + remotePath = path.Join(overridePath, backupID, key) + } + + bucket := s.config.Bucket + if 
overrideBucket != "" { + bucket = overrideBucket + } + + _, err = client.PutObject(ctx, bucket, remotePath, reader, objectSize, opt) + if err != nil { + return backup.NewErrInternal( + errors.Wrapf(err, "put object: %s:%s", bucket, remotePath)) + } + + metric, err := monitoring.GetMetrics().BackupStoreDataTransferred.GetMetricWithLabelValues(Name, "class") + if err == nil { + metric.Add(float64(len(byes))) + } + return nil +} + +func (s *s3Client) Initialize(ctx context.Context, backupID, overrideBucket, overridePath string) error { + client, err := s.getClient(ctx) + if err != nil { + return errors.Wrap(err, "failed to get client") + } + + key := "access-check" + + if err := s.PutObject(ctx, backupID, key, overrideBucket, overridePath, []byte("")); err != nil { + return errors.Wrap(err, "failed to access-check s3 backup module") + } + + objectName := s.makeObjectName(backupID, key) + opt := minio.RemoveObjectOptions{} + if err := client.RemoveObject(ctx, s.config.Bucket, objectName, opt); err != nil { + return errors.Wrap(err, "failed to remove access-check s3 backup module") + } + + return nil +} + +// WriteFile downloads contents of an object to a local file destPath +func (s *s3Client) WriteToFile(ctx context.Context, backupID, key, destPath, overrideBucket, overridePath string) error { + client, err := s.getClient(ctx) + if err != nil { + return errors.Wrap(err, "write to file: cannot get client") + } + remotePath := s.makeObjectName(backupID, key) + if overridePath != "" { + remotePath = path.Join(overridePath, backupID, key) + } + + bucket := s.config.Bucket + if overrideBucket != "" { + bucket = overrideBucket + } + + err = client.FGetObject(ctx, bucket, remotePath, destPath, minio.GetObjectOptions{}) + if err != nil { + return fmt.Errorf("s3.FGetObject %q %q: %w", destPath, remotePath, err) + } + + if st, err := os.Stat(destPath); err == nil { + metric, err := monitoring.GetMetrics().BackupRestoreDataTransferred.GetMetricWithLabelValues(Name, "class") + if 
err == nil { + metric.Add(float64(st.Size())) + } + } + return nil +} + +func (s *s3Client) Write(ctx context.Context, backupID, key, overrideBucket, overridePath string, r io.ReadCloser) (int64, error) { + defer r.Close() + client, err := s.getClient(ctx) + if err != nil { + return -1, errors.Wrap(err, "write: cannot get client") + } + remotePath := s.makeObjectName(backupID, key) + opt := minio.PutObjectOptions{ + ContentType: "application/octet-stream", + DisableMultipart: false, + PartSize: MINIO_MIN_PART_SIZE, + SendContentMd5: true, + } + + if overridePath != "" { + remotePath = path.Join(overridePath, backupID, key) + } + + bucket := s.config.Bucket + if overrideBucket != "" { + bucket = overrideBucket + } + + info, err := client.PutObject(ctx, bucket, remotePath, r, -1, opt) + if err != nil { + return info.Size, fmt.Errorf("write object %q", remotePath) + } + + if metric, err := monitoring.GetMetrics().BackupStoreDataTransferred. + GetMetricWithLabelValues(Name, "class"); err == nil { + metric.Add(float64(float64(info.Size))) + } + return info.Size, nil +} + +func (s *s3Client) Read(ctx context.Context, backupID, key, overrideBucket, overridePath string, w io.WriteCloser) (int64, error) { + defer w.Close() + client, err := s.getClient(ctx) + if err != nil { + return -1, errors.Wrap(err, "read: cannot get client") + } + remotePath := s.makeObjectName(backupID, key) + + if overridePath != "" { + remotePath = path.Join(overridePath, backupID, key) + } + + bucket := s.config.Bucket + if overrideBucket != "" { + bucket = overrideBucket + } + + obj, err := client.GetObject(ctx, bucket, remotePath, minio.GetObjectOptions{}) + if err != nil { + return 0, fmt.Errorf("get object %q: %w", remotePath, err) + } + + read, err := io.Copy(w, obj) + if err != nil { + err = fmt.Errorf("get object %q: %w", remotePath, err) + var s3Err minio.ErrorResponse + if errors.As(err, &s3Err) && s3Err.StatusCode == http.StatusNotFound { + err = backup.NewErrNotFound(err) + } + return 0, 
err + } + + if metric, err := monitoring.GetMetrics().BackupRestoreDataTransferred. + GetMetricWithLabelValues(Name, "class"); err == nil { + metric.Add(float64(float64(read))) + } + + return read, nil +} + +func (s *s3Client) SourceDataPath() string { + return s.dataPath +} diff --git a/platform/dbops/binaries/weaviate-src/modules/backup-s3/config.go b/platform/dbops/binaries/weaviate-src/modules/backup-s3/config.go new file mode 100644 index 0000000000000000000000000000000000000000..099444a84841fc9f2807e186e1dabeeedcfada9c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/backup-s3/config.go @@ -0,0 +1,31 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modstgs3 + +type clientConfig struct { + Endpoint string + Bucket string + UseSSL bool + + // this is an optional value, allowing for + // the backup to be stored in a specific + // directory inside the provided bucket + BackupPath string +} + +func newConfig(endpoint, bucket, path string, useSSL bool) *clientConfig { + const DEFAULT_ENDPOINT = "s3.amazonaws.com" + if endpoint == "" { + endpoint = DEFAULT_ENDPOINT + } + return &clientConfig{endpoint, bucket, useSSL, path} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/backup-s3/module.go b/platform/dbops/binaries/weaviate-src/modules/backup-s3/module.go new file mode 100644 index 0000000000000000000000000000000000000000..679f531f87ed84eb54fe6c9255b30e06b34c999d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/backup-s3/module.go @@ -0,0 +1,107 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. 
All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modstgs3 + +import ( + "context" + "os" + "strings" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" +) + +const ( + Name = "backup-s3" + AltName1 = "s3" + s3Endpoint = "BACKUP_S3_ENDPOINT" + s3Bucket = "BACKUP_S3_BUCKET" + s3UseSSL = "BACKUP_S3_USE_SSL" + + // this is an optional value, allowing for + // the backup to be stored in a specific + // directory inside the provided bucket. + // + // if left unset, the backup files will + // be stored directly in the root of the + // bucket. + s3Path = "BACKUP_S3_PATH" +) + +type Module struct { + *s3Client + logger logrus.FieldLogger + dataPath string + bucket string + path string +} + +func New() *Module { + return &Module{} +} + +func (m *Module) Name() string { + return Name +} + +func (m *Module) IsExternal() bool { + return true +} + +func (m *Module) AltNames() []string { + return []string{AltName1} +} + +func (m *Module) Type() modulecapabilities.ModuleType { + return modulecapabilities.Backup +} + +func (m *Module) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + m.dataPath = params.GetStorageProvider().DataPath() + + bucket := os.Getenv(s3Bucket) + if bucket == "" { + return errors.Errorf("backup init: '%s' must be set", s3Bucket) + } + // SSL on by default + useSSL := strings.ToLower(os.Getenv(s3UseSSL)) != "false" + config := newConfig(os.Getenv(s3Endpoint), bucket, os.Getenv(s3Path), useSSL) + client, err := newClient(config, m.logger, m.dataPath, m.bucket, m.path) + if err != nil { + return errors.Wrap(err, "initialize S3 backup module") + } + m.s3Client = client + return nil +} + +func (m *Module) MetaInfo() (map[string]interface{}, error) { + metaInfo := make(map[string]interface{}, 4) + metaInfo["endpoint"] = m.config.Endpoint + metaInfo["bucketName"] 
= m.config.Bucket + if root := m.config.BackupPath; root != "" { + metaInfo["rootName"] = root + } + metaInfo["useSSL"] = m.config.UseSSL + return metaInfo, nil +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.BackupBackend(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/clients/anthropic.go b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/clients/anthropic.go new file mode 100644 index 0000000000000000000000000000000000000000..023f750a64dc78facea377be17dfa897b5d70fed --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/clients/anthropic.go @@ -0,0 +1,328 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/generative" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-anthropic/config" + anthropicparams "github.com/weaviate/weaviate/modules/generative-anthropic/parameters" +) + +type anthropic struct { + apiKey string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *anthropic { + return &anthropic{ + apiKey: apiKey, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (a *anthropic) GenerateSingleResult(ctx context.Context, properties *modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forPrompt, err := generative.MakeSinglePrompt(generative.Text(properties), prompt) + if err != nil { + return nil, err + } + return a.generate(ctx, cfg, forPrompt, generative.Blobs([]*modulecapabilities.GenerateProperties{properties}), options, debug) +} + +func (a *anthropic) GenerateAllResults(ctx context.Context, properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + texts := generative.Texts(properties) + forTask, err := generative.MakeTaskPrompt(texts, task) + if err != nil { + return nil, err + } + return a.generate(ctx, cfg, forTask, generative.Blobs(properties), options, debug) +} + +func (a *anthropic) generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, 
imageProperties []map[string]*string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) { + params := a.getParameters(cfg, options, imageProperties) + debugInformation := a.getDebugInformation(debug, prompt) + + anthropicURL, err := a.getAnthropicURL(ctx, params.BaseURL) + if err != nil { + return nil, errors.Wrap(err, "get anthropic url") + } + + var content interface{} + if len(params.Images) > 0 { + var promptWithImage contentImageInput + for i := range params.Images { + promptWithImage = append(promptWithImage, contentText{ + Type: "text", + Text: fmt.Sprintf("Image %d:", i+1), + }) + promptWithImage = append(promptWithImage, contentImage{ + Type: "image", + Source: contentSource{ + Type: "base64", + MediaType: "image/jpeg", + Data: params.Images[i], + }, + }) + } + promptWithImage = append(promptWithImage, contentText{ + Type: "text", + Text: prompt, + }) + content = promptWithImage + } else { + content = prompt + } + + input := generateInput{ + Messages: []message{ + { + Role: "user", + Content: content, + }, + }, + Model: params.Model, + MaxTokens: params.MaxTokens, + StopSequences: params.StopSequences, + Temperature: params.Temperature, + TopK: params.TopK, + TopP: params.TopP, + } + + body, err := json.Marshal(input) + if err != nil { + return nil, errors.Wrap(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", anthropicURL, + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + apiKey, err := a.getAPIKey(ctx) + if err != nil { + return nil, errors.Wrap(err, "Anthropic API key") + } + + req.Header.Add("x-api-key", apiKey) + req.Header.Add("Content-Type", "application/json") + req.Header.Add("Request-Source", "unspecified:weaviate") + req.Header.Add("anthropic-version", "2023-06-01") + + res, err := a.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "do POST request") + } + + defer res.Body.Close() + + bodyBytes, err := 
io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody generateResponse + + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + + if res.StatusCode != 200 && resBody.Type == "error" { + return nil, fmt.Errorf("Anthropic API error: %s - %s", resBody.Error.Type, resBody.Error.Message) + } + + textResponse := resBody.Content[0].Text + return &modulecapabilities.GenerateResponse{ + Result: &textResponse, + Debug: debugInformation, + Params: a.getResponseParams(resBody.Usage), + }, nil +} + +func (a *anthropic) getParameters(cfg moduletools.ClassConfig, options interface{}, imagePropertiesArray []map[string]*string) anthropicparams.Params { + settings := config.NewClassSettings(cfg) + + var params anthropicparams.Params + if p, ok := options.(anthropicparams.Params); ok { + params = p + } + + if params.BaseURL == "" { + params.BaseURL = settings.BaseURL() + } + if params.Model == "" { + params.Model = settings.Model() + } + if params.Temperature == nil { + temperature := settings.Temperature() + params.Temperature = &temperature + } + if params.TopK == nil { + topK := settings.TopK() + params.TopK = &topK + } + if params.TopP == nil { + topP := settings.TopP() + params.TopP = &topP + } + if len(params.StopSequences) == 0 { + params.StopSequences = settings.StopSequences() + } + if params.MaxTokens == nil { + // respect module config settings + maxTokens := settings.MaxTokens() + if maxTokens == nil { + // fallback to default values + maxTokens = settings.GetMaxTokensForModel(params.Model) + } + params.MaxTokens = maxTokens + } + + params.Images = generative.ParseImageProperties(params.Images, params.ImageProperties, imagePropertiesArray) + + return params +} + +func (a *anthropic) getDebugInformation(debug bool, prompt string) *modulecapabilities.GenerateDebugInformation { + if debug { + return 
&modulecapabilities.GenerateDebugInformation{ + Prompt: prompt, + } + } + return nil +} + +func (a *anthropic) getResponseParams(usage *usage) map[string]interface{} { + if usage != nil { + return map[string]interface{}{anthropicparams.Name: map[string]interface{}{"usage": usage}} + } + return nil +} + +func GetResponseParams(result map[string]interface{}) *responseParams { + if params, ok := result[anthropicparams.Name].(map[string]interface{}); ok { + if usage, ok := params["usage"].(*usage); ok { + return &responseParams{Usage: usage} + } + } + return nil +} + +func (a *anthropic) getAnthropicURL(ctx context.Context, baseURL string) (string, error) { + passedBaseURL := baseURL + if headerBaseURL := modulecomponents.GetValueFromContext(ctx, "X-Anthropic-Baseurl"); headerBaseURL != "" { + passedBaseURL = headerBaseURL + } + return url.JoinPath(passedBaseURL, "/v1/messages") +} + +func (a *anthropic) getAPIKey(ctx context.Context) (string, error) { + if apiKey := modulecomponents.GetValueFromContext(ctx, "X-Anthropic-Api-Key"); apiKey != "" { + return apiKey, nil + } + if a.apiKey != "" { + return a.apiKey, nil + } + return "", errors.New("no api key found for Anthropic " + + "neither in request header: X-Anthropic-Api-Key " + + "nor in the environment variable under ANTHROPIC_APIKEY") +} + +type generateInput struct { + Messages []message `json:"messages,omitempty"` + Model string `json:"model,omitempty"` + MaxTokens *int `json:"max_tokens,omitempty"` + StopSequences []string `json:"stop_sequences,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + TopK *int `json:"top_k,omitempty"` + TopP *float64 `json:"top_p,omitempty"` +} + +type message struct { + Role string `json:"role"` + Content interface{} `json:"content"` +} + +type contentImageInput []interface{} + +type contentText struct { + Type string `json:"type"` + Text string `json:"text"` +} + +type contentImage struct { + Type string `json:"type"` + Source contentSource `json:"source"` +} + 
+type contentSource struct { + Type string `json:"type"` + MediaType string `json:"media_type"` + Data *string `json:"data,omitempty"` +} + +type generateResponse struct { + Type string `json:"type"` + Error errorMessage `json:"error,omitempty"` + ID string `json:"id,omitempty"` + Role string `json:"role,omitempty"` + Content []content `json:"content,omitempty"` + Model string `json:"model,omitempty"` + StopReason StopReason `json:"stop_reason,omitempty"` + StopSequence string `json:"stop_sequence,omitempty"` + Usage *usage `json:"usage,omitempty"` +} + +type content struct { + Type string `json:"type"` + Text string `json:"text"` +} + +type StopReason string + +const ( + EndTurn StopReason = "end_turn" + MaxTokens StopReason = "max_tokens" + StopSequence StopReason = "stop_sequence" + ToolUse StopReason = "tool_use" +) + +type usage struct { + InputTokens int `json:"input_tokens,omitempty"` + OutputTokens int `json:"output_tokens,omitempty"` +} + +type errorMessage struct { + Type string `json:"type"` + Message string `json:"message"` +} + +type responseParams struct { + Usage *usage `json:"usage,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/clients/anthropic_meta.go b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/clients/anthropic_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..59902ff3b9897ed03618c11f04efcae496fe12a4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/clients/anthropic_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (a *anthropic) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Generative Search - Anthropic", + "documentationHref": "https://docs.anthropic.com/en/api/getting-started", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/clients/anthropic_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/clients/anthropic_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..cc07ee46a76232001cfa6d7a66aeca244341d61c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/clients/anthropic_meta_test.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestAnthropicMetaInfo(t *testing.T) { + t.Run("when getting meta info for Anthropic", func(t *testing.T) { + a := &anthropic{} + meta, err := a.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + + name, ok := meta["name"] + assert.True(t, ok) + assert.Equal(t, "Generative Search - Anthropic", name) + + documentationHref, ok := meta["documentationHref"] + assert.True(t, ok) + assert.Equal(t, "https://docs.anthropic.com/en/api/getting-started", documentationHref) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/clients/anthropic_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/clients/anthropic_test.go new file mode 100644 index 0000000000000000000000000000000000000000..185f6c35986f327d6435405a3c8fc57e6b6c3a10 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/clients/anthropic_test.go @@ -0,0 +1,200 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func TestGenerate(t *testing.T) { + properties := []*modulecapabilities.GenerateProperties{{Text: map[string]string{"prop": "My name is John"}}} + + tests := []struct { + name string + answer generateResponse + timeout time.Duration + expectedResult string + }{ + { + name: "when the server has a successful answer", + answer: generateResponse{ + Content: []content{{Text: "John"}}, + }, + expectedResult: "John", + }, + { + name: "when the server has an error", + answer: generateResponse{ + Error: errorMessage{Type: "error", Message: "some error from the server"}, + }, + }, + { + name: "when the server does not respond in time", + answer: generateResponse{}, + timeout: time.Millisecond, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + handler := &testAnthropicHandler{ + t: t, + answer: test.answer, + timeout: test.timeout, + } + server := httptest.NewServer(handler) + defer server.Close() + + a := New("apiKey", test.timeout, nullLogger()) + + settings := &fakeClassConfig{baseURL: server.URL} + res, err := 
a.GenerateAllResults(context.Background(), properties, "What is my name?", nil, false, settings) + + if len(test.answer.Content) == 0 { + assert.Error(t, err) + assert.Nil(t, res) + } else { + assert.NoError(t, err) + assert.Equal(t, test.expectedResult, *res.Result) + } + }) + } + + t.Run("when X-Anthropic-BaseURL header is passed", func(t *testing.T) { + a := New("apiKey", 5*time.Second, nullLogger()) + + baseURL := "http://default-url.com" + ctxWithValue := context.WithValue(context.Background(), + "X-Anthropic-Baseurl", []string{"http://base-url-passed-in-header.com"}) + + buildURL, err := a.getAnthropicURL(ctxWithValue, baseURL) + require.NoError(t, err) + assert.Equal(t, "http://base-url-passed-in-header.com/v1/messages", buildURL) + + buildURL, err = a.getAnthropicURL(context.TODO(), baseURL) + require.NoError(t, err) + assert.Equal(t, "http://default-url.com/v1/messages", buildURL) + }) +} + +type testAnthropicHandler struct { + t *testing.T + answer generateResponse + timeout time.Duration +} + +func (f *testAnthropicHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/v1/messages", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + time.Sleep(f.timeout) + + if len(f.answer.Content) == 0 { + w.WriteHeader(http.StatusInternalServerError) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b generateInput + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +type fakeClassConfig struct { + baseURL string +} + +func (cfg *fakeClassConfig) Tenant() string { + return "" +} + +func (cfg *fakeClassConfig) Class() map[string]interface{} { + return nil +} + +func (cfg *fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + settings := map[string]interface{}{ + "baseURL": cfg.baseURL, + } + return settings +} + +func (cfg *fakeClassConfig) 
Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +// Add more methods to implement the moduletools.ClassConfig interface +func (f fakeClassConfig) BaseURL() string { + return f.baseURL +} + +func (f fakeClassConfig) Model() string { + return "claude-2" +} + +func (f fakeClassConfig) MaxTokens() int { + return 100 +} + +func (f fakeClassConfig) StopSequences() []string { + return []string{} +} + +func (f fakeClassConfig) Temperature() float64 { + return 0.7 +} + +func (f fakeClassConfig) K() int { + return 0 +} + +func (f fakeClassConfig) P() float64 { + return 1.0 +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/config.go b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/config.go new file mode 100644 index 0000000000000000000000000000000000000000..ea47649553ef157a40385123e47c2f36395510b1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modgenerativeanthropic + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/generative-anthropic/config" +) + +func (m *GenerativeAnthropicModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeAnthropicModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeAnthropicModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := config.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..eebbb0061378abfd63deb4248f5ddc1caab7d675 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/config/class_settings.go @@ -0,0 +1,139 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + baseURLProperty = "baseURL" + modelProperty = "model" + maxTokensProperty = "maxTokens" + stopSequencesProperty = "stopSequences" + temperatureProperty = "temperature" + topKProperty = "topK" + topPProperty = "topP" +) + +// todo: anthropic make a distinction between input and output tokens +// so have a context window and a max output tokens, while the max +// input tokens for all models is 200000, the max output tokens is shorter at +// 4096 +var defaultMaxTokens = map[string]int{ + "claude-3-5-sonnet-20240620": 4096, + "claude-3-opus-20240229": 4096, + "claude-3-sonnet-20240229": 4096, + "claude-3-haiku-20240307": 4096, + "claude-3-5-sonnet-20241022": 8192, + "claude-3-5-haiku-20241022": 8192, +} + +var ( + DefaultBaseURL = "https://api.anthropic.com" + DefaultAnthropicModel = "claude-3-5-sonnet-20240620" + DefaultAnthropicTemperature = 1.0 + // DefaultAnthropicMaxTokens - 4096 is the max output tokens, input tokens are typically much larger + DefaultAnthropicMaxTokens = 4096 + DefaultAnthropicTopK = 0 + DefaultAnthropicTopP = 0.0 + DefaultAnthropicStopSequences = []string{} +) + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +// NewClassSettings creates a new classSettings instance +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("generative-anthropic")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + return nil +} + +func (ic 
*classSettings) getStringProperty(property string, defaultValue string) *string { + asString := ic.propertyValuesHelper.GetPropertyAsStringWithNotExists(ic.cfg, property, "", defaultValue) + return &asString +} + +func (ic *classSettings) getIntProperty(name string, defaultValue *int) *int { + wrongVal := -1 + return ic.propertyValuesHelper.GetPropertyAsIntWithNotExists(ic.cfg, name, &wrongVal, defaultValue) +} + +func (ic *classSettings) getFloatProperty(name string, defaultValue *float64) *float64 { + wrongVal := float64(-1.0) + return ic.propertyValuesHelper.GetPropertyAsFloat64WithNotExists(ic.cfg, name, &wrongVal, defaultValue) +} + +func (ic *classSettings) getListOfStringsProperty(name string, defaultValue []string) *[]string { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return &defaultValue + } + + model, ok := ic.cfg.ClassByModuleName("generative-anthropic")[name] + if ok { + asStringList, ok := model.([]string) + if ok { + return &asStringList + } + var empty []string + return &empty + } + return &defaultValue +} + +func (ic *classSettings) GetMaxTokensForModel(model string) *int { + if maxTokens, ok := defaultMaxTokens[model]; ok { + return &maxTokens + } + return &DefaultAnthropicMaxTokens +} + +func (ic *classSettings) BaseURL() string { + return *ic.getStringProperty(baseURLProperty, DefaultBaseURL) +} + +func (ic *classSettings) Model() string { + return *ic.getStringProperty(modelProperty, DefaultAnthropicModel) +} + +func (ic *classSettings) MaxTokens() *int { + return ic.getIntProperty(maxTokensProperty, nil) +} + +func (ic *classSettings) Temperature() float64 { + return *ic.getFloatProperty(temperatureProperty, &DefaultAnthropicTemperature) +} + +func (ic *classSettings) TopK() int { + return *ic.getIntProperty(topKProperty, &DefaultAnthropicTopK) +} + +func (ic *classSettings) TopP() float64 { + return *ic.getFloatProperty(topPProperty, &DefaultAnthropicTopP) +} + +func (ic 
*classSettings) StopSequences() []string { + return *ic.getListOfStringsProperty(stopSequencesProperty, DefaultAnthropicStopSequences) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..68e0bb7a381102511a2f218a9b537164682300be --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/config/class_settings_test.go @@ -0,0 +1,158 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantModel string + wantMaxTokens *int + wantTemperature float64 + wantTopK int + wantTopP float64 + wantStopSequences []string + wantBaseURL string + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantModel: "claude-3-5-sonnet-20240620", + wantMaxTokens: nil, + wantTemperature: 1.0, + wantTopK: 0, + wantTopP: 0.0, + wantStopSequences: []string{}, + wantBaseURL: "https://api.anthropic.com", + wantErr: nil, + }, + { + name: "everything non default configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "claude-3-opus-20240229", + "maxTokens": 3000, + "temperature": 0.7, + "topK": 5, + "topP": 0.9, + "stopSequences": []string{"stop1", "stop2"}, + 
"baseURL": "https://custom.anthropic.api", + }, + }, + wantModel: "claude-3-opus-20240229", + wantMaxTokens: ptrInt(3000), + wantTemperature: 0.7, + wantTopK: 5, + wantTopP: 0.9, + wantStopSequences: []string{"stop1", "stop2"}, + wantBaseURL: "https://custom.anthropic.api", + wantErr: nil, + }, + { + name: "new model name configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "some-new-model-name", + }, + }, + wantModel: "some-new-model-name", + wantMaxTokens: nil, + wantTemperature: 1.0, + wantTopK: 0, + wantTopP: 0.0, + wantStopSequences: []string{}, + wantBaseURL: "https://api.anthropic.com", + wantErr: nil, + }, + { + name: "default settings with claude-3-haiku-20240307", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "claude-3-haiku-20240307", + }, + }, + wantModel: "claude-3-haiku-20240307", + wantMaxTokens: nil, + wantTemperature: 1.0, + wantTopK: 0, + wantTopP: 0.0, + wantStopSequences: []string{}, + wantBaseURL: "https://api.anthropic.com", + wantErr: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.Equal(t, tt.wantErr.Error(), ic.Validate(nil).Error()) + } else { + assert.NoError(t, ic.Validate(nil)) + assert.Equal(t, tt.wantModel, ic.Model()) + assert.Equal(t, tt.wantMaxTokens, ic.MaxTokens()) + assert.Equal(t, tt.wantTemperature, ic.Temperature()) + assert.Equal(t, tt.wantTopK, ic.TopK()) + assert.Equal(t, tt.wantTopP, ic.TopP()) + assert.Equal(t, tt.wantStopSequences, ic.StopSequences()) + assert.Equal(t, tt.wantBaseURL, ic.BaseURL()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f 
fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +func ptrInt(in int) *int { + return &in +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/module.go new file mode 100644 index 0000000000000000000000000000000000000000..e64e5a1732d777a40dc781065bd88b94e3f530e1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/module.go @@ -0,0 +1,87 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativeanthropic + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-anthropic/clients" + "github.com/weaviate/weaviate/modules/generative-anthropic/parameters" +) + +const Name = "generative-anthropic" + +func New() *GenerativeAnthropicModule { + return &GenerativeAnthropicModule{} +} + +type GenerativeAnthropicModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeAnthropicModule) Name() string { + return Name +} + +func (m *GenerativeAnthropicModule) Type() modulecapabilities.ModuleType { + return 
modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeAnthropicModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init q/a") + } + + return nil +} + +func (m *GenerativeAnthropicModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("ANTHROPIC_APIKEY") + + client := clients.New(apiKey, timeout, logger) + + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(m.generative) + + return nil +} + +func (m *GenerativeAnthropicModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeAnthropicModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..a731caa382a80061d50d7eef6ef26fef9fa60c7d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/parameters/graphql.go @@ -0,0 +1,101 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "baseURL": &graphql.InputObjectFieldConfig{ + Description: "baseURL", + Type: graphql.String, + }, + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + "maxTokens": &graphql.InputObjectFieldConfig{ + Description: "maxTokens", + Type: graphql.Int, + }, + "stopSequences": &graphql.InputObjectFieldConfig{ + Description: "stopSequences", + Type: graphql.NewList(graphql.String), + }, + "topP": &graphql.InputObjectFieldConfig{ + Description: "topP", + Type: graphql.Float, + }, + "topK": &graphql.InputObjectFieldConfig{ + Description: "topK", + Type: graphql.Int, + }, + "images": &graphql.InputObjectFieldConfig{ + Description: "images", + Type: graphql.NewList(graphql.String), + }, + "imageProperties": &graphql.InputObjectFieldConfig{ + Description: "imageProperties", + Type: graphql.NewList(graphql.String), + }, + }, + }), + DefaultValue: nil, + } +} + +func output(prefix string) *graphql.Field { + return &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sFields", prefix, Name), + Fields: graphql.Fields{ + "type": &graphql.Field{Type: graphql.String}, + "error": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sErrorFields", prefix, Name), + Fields: graphql.Fields{ + "type": &graphql.Field{Type: graphql.String}, + "message": &graphql.Field{Type: graphql.String}, + }, + })}, + "id": &graphql.Field{Type: graphql.String}, + 
"role": &graphql.Field{Type: graphql.String}, + "content": &graphql.Field{Type: graphql.NewList(graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sContentFields", prefix, Name), + Fields: graphql.Fields{ + "type": &graphql.Field{Type: graphql.String}, + "text": &graphql.Field{Type: graphql.String}, + }, + }))}, + "model": &graphql.Field{Type: graphql.String}, + "stopReason": &graphql.Field{Type: graphql.String}, + "stopSequence": &graphql.Field{Type: graphql.String}, + "usage": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sUsageFields", prefix, Name), + Fields: graphql.Fields{ + "inputTokens": &graphql.Field{Type: graphql.Int}, + "outputTokens": &graphql.Field{Type: graphql.Int}, + }, + })}, + }, + })} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/parameters/params.go new file mode 100644 index 0000000000000000000000000000000000000000..49132e1bd93fcce24be2c1f052dd9d6064990862 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/parameters/params.go @@ -0,0 +1,61 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + BaseURL string + Model string + Temperature *float64 + MaxTokens *int + StopSequences []string + TopP *float64 + TopK *int + Images []*string + ImageProperties []string +} + +func extract(field *ast.ObjectField) interface{} { + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "baseURL": + out.BaseURL = gqlparser.GetValueAsStringOrEmpty(f) + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "temperature": + out.Temperature = gqlparser.GetValueAsFloat64(f) + case "maxTokens": + out.MaxTokens = gqlparser.GetValueAsInt(f) + case "stopSequences": + out.StopSequences = gqlparser.GetValueAsStringArray(f) + case "topP": + out.TopP = gqlparser.GetValueAsFloat64(f) + case "topK": + out.TopK = gqlparser.GetValueAsInt(f) + case "images": + out.Images = gqlparser.GetValueAsStringPtrArray(f) + case "imageProperties": + out.ImageProperties = gqlparser.GetValueAsStringArray(f) + default: + // do nothing + } + } + } + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/parameters/provider.go b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/parameters/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..7cc511ec0b108122bb5a3f1447caac1f2cda1a1d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anthropic/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+//
+// CONTACT: hello@weaviate.io
+//
+
+package parameters
+
+import "github.com/weaviate/weaviate/entities/modulecapabilities"
+
+const Name = "anthropic"
+
+func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty {
+	return map[string]modulecapabilities.GenerativeProperty{
+		Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: output, ExtractRequestParamsFunction: extract},
+	}
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/clients/anyscale.go b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/clients/anyscale.go
new file mode 100644
index 0000000000000000000000000000000000000000..d05b74c355a009c04a53ea28698f0a554eee2f49
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/clients/anyscale.go
@@ -0,0 +1,217 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+// CONTACT: hello@weaviate.io
+//
+
+package clients
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"time"
+
+	"github.com/weaviate/weaviate/usecases/modulecomponents"
+	"github.com/weaviate/weaviate/usecases/modulecomponents/generative"
+
+	"github.com/pkg/errors"
+	"github.com/sirupsen/logrus"
+	"github.com/weaviate/weaviate/entities/modulecapabilities"
+	"github.com/weaviate/weaviate/entities/moduletools"
+	"github.com/weaviate/weaviate/modules/generative-anyscale/config"
+	anyscaleparams "github.com/weaviate/weaviate/modules/generative-anyscale/parameters"
+)
+
+type anyscale struct {
+	apiKey     string
+	httpClient *http.Client
+	logger     logrus.FieldLogger
+}
+
+func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *anyscale {
+	return &anyscale{
+		apiKey: apiKey,
+		httpClient: &http.Client{
+			Timeout: timeout,
+		},
+		logger: logger,
+	}
+}
+
+func (v *anyscale) GenerateSingleResult(ctx context.Context, properties *modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) {
+	forPrompt, err := generative.MakeSinglePrompt(generative.Text(properties), prompt)
+	if err != nil {
+		return nil, err
+	}
+	return v.Generate(ctx, cfg, forPrompt, options, debug)
+}
+
+func (v *anyscale) GenerateAllResults(ctx context.Context, properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) {
+	forTask, err := generative.MakeTaskPrompt(generative.Texts(properties), task)
+	if err != nil {
+		return nil, err
+	}
+	return v.Generate(ctx, cfg, forTask, options, debug)
+}
+
+func (v *anyscale) getParameters(cfg moduletools.ClassConfig, options interface{}) anyscaleparams.Params {
+	settings := config.NewClassSettings(cfg)
+
+	var params anyscaleparams.Params
+	if p, ok := options.(anyscaleparams.Params); ok {
+		params = p
+	}
+	if params.BaseURL == "" {
+		baseURL := settings.BaseURL()
+		params.BaseURL = baseURL
+	}
+	if params.Model == "" {
+		model := settings.Model()
+		params.Model = model
+	}
+	if params.Temperature == nil {
+		temperature := settings.Temperature()
+		params.Temperature = &temperature
+	}
+	return params
+}
+
+// Generate sends the prompt to the Anyscale chat-completions endpoint and
+// returns the content of the first returned choice.
+func (v *anyscale) Generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) {
+	params := v.getParameters(cfg, options)
+	debugInformation := v.getDebugInformation(debug, prompt)
+
+	anyscaleUrl := v.getAnyscaleUrl(ctx, params.BaseURL)
+	anyscalePrompt := []map[string]string{
+		{"role": "system", "content": "You are a helpful assistant."},
+		{"role": "user", "content": prompt},
+	}
+	input := generateInput{
+		Messages:    anyscalePrompt,
+		Model:       params.Model,
+		Temperature: params.Temperature,
+	}
+
+	body, err := json.Marshal(input)
+	if err != nil {
+		return nil, errors.Wrap(err, "marshal body")
+	}
+
+	req, err := http.NewRequestWithContext(ctx, "POST", anyscaleUrl,
+		bytes.NewReader(body))
+	if err != nil {
+		return nil, errors.Wrap(err, "create POST request")
+	}
+	apiKey, err := v.getApiKey(ctx)
+	if err != nil {
+		return nil, errors.Wrap(err, "Anyscale (OpenAI) API Key")
+	}
+	req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey))
+	req.Header.Add("Content-Type", "application/json")
+
+	res, err := v.httpClient.Do(req)
+	if err != nil {
+		return nil, errors.Wrap(err, "send POST request")
+	}
+	defer res.Body.Close()
+
+	bodyBytes, err := io.ReadAll(res.Body)
+	if err != nil {
+		return nil, errors.Wrap(err, "read response body")
+	}
+
+	var resBody generateResponse
+	if err := json.Unmarshal(bodyBytes, &resBody); err != nil {
+		return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes)))
+	}
+
+	if res.StatusCode != 200 || resBody.Error != nil {
+		if resBody.Error != nil {
+			return nil, errors.Errorf("connection to Anyscale API failed with status: %d error: %v", res.StatusCode, resBody.Error.Message)
+		}
+		return nil, errors.Errorf("connection to Anyscale API failed with status: %d", res.StatusCode)
+	}
+
+	// Guard against a well-formed 200 response with an empty choices array,
+	// which would otherwise cause an index-out-of-range panic below.
+	if len(resBody.Choices) == 0 {
+		return nil, errors.Errorf("connection to Anyscale API failed with status: %d: no choices in response", res.StatusCode)
+	}
+	textResponse := resBody.Choices[0].Message.Content
+
+	return &modulecapabilities.GenerateResponse{
+		Result: &textResponse,
+		Debug:  debugInformation,
+	}, nil
+}
+
+func (v *anyscale) getAnyscaleUrl(ctx context.Context, baseURL string) string {
+	passedBaseURL := baseURL
+	if headerBaseURL := modulecomponents.GetValueFromContext(ctx, "X-Anyscale-Baseurl"); headerBaseURL != "" {
+		passedBaseURL = headerBaseURL
+	}
+	return fmt.Sprintf("%s/v1/chat/completions", passedBaseURL)
+}
+
+func (v *anyscale) getApiKey(ctx context.Context) (string, error) {
+	// note Anyscale uses the OpenAI API Key in its requests.
+	if apiKey := modulecomponents.GetValueFromContext(ctx, "X-Anyscale-Api-Key"); apiKey != "" {
+		return apiKey, nil
+	}
+	if v.apiKey != "" {
+		return v.apiKey, nil
+	}
+	return "", errors.New("no api key found " +
+		"neither in request header: X-Anyscale-Api-Key " +
+		"nor in environment variable under ANYSCALE_APIKEY")
+}
+
+func (v *anyscale) getDebugInformation(debug bool, prompt string) *modulecapabilities.GenerateDebugInformation {
+	if debug {
+		return &modulecapabilities.GenerateDebugInformation{
+			Prompt: prompt,
+		}
+	}
+	return nil
+}
+
+type generateInput struct {
+	Model       string              `json:"model"`
+	Messages    []map[string]string `json:"messages"`
+	Temperature *float64            `json:"temperature,omitempty"`
+}
+
+type Message struct {
+	Role    string `json:"role"`
+	Content string `json:"content"`
+}
+
+type Choice struct {
+	Message      Message `json:"message"`
+	Index        int     `json:"index"`
+	FinishReason string  `json:"finish_reason"`
+}
+
+// The entire response for an error ends up looking different, may want to add
omitempty everywhere. +type generateResponse struct { + ID string `json:"id"` + Object string `json:"object"` + Created int64 `json:"created"` + Model string `json:"model"` + Choices []Choice `json:"choices"` + Usage map[string]int `json:"usage"` + Error *anyscaleApiError `json:"error,omitempty"` +} + +type anyscaleApiError struct { + Message string `json:"message"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/clients/anyscale_meta.go b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/clients/anyscale_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..86ebcea70feb544a5e29fa213da1e428f63275d7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/clients/anyscale_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *anyscale) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Generative Search - Anyscale", + "documentationHref": "https://docs.anyscale.com/endpoints/overview", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/clients/anyscale_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/clients/anyscale_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..cf0ac1cc1d73e1e37f7116eb39568c4e1cf4d18f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/clients/anyscale_meta_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["name"] + assert.True(t, metaModel != nil) + documentationHref := meta["documentationHref"] + assert.True(t, documentationHref != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + 
w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "hostname": "http://127.0.0.1:8080", + "modules": { + "generative-cohere": { + "documentationHref": URL_HERE", + "name": "Cohere Generative Module" + } + }, + "version": "1.16.0" +}` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/clients/anyscale_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/clients/anyscale_test.go new file mode 100644 index 0000000000000000000000000000000000000000..7505be85bf9da9faf8b8a0a6b37bbab00729707d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/clients/anyscale_test.go @@ -0,0 +1,167 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func TestGetAnswer(t *testing.T) { + textProperties := []*modulecapabilities.GenerateProperties{{Text: map[string]string{"prop": "My name is john"}}} + + tests := []struct { + name string + answer generateResponse + timeout time.Duration + expectedResult string + }{ + { + name: "when the server has a successful aner", + answer: generateResponse{ + Choices: []Choice{{Message: Message{Content: "John"}}}, + Error: nil, + }, + expectedResult: "John", + }, + { + name: "when the server has a an error", + answer: generateResponse{ + Error: &anyscaleApiError{ + Message: "some error from the server", + }, + }, + }, + { + name: "when the server does not respond in time", + answer: generateResponse{Error: &anyscaleApiError{Message: "context deadline exceeded"}}, + timeout: time.Second, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + handler := &testAnswerHandler{ + t: t, + answer: test.answer, + timeout: test.timeout, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", test.timeout, nullLogger()) + + cfg := &fakeClassConfig{baseURL: server.URL} + res, err := c.GenerateAllResults(context.Background(), textProperties, "What is my name?", nil, false, cfg) + + if test.answer.Error != nil { + assert.Contains(t, err.Error(), test.answer.Error.Message) + } else { + assert.Equal(t, test.expectedResult, *res.Result) + } + }) + } + + t.Run("when 
X-Anyscale-BaseURL header is passed", func(t *testing.T) { + c := New("apiKey", 5*time.Second, nullLogger()) + baseUrl := "https://api.endpoints.anyscale.com" + buildURL := c.getAnyscaleUrl(context.Background(), baseUrl) + assert.Equal(t, "https://api.endpoints.anyscale.com/v1/chat/completions", buildURL) + }) +} + +type testAnswerHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + answer generateResponse + timeout time.Duration +} + +func (f *testAnswerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/v1/chat/completions", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + time.Sleep(f.timeout) + + if f.answer.Error != nil && f.answer.Error.Message != "" { + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +type fakeClassConfig struct { + baseURL string +} + +func (cfg *fakeClassConfig) Tenant() string { + return "" +} + +func (cfg *fakeClassConfig) Class() map[string]interface{} { + return nil +} + +func (cfg *fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + settings := map[string]interface{}{ + "baseURL": cfg.baseURL, + } + return settings +} + +func (cfg *fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/config.go 
b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/config.go new file mode 100644 index 0000000000000000000000000000000000000000..ae8d5bb5d7fa4ae4538dd124d0b87034cfbcccec --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativeanyscale + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/generative-anyscale/config" +) + +func (m *GenerativeAnyscaleModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeAnyscaleModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeAnyscaleModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := config.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..5a1efd80457c60ed79a3240fe521250ba89257c1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/config/class_settings.go @@ -0,0 +1,89 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// 
\ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + baseURLProperty = "baseURL" + modelProperty = "model" + temperatureProperty = "temperature" +) + +var availableAnyscaleModels = []string{ + "meta-llama/Llama-2-70b-chat-hf", + "meta-llama/Llama-2-13b-chat-hf", + "meta-llama/Llama-2-7b-chat-hf", + "codellama/CodeLlama-34b-Instruct-hf", + "mistralai/Mistral-7B-Instruct-v0.1", + "mistralai/Mixtral-8x7B-Instruct-v0.1", +} + +// note we might want to separate the baseURL and completions URL in the future. Fine-tuned models also use this URL. 12/3/23 +var ( + DefaultBaseURL = "https://api.endpoints.anyscale.com" + DefaultAnyscaleModel = "meta-llama/Llama-2-70b-chat-hf" + DefaultAnyscaleTemperature float64 = 0 +) + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("generative-anyscale")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + model := ic.getStringProperty(modelProperty, DefaultAnyscaleModel) + if model == nil || !ic.validateModel(*model) { + return errors.Errorf("wrong Anyscale model name, available model names are: %v", availableAnyscaleModels) + } + + return nil +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) *string { + asString := 
ic.propertyValuesHelper.GetPropertyAsStringWithNotExists(ic.cfg, name, "", defaultValue) + return &asString +} + +func (ic *classSettings) getFloat64Property(name string, defaultValue *float64) *float64 { + var wrongVal float64 = -1 + return ic.propertyValuesHelper.GetPropertyAsFloat64WithNotExists(ic.cfg, name, &wrongVal, defaultValue) +} + +func (ic *classSettings) validateModel(model string) bool { + return basesettings.ValidateSetting(model, availableAnyscaleModels) +} + +func (ic *classSettings) BaseURL() string { + return *ic.getStringProperty(baseURLProperty, DefaultBaseURL) +} + +func (ic *classSettings) Model() string { + return *ic.getStringProperty(modelProperty, DefaultAnyscaleModel) +} + +func (ic *classSettings) Temperature() float64 { + return *ic.getFloat64Property(temperatureProperty, &DefaultAnyscaleTemperature) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..358eaf03a3f1104e6e6e48827ebe7a1a5df172c3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/config/class_settings_test.go @@ -0,0 +1,127 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantModel string + wantTemperature float64 + wantBaseURL string + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantModel: "meta-llama/Llama-2-70b-chat-hf", + wantTemperature: 0, + wantBaseURL: "https://api.endpoints.anyscale.com", + wantErr: nil, + }, + { + name: "everything non default configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "meta-llama/Llama-2-70b-chat-hf", + "temperature": 1, + }, + }, + wantModel: "meta-llama/Llama-2-70b-chat-hf", + wantTemperature: 1, + wantBaseURL: "https://api.endpoints.anyscale.com", + wantErr: nil, + }, + { + name: "everything non default configured and base url", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "meta-llama/Llama-2-70b-chat-hf", + "temperature": 1, + "baseURL": "https://custom.endpoint.com", + }, + }, + wantModel: "meta-llama/Llama-2-70b-chat-hf", + wantTemperature: 1, + wantBaseURL: "https://custom.endpoint.com", + wantErr: nil, + }, + { + name: "unsupported model", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "unsupported", + "temperature": 1, + "baseURL": "https://custom.endpoint.com", + }, + }, + wantErr: errors.New("wrong Anyscale model name, available model names are: [meta-llama/Llama-2-70b-chat-hf meta-llama/Llama-2-13b-chat-hf meta-llama/Llama-2-7b-chat-hf codellama/CodeLlama-34b-Instruct-hf mistralai/Mistral-7B-Instruct-v0.1 mistralai/Mixtral-8x7B-Instruct-v0.1]"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) 
{ + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.Equal(t, tt.wantErr.Error(), ic.Validate(nil).Error()) + } else { + assert.NoError(t, ic.Validate(nil)) + assert.Equal(t, tt.wantBaseURL, ic.BaseURL()) + assert.Equal(t, tt.wantModel, ic.Model()) + assert.Equal(t, tt.wantTemperature, ic.Temperature()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/module.go new file mode 100644 index 0000000000000000000000000000000000000000..e85d9e765052e4b11760c40af68d39d5504274e1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/module.go @@ -0,0 +1,86 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modgenerativeanyscale + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-anyscale/clients" + "github.com/weaviate/weaviate/modules/generative-anyscale/parameters" +) + +const Name = "generative-anyscale" + +func New() *GenerativeAnyscaleModule { + return &GenerativeAnyscaleModule{} +} + +type GenerativeAnyscaleModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeAnyscaleModule) Name() string { + return Name +} + +func (m *GenerativeAnyscaleModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeAnyscaleModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", Name) + } + + return nil +} + +func (m *GenerativeAnyscaleModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("ANYSCALE_APIKEY") + + client := clients.New(apiKey, timeout, logger) + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(client) + + return nil +} + +func (m *GenerativeAnyscaleModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeAnyscaleModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// verify we implement the 
modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..3d0142afa3290b29c347272d01ed34b252351f73 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/parameters/graphql.go @@ -0,0 +1,42 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "baseURL": &graphql.InputObjectFieldConfig{ + Description: "baseURL", + Type: graphql.String, + }, + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + }, + }), + DefaultValue: nil, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/parameters/params.go new file mode 100644 index 0000000000000000000000000000000000000000..3512653f54291a9c2204f70c20df476115447444 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/parameters/params.go @@ -0,0 +1,43 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + BaseURL string + Model string + Temperature *float64 +} + +func extract(field *ast.ObjectField) interface{} { + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "baseURL": + out.BaseURL = gqlparser.GetValueAsStringOrEmpty(f) + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "temperature": + out.Temperature = gqlparser.GetValueAsFloat64(f) + default: + // do nothing + } + } + } + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/parameters/provider.go b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/parameters/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..de986649735abbc0b9b63731afdb67c135117e61 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-anyscale/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +const Name = "anyscale" + +func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty { + return map[string]modulecapabilities.GenerativeProperty{ + Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: nil, ExtractRequestParamsFunction: extract}, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/aws.go b/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/aws.go new file mode 100644 index 0000000000000000000000000000000000000000..be4a56b4085b6420b97a8871b8732da7d22bab12 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/aws.go @@ -0,0 +1,836 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + "time" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/credentials" + "github.com/aws/aws-sdk-go-v2/service/bedrockruntime" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + generativeconfig "github.com/weaviate/weaviate/modules/generative-aws/config" + awsparams "github.com/weaviate/weaviate/modules/generative-aws/parameters" + "github.com/weaviate/weaviate/usecases/modulecomponents" + generativecomponents "github.com/weaviate/weaviate/usecases/modulecomponents/generative" +) + +func buildBedrockUrl(service, region, model string) string { + urlTemplate := "https://%s.%s.amazonaws.com/model/%s/invoke" + return fmt.Sprintf(urlTemplate, fmt.Sprintf("%s-runtime", service), region, model) +} + +func buildSagemakerUrl(service, region, endpoint string) string { + urlTemplate := "https://runtime.%s.%s.amazonaws.com/endpoints/%s/invocations" + return fmt.Sprintf(urlTemplate, service, region, endpoint) +} + +type awsClient struct { + awsAccessKey string + awsSecretKey string + awsSessionToken string + buildBedrockUrlFn func(service, region, model string) string + buildSagemakerUrlFn func(service, region, endpoint string) string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(awsAccessKey, awsSecretKey, awsSessionToken string, timeout time.Duration, logger logrus.FieldLogger) *awsClient { + return &awsClient{ + awsAccessKey: awsAccessKey, + awsSecretKey: awsSecretKey, + awsSessionToken: awsSessionToken, + httpClient: &http.Client{ + Timeout: timeout, + }, + buildBedrockUrlFn: buildBedrockUrl, + buildSagemakerUrlFn: buildSagemakerUrl, + logger: logger, + } +} + +func (v *awsClient) GenerateSingleResult(ctx context.Context, 
properties *modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forPrompt, err := generativecomponents.MakeSinglePrompt(generativecomponents.Text(properties), prompt) + if err != nil { + return nil, err + } + return v.Generate(ctx, cfg, forPrompt, generativecomponents.Blobs([]*modulecapabilities.GenerateProperties{properties}), options, debug) +} + +func (v *awsClient) GenerateAllResults(ctx context.Context, properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forTask, err := generativecomponents.MakeTaskPrompt(generativecomponents.Texts(properties), task) + if err != nil { + return nil, err + } + return v.Generate(ctx, cfg, forTask, generativecomponents.Blobs(properties), options, debug) +} + +func (v *awsClient) Generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, imageProperties []map[string]*string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) { + params := v.getParameters(cfg, options, imageProperties) + service := params.Service + debugInformation := v.getDebugInformation(debug, prompt) + + accessKey, err := v.getAwsAccessKey(ctx) + if err != nil { + return nil, errors.Wrapf(err, "AWS Access Key") + } + secretKey, err := v.getAwsAccessSecret(ctx) + if err != nil { + return nil, errors.Wrapf(err, "AWS Secret Key") + } + awsSessionToken, err := v.getAwsSessionToken(ctx) + if err != nil { + return nil, err + } + maxRetries := 5 + + if v.isBedrock(service) { + return v.sendBedrockRequest(ctx, + prompt, + accessKey, secretKey, awsSessionToken, maxRetries, + params, + cfg, + debugInformation, + ) + } else if v.isSagemaker(service) { + var body []byte + var endpointUrl string + var host string + var path string + var err error + + region := params.Region + endpoint := 
params.Endpoint + targetModel := params.TargetModel + targetVariant := params.TargetVariant + + endpointUrl = v.buildSagemakerUrlFn(service, region, endpoint) + host = "runtime." + service + "." + region + ".amazonaws.com" + path = "/endpoints/" + endpoint + "/invocations" + + headers := map[string]string{ + "accept": "*/*", + "content-type": contentType, + } + + if targetModel != "" { + headers["x-amzn-sagemaker-target-model"] = targetModel + } + if targetVariant != "" { + headers["x-amzn-sagemaker-target-variant"] = targetVariant + } + body, err = json.Marshal(sagemakerGenerateRequest{ + Prompt: prompt, + }) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + headers["host"] = host + amzDate, headers, authorizationHeader := getAuthHeader(accessKey, secretKey, host, service, region, path, body, headers) + headers["Authorization"] = authorizationHeader + headers["x-amz-date"] = amzDate + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpointUrl, bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + + for k, v := range headers { + req.Header.Set(k, v) + } + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + return v.parseSagemakerResponse(bodyBytes, res) + } else { + return &modulecapabilities.GenerateResponse{ + Result: nil, + Debug: debugInformation, + }, nil + } +} + +func (v *awsClient) getDebugInformation(debug bool, prompt string) *modulecapabilities.GenerateDebugInformation { + if debug { + return &modulecapabilities.GenerateDebugInformation{ + Prompt: prompt, + } + } + return nil +} + +func (v *awsClient) getParameters(cfg moduletools.ClassConfig, options interface{}, imagePropertiesArray []map[string]*string) awsparams.Params { + settings := 
generativeconfig.NewClassSettings(cfg) + + service := settings.Service() + var params awsparams.Params + if p, ok := options.(awsparams.Params); ok { + params = p + } + + if params.Service == "" { + params.Service = settings.Service() + } + if params.Region == "" { + params.Region = settings.Region() + } + if params.Endpoint == "" { + params.Endpoint = settings.Endpoint() + } + if params.TargetModel == "" { + params.TargetModel = settings.TargetModel() + } + if params.TargetVariant == "" { + params.TargetVariant = settings.TargetVariant() + } + if params.Model == "" { + params.Model = settings.Model() + } + if params.Temperature == nil { + temperature := settings.Temperature(service, params.Model) + params.Temperature = temperature + } + + params.Images = generativecomponents.ParseImageProperties(params.Images, params.ImageProperties, imagePropertiesArray) + + return params +} + +func (v *awsClient) sendBedrockRequest( + ctx context.Context, + prompt string, + awsKey, awsSecret, awsSessionToken string, + maxRetries int, + params awsparams.Params, + cfg moduletools.ClassConfig, + debugInformation *modulecapabilities.GenerateDebugInformation, +) (*modulecapabilities.GenerateResponse, error) { + model := params.Model + region := params.Region + req, err := v.createRequestBody(prompt, params, cfg) + if err != nil { + return nil, fmt.Errorf("failed to create request for model %s: %w", model, err) + } + + body, err := json.Marshal(req) + if err != nil { + return nil, fmt.Errorf("failed to marshal request for model %s: %w", model, err) + } + + sdkConfig, err := config.LoadDefaultConfig(ctx, + config.WithRegion(region), + config.WithCredentialsProvider( + credentials.NewStaticCredentialsProvider(awsKey, awsSecret, awsSessionToken), + ), + config.WithRetryMaxAttempts(maxRetries), + ) + if err != nil { + return nil, fmt.Errorf("failed to load AWS configuration: %w", err) + } + + client := bedrockruntime.NewFromConfig(sdkConfig) + result, err := client.InvokeModel(ctx, 
&bedrockruntime.InvokeModelInput{ + ModelId: aws.String(model), + ContentType: aws.String("application/json"), + Accept: aws.String("application/json"), + Body: body, + }) + if err != nil { + errMsg := err.Error() + if strings.Contains(errMsg, "no such host") { + return nil, fmt.Errorf("Bedrock service is not available in the selected region. " + + "Please double-check the service availability for your region at " + + "https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services/") + } else if strings.Contains(errMsg, "Could not resolve the foundation model") { + return nil, fmt.Errorf("could not resolve the foundation model from model identifier: \"%v\". "+ + "Please verify that the requested model exists and is accessible within the specified region", model) + } else { + return nil, fmt.Errorf("couldn't invoke %s model: %w", model, err) + } + } + + return v.parseBedrockResponse(result.Body, model, debugInformation) +} + +func (v *awsClient) createRequestBody(prompt string, params awsparams.Params, cfg moduletools.ClassConfig) (interface{}, error) { + settings := generativeconfig.NewClassSettings(cfg) + model := params.Model + service := settings.Service() + if v.isAmazonTitanModel(model) { + return bedrockAmazonGenerateRequest{ + InputText: prompt, + }, nil + } else if v.isAmazonNovaModel(model) { + var content []bedrockAmazonNovaContent + for i := range params.Images { + content = append(content, bedrockAmazonNovaContent{ + Image: &bedrockAmazonNovaContentImage{ + Format: "jpg", + Source: bedrockAmazonNovaContentImageSource{ + Bytes: params.Images[i], + }, + }, + }) + } + content = append(content, bedrockAmazonNovaContent{ + Text: &prompt, + }) + return bedrockAmazonNovaRequest{ + Messages: []bedrockAmazonNovaMessage{ + { + Role: "user", + Content: content, + }, + }, + InferenceConfig: &bedrockAmazonNovaInferenceConfig{ + Temperature: params.Temperature, + }, + }, nil + } else if v.isAnthropicClaude3Model(model) { + var content 
[]bedrockAnthropicClaude3Content + for i := range params.Images { + imageName := fmt.Sprintf("Image %d:", i+1) + content = append(content, bedrockAnthropicClaude3Content{ + Type: "text", + Text: &imageName, + }) + content = append(content, bedrockAnthropicClaude3Content{ + Type: "image", + Source: &bedrockAnthropicClaudeV3Source{ + ContentType: "base64", + MediaType: "image/jpeg", + Data: params.Images[i], + }, + }) + } + content = append(content, bedrockAnthropicClaude3Content{ + Type: "text", + Text: &prompt, + }) + return bedrockAnthropicClaude3Request{ + AnthropicVersion: "bedrock-2023-05-31", + MaxTokens: settings.MaxTokenCount(service, model), + Messages: []bedrockAnthropicClaude3Message{ + { + Role: "user", + Content: content, + }, + }, + }, nil + } else if v.isAnthropicModel(model) { + var builder strings.Builder + builder.WriteString("\n\nHuman: ") + builder.WriteString(prompt) + builder.WriteString("\n\nAssistant:") + return bedrockAnthropicGenerateRequest{ + Prompt: builder.String(), + Temperature: params.Temperature, + MaxTokensToSample: settings.MaxTokenCount(service, model), + StopSequences: settings.StopSequences(service, model), + TopK: settings.TopK(service, model), + TopP: settings.TopP(service, model), + AnthropicVersion: "bedrock-2023-05-31", + }, nil + } else if v.isAI21Model(model) { + return bedrockAI21GenerateRequest{ + Prompt: prompt, + Temperature: params.Temperature, + MaxTokens: settings.MaxTokenCount(service, model), + TopP: settings.TopP(service, model), + StopSequences: settings.StopSequences(service, model), + }, nil + } else if v.isCohereCommandRModel(model) { + return bedrockCohereCommandRRequest{ + Message: prompt, + }, nil + } else if v.isCohereModel(model) { + return bedrockCohereRequest{ + Prompt: prompt, + Temperature: params.Temperature, + MaxTokens: settings.MaxTokenCount(service, model), + // ReturnLikeliHood: "GENERATION", // contray to docs, this is invalid + }, nil + } else if v.isMistralAIModel(model) { + return 
bedrockMistralAIRequest{ + Prompt: fmt.Sprintf("[INST] %s [/INST]", prompt), + Temperature: params.Temperature, + MaxTokens: settings.MaxTokenCount(service, model), + }, nil + } else if v.isMetaModel(model) { + return bedrockMetaRequest{ + Prompt: prompt, + Temperature: params.Temperature, + MaxGenLen: settings.MaxTokenCount(service, model), + }, nil + } + return nil, fmt.Errorf("unspported model: %s", model) +} + +func (v *awsClient) parseBedrockResponse(bodyBytes []byte, + model string, + debug *modulecapabilities.GenerateDebugInformation, +) (*modulecapabilities.GenerateResponse, error) { + content, err := v.getBedrockResponseMessage(model, bodyBytes) + if err != nil { + return nil, err + } + + if content != "" { + return &modulecapabilities.GenerateResponse{ + Result: &content, + Debug: debug, + }, nil + } + + return &modulecapabilities.GenerateResponse{ + Result: nil, + Debug: debug, + }, nil +} + +func (v *awsClient) getBedrockResponseMessage(model string, bodyBytes []byte) (string, error) { + var content string + var resBodyMap map[string]interface{} + if err := json.Unmarshal(bodyBytes, &resBodyMap); err != nil { + return "", errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + + if v.isCohereCommandRModel(model) { + var resBody bedrockCohereCommandRResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return "", errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + return resBody.Text, nil + } else if v.isAnthropicClaude3Model(model) { + var resBody bedrockAnthropicClaude3Response + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return "", errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + if len(resBody.Content) > 0 && resBody.Content[0].Text != nil { + return *resBody.Content[0].Text, nil + } + return "", fmt.Errorf("no message from model: %s", model) + } else if v.isAnthropicModel(model) { + var resBody bedrockAnthropicClaudeResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return "", errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + return resBody.Completion, nil + } else if v.isAI21Model(model) { + var resBody bedrockAI21Response + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return "", errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + if len(resBody.Completions) > 0 { + return resBody.Completions[0].Data.Text, nil + } + return "", fmt.Errorf("no message from model: %s", model) + } else if v.isMistralAIModel(model) { + var resBody bedrockMistralAIResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return "", errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + if len(resBody.Outputs) > 0 { + return resBody.Outputs[0].Text, nil + } + return "", fmt.Errorf("no message from model: %s", model) + } else if v.isMetaModel(model) { + var resBody bedrockMetaResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return "", errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + return resBody.Generation, nil + } else if v.isAmazonNovaModel(model) { + var resBody bedrockNovaResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return "", errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + if len(resBody.Output.Message.Content) > 0 { + return resBody.Output.Message.Content[0].Text, nil + } + return "", nil + } + + var resBody bedrockGenerateResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return "", errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + + if len(resBody.Results) == 0 && len(resBody.Generations) == 0 { + return "", fmt.Errorf("received empty response from AWS Bedrock") + } + + if len(resBody.Results) > 0 && len(resBody.Results[0].CompletionReason) > 0 { + content = resBody.Results[0].OutputText + } else if len(resBody.Generations) > 0 { + content = resBody.Generations[0].Text + } + + return content, nil +} + +func (v *awsClient) parseSagemakerResponse(bodyBytes []byte, res *http.Response) (*modulecapabilities.GenerateResponse, error) { + var resBody sagemakerGenerateResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if res.StatusCode != 200 || resBody.Message != nil { + if resBody.Message != nil { + return nil, fmt.Errorf("connection to AWS Sagemaker failed with status: %v error: %s", + res.StatusCode, *resBody.Message) + } + return nil, fmt.Errorf("connection to AWS Sagemaker failed with status: %d", res.StatusCode) + } + + if len(resBody.Generations) == 0 { + return nil, fmt.Errorf("received empty response from AWS Sagemaker") + } + + if len(resBody.Generations) > 0 && len(resBody.Generations[0].Id) > 0 { + content := resBody.Generations[0].Text + if content != "" { + return &modulecapabilities.GenerateResponse{ + Result: &content, + }, nil + } + } + return &modulecapabilities.GenerateResponse{ + Result: nil, + }, nil +} + +func (v *awsClient) isSagemaker(service string) bool { + return service == "sagemaker" +} + +func (v *awsClient) isBedrock(service string) bool { + return service == "bedrock" +} + +func (v *awsClient) getAwsAccessKey(ctx context.Context) (string, error) { + if awsAccessKey := modulecomponents.GetValueFromContext(ctx, "X-Aws-Access-Key"); awsAccessKey != "" { + return awsAccessKey, nil + } + if v.awsAccessKey != "" { + return v.awsAccessKey, nil + } + return "", errors.New("no access key found " + + "neither in request header: X-AWS-Access-Key " + + "nor in environment variable under AWS_ACCESS_KEY_ID or AWS_ACCESS_KEY") +} + +func (v *awsClient) getAwsAccessSecret(ctx context.Context) (string, error) { + if awsSecret := modulecomponents.GetValueFromContext(ctx, "X-Aws-Secret-Key"); awsSecret != "" { + return awsSecret, nil + } + if v.awsSecretKey != "" { + return v.awsSecretKey, nil + } + return "", errors.New("no secret found " + + "neither in request header: X-Aws-Secret-Key " + + "nor in environment variable under AWS_SECRET_ACCESS_KEY or AWS_SECRET_KEY") +} + +func (v *awsClient) getAwsSessionToken(ctx context.Context) (string, error) { + if awsSessionToken := modulecomponents.GetValueFromContext(ctx, 
"X-Aws-Session-Token"); awsSessionToken != "" { + return awsSessionToken, nil + } + if v.awsSessionToken != "" { + return v.awsSessionToken, nil + } + return "", nil +} + +func (v *awsClient) isAmazonTitanModel(model string) bool { + return strings.HasPrefix(model, "amazon.titan") +} + +func (v *awsClient) isAmazonNovaModel(model string) bool { + return strings.HasPrefix(model, "amazon.nova") +} + +func (v *awsClient) isAI21Model(model string) bool { + return strings.HasPrefix(model, "ai21") +} + +func (v *awsClient) isAnthropicModel(model string) bool { + return strings.HasPrefix(model, "anthropic") +} + +func (v *awsClient) isAnthropicClaude3Model(model string) bool { + return strings.HasPrefix(model, "anthropic.claude-3") +} + +func (v *awsClient) isCohereModel(model string) bool { + return strings.HasPrefix(model, "cohere") +} + +func (v *awsClient) isCohereCommandRModel(model string) bool { + return strings.HasPrefix(model, "cohere.command-r") +} + +func (v *awsClient) isMistralAIModel(model string) bool { + return strings.HasPrefix(model, "mistral") +} + +func (v *awsClient) isMetaModel(model string) bool { + return strings.HasPrefix(model, "meta") +} + +type bedrockAmazonGenerateRequest struct { + InputText string `json:"inputText,omitempty"` + TextGenerationConfig *textGenerationConfig `json:"textGenerationConfig,omitempty"` +} + +type bedrockAmazonNovaRequest struct { + Messages []bedrockAmazonNovaMessage `json:"messages,omitempty"` + InferenceConfig *bedrockAmazonNovaInferenceConfig `json:"inferenceConfig,omitempty"` +} + +type bedrockAmazonNovaMessage struct { + Role string `json:"role,omitempty"` + Content []bedrockAmazonNovaContent `json:"content,omitempty"` +} + +type bedrockAmazonNovaContent struct { + Text *string `json:"text,omitempty"` + Image *bedrockAmazonNovaContentImage `json:"image,omitempty"` +} + +type bedrockAmazonNovaContentImage struct { + Format string `json:"format,omitempty"` + Source bedrockAmazonNovaContentImageSource 
`json:"source,omitempty"` +} + +type bedrockAmazonNovaContentImageSource struct { + Bytes *string `json:"bytes,omitempty"` +} + +type bedrockAmazonNovaInferenceConfig struct { + MaxNewTokens *int `json:"max_new_tokens,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + TopK *int `json:"top_k,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` +} + +type bedrockAnthropicGenerateRequest struct { + Prompt string `json:"prompt,omitempty"` + MaxTokensToSample *int `json:"max_tokens_to_sample,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + StopSequences []string `json:"stop_sequences,omitempty"` + TopK *int `json:"top_k,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + AnthropicVersion string `json:"anthropic_version,omitempty"` +} + +type bedrockAnthropicClaudeResponse struct { + Completion string `json:"completion"` +} + +type bedrockAnthropicClaude3Request struct { + AnthropicVersion string `json:"anthropic_version,omitempty"` + MaxTokens *int `json:"max_tokens,omitempty"` + Messages []bedrockAnthropicClaude3Message `json:"messages,omitempty"` +} + +type bedrockAnthropicClaude3Message struct { + Role string `json:"role,omitempty"` + Content []bedrockAnthropicClaude3Content `json:"content,omitempty"` +} + +type bedrockAnthropicClaude3Content struct { + // possible values are: image, text + Type string `json:"type,omitempty"` + Text *string `json:"text,omitempty"` + Source *bedrockAnthropicClaudeV3Source `json:"source,omitempty"` +} + +type bedrockAnthropicClaude3Response struct { + ID string `json:"id,omitempty"` + Type string `json:"type,omitempty"` + Role string `json:"role,omitempty"` + Model string `json:"model,omitempty"` + StopReason string `json:"stop_reason,omitempty"` + Usage bedrockAnthropicClaude3UsageResponse `json:"usage,omitempty"` + Content []bedrockAnthropicClaude3Content `json:"content,omitempty"` +} + +type bedrockAnthropicClaude3UsageResponse struct { + InputTokens int `json:"input_tokens,omitempty"` + 
OutputTokens int `json:"output_tokens,omitempty"` +} + +type bedrockAnthropicClaudeV3Source struct { + // possible values are: base64 + ContentType string `json:"type,omitempty"` + // possible values are: image/jpeg + MediaType string `json:"media_type,omitempty"` + // base64 encoded image + Data *string `json:"data,omitempty"` +} + +type bedrockAI21GenerateRequest struct { + Prompt string `json:"prompt,omitempty"` + MaxTokens *int `json:"maxTokens,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + StopSequences []string `json:"stop_sequences,omitempty"` + CountPenalty penalty `json:"countPenalty,omitempty"` + PresencePenalty penalty `json:"presencePenalty,omitempty"` + FrequencyPenalty penalty `json:"frequencyPenalty,omitempty"` +} + +type bedrockAI21Response struct { + Completions []bedrockAI21Completion `json:"completions,omitempty"` +} + +type bedrockAI21Completion struct { + Data bedrockAI21Data `json:"data,omitempty"` +} + +type bedrockAI21Data struct { + Text string `json:"text,omitempty"` +} + +type bedrockCohereRequest struct { + Prompt string `json:"prompt,omitempty"` + MaxTokens *int `json:"max_tokens,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + ReturnLikeliHood string `json:"return_likelihood,omitempty"` +} + +type bedrockCohereCommandRRequest struct { + Message string `json:"message,omitempty"` +} + +type penalty struct { + Scale int `json:"scale,omitempty"` +} + +type sagemakerGenerateRequest struct { + Prompt string `json:"prompt,omitempty"` +} + +type textGenerationConfig struct { + MaxTokenCount int `json:"maxTokenCount"` + StopSequences []string `json:"stopSequences"` + Temperature float64 `json:"temperature"` + TopP int `json:"topP"` +} + +type bedrockGenerateResponse struct { + InputTextTokenCount int `json:"InputTextTokenCount,omitempty"` + Results []Result `json:"results,omitempty"` + Generations []BedrockGeneration `json:"generations,omitempty"` + Message *string 
`json:"message,omitempty"` +} + +type bedrockNovaResponse struct { + Output bedrockNovaResponseOutput `json:"output,omitempty"` +} + +type bedrockNovaResponseOutput struct { + Message bedrockNovaResponseMessage `json:"message,omitempty"` +} + +type bedrockNovaResponseMessage struct { + Content []bedrockNovaResponseContent `json:"content,omitempty"` +} + +type bedrockNovaResponseContent struct { + Text string `json:"text,omitempty"` +} + +type bedrockCohereCommandRResponse struct { + ChatHistory []bedrockCohereChatHistory `json:"chat_history,omitempty"` + ResponseID string `json:"response_id,omitempty"` + GenerationID string `json:"generation_id,omitempty"` + FinishReason string `json:"finish_reason,omitempty"` + Text string `json:"text,omitempty"` +} + +type bedrockCohereChatHistory struct { + Message string `json:"message,omitempty"` + Role string `json:"role,omitempty"` +} + +type sagemakerGenerateResponse struct { + Generations []Generation `json:"generations,omitempty"` + Message *string `json:"message,omitempty"` +} + +type Generation struct { + Id string `json:"id,omitempty"` + Text string `json:"text,omitempty"` +} + +type BedrockGeneration struct { + Id string `json:"id,omitempty"` + Text string `json:"text,omitempty"` + FinishReason string `json:"finish_reason,omitempty"` +} + +type Result struct { + TokenCount int `json:"tokenCount,omitempty"` + OutputText string `json:"outputText,omitempty"` + CompletionReason string `json:"completionReason,omitempty"` +} + +type bedrockMistralAIRequest struct { + Prompt string `json:"prompt,omitempty"` + MaxTokens *int `json:"max_tokens,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"topP,omitempty"` + TopK *int `json:"topK,omitempty"` +} + +type bedrockMistralAIResponse struct { + Outputs []bedrockMistralAIOutput `json:"outputs,omitempty"` +} + +type bedrockMistralAIOutput struct { + Text string `json:"text,omitempty"` +} + +type bedrockMetaRequest struct { + Prompt string 
`json:"prompt,omitempty"` + MaxGenLen *int `json:"max_gen_len,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` +} + +type bedrockMetaResponse struct { + Generation string `json:"generation,omitempty"` + PromptTokenCount *int `json:"prompt_token_count,omitempty"` + GenerationTokenCount *int `json:"generation_token_count,omitempty"` + StopReason string `json:"stop_reason,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/aws_meta.go b/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/aws_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..fde318a5c69162f4c5e6062209a0ccebce59127c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/aws_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *awsClient) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Generative Search - AWS", + "documentationHref": "https://docs.aws.amazon.com/bedrock/latest/APIReference/welcome.html", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/aws_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/aws_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..bc3f4911aa79171a04fb18e708527e6387f12a07 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/aws_meta_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. 
All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, "", "", 60*time.Second, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["name"] + assert.True(t, metaModel != nil) + documentationHref := meta["documentationHref"] + assert.True(t, documentationHref != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "hostname": "http://127.0.0.1:8080", + "modules": { + "generative-aws": { + "documentationHref": "to be announced", + "name": "AWS Generative Module" + } + }, + "version": "1.16.0" +}` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/aws_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/aws_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b1159e0f9918cce0f48784f77d44798fe427c172 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/aws_test.go @@ -0,0 +1,123 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. 
All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/modulecapabilities" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func TestGetAnswer(t *testing.T) { + t.Run("when the server has a successful answer ", func(t *testing.T) { + t.Skip("Skipping this test for now") + handler := &testAnswerHandler{ + t: t, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := &awsClient{ + httpClient: &http.Client{}, + logger: nullLogger(), + awsAccessKey: "123", + awsSecretKey: "123", + buildBedrockUrlFn: func(service, region, model string) string { + return server.URL + }, + buildSagemakerUrlFn: func(service, region, endpoint string) string { + return server.URL + }, + } + + props := []*modulecapabilities.GenerateProperties{{Text: map[string]string{"prop": "My name is john"}}} + expected := modulecapabilities.GenerateResponse{ + Result: ptString("John"), + } + + res, err := c.GenerateAllResults(context.Background(), props, "What is my name?", nil, false, nil) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the server has a an error", func(t *testing.T) { + t.Skip("Skipping this test for now") + server := httptest.NewServer(&testAnswerHandler{ + t: t, + }) + defer server.Close() + + c := &awsClient{ + httpClient: &http.Client{}, + logger: nullLogger(), + awsAccessKey: "123", + awsSecretKey: "123", + buildBedrockUrlFn: func(service, region, model string) string { + return server.URL + }, + buildSagemakerUrlFn: func(service, region, endpoint string) string { + return server.URL + }, + } + + props := []*modulecapabilities.GenerateProperties{{Text: 
map[string]string{"prop": "My name is john"}}} + + _, err := c.GenerateAllResults(context.Background(), props, "What is my name?", nil, false, nil) + + require.NotNil(t, err) + assert.EqualError(t, err, "connection to AWS failed with status: 200 error: some error from the server") + }) +} + +type testAnswerHandler struct { + t *testing.T +} + +func (f *testAnswerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var outBytes []byte + authHeader := r.Header["Authorization"][0] + if strings.Contains(authHeader, "bedrock") { + var request bedrockAmazonGenerateRequest + require.Nil(f.t, json.Unmarshal(bodyBytes, &request)) + + outBytes, err = json.Marshal(request) + require.Nil(f.t, err) + } + + w.Write(outBytes) +} + +func ptString(in string) *string { + return &in +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/signer.go b/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/signer.go new file mode 100644 index 0000000000000000000000000000000000000000..8436079b90b2c94f1d78b603c813736d3870f81f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-aws/clients/signer.go @@ -0,0 +1,110 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
const (
	contentType = "application/json"
	algorithm   = "AWS4-HMAC-SHA256"
)

// getAuthHeader computes an AWS Signature Version 4 authorization header for
// a POST request. It returns the x-amz-date timestamp, the (unmodified)
// header map, and the Authorization header value.
// NOTE(review): header values are not trimmed/collapsed as the SigV4 spec
// requires; callers must pass already-normalized values — confirm.
func getAuthHeader(awsAccessKey string, awsSecretKey string, host string, service string, region string, path string, body []byte, headers map[string]string) (string, map[string]string, string) {
	t := time.Now().UTC()
	amzDate := t.Format("20060102T150405Z")
	shortDate := t.Format("20060102")

	hashedPayload := sha256Hash(body)

	canonicalHeaders, signedHeaders := getCanonicalHeaders(headers)

	// Canonical request: method, path, (empty) query string, headers,
	// signed-header list, payload hash — joined by newlines per the spec.
	canonicalRequest := strings.Join([]string{
		http.MethodPost,
		path,
		"",
		canonicalHeaders,
		signedHeaders,
		hashedPayload,
	}, "\n")

	hashedCanonicalRequest := sha256Hash([]byte(canonicalRequest))

	credentialScope := strings.Join([]string{shortDate, region, service, "aws4_request"}, "/")

	stringToSign := strings.Join([]string{
		algorithm,
		amzDate,
		credentialScope,
		hashedCanonicalRequest,
	}, "\n")

	signingKey := getSigningKey(awsSecretKey, shortDate, region, service)

	signature := hmacSHA256(signingKey, stringToSign)

	authorizationHeader := fmt.Sprintf("%s Credential=%s/%s, SignedHeaders=%s, Signature=%s", algorithm, awsAccessKey, credentialScope, signedHeaders, signature)
	return amzDate, headers, authorizationHeader
}

// getCanonicalHeaders renders the SigV4 canonical header block
// ("name:value\n" per header) and the semicolon-joined signed-header list.
// Fix: SigV4 requires headers sorted by their *lowercase* names; the original
// sorted the original-case keys, which breaks the signature whenever header
// names mix case (ASCII uppercase sorts before lowercase).
func getCanonicalHeaders(headers map[string]string) (string, string) {
	keys := make([]string, 0, len(headers))
	for k := range headers {
		keys = append(keys, k)
	}
	sort.Slice(keys, func(i, j int) bool {
		return strings.ToLower(keys[i]) < strings.ToLower(keys[j])
	})

	var canonical strings.Builder
	signed := make([]string, 0, len(keys))
	for _, k := range keys {
		lower := strings.ToLower(k)
		canonical.WriteString(lower)
		canonical.WriteString(":")
		canonical.WriteString(headers[k])
		canonical.WriteString("\n")
		signed = append(signed, lower)
	}

	return canonical.String(), strings.Join(signed, ";")
}

// hmacSHA256 returns the hex-encoded HMAC-SHA256 of message under key.
func hmacSHA256(key []byte, message string) string {
	mac := hmac.New(sha256.New, key)
	mac.Write([]byte(message))
	return hex.EncodeToString(mac.Sum(nil))
}

// getSigningKey derives the SigV4 signing key by chaining HMACs over the
// date, region, service, and the fixed "aws4_request" terminator.
func getSigningKey(secretKey, date, region, service string) []byte {
	key := "AWS4" + secretKey
	kDate := hmacSHA256Bytes([]byte(key), date)
	kRegion := hmacSHA256Bytes(kDate, region)
	kService := hmacSHA256Bytes(kRegion, service)
	kSigning := hmacSHA256Bytes(kService, "aws4_request")
	return kSigning
}

// hmacSHA256Bytes returns the raw HMAC-SHA256 of message under key.
func hmacSHA256Bytes(key []byte, message string) []byte {
	mac := hmac.New(sha256.New, key)
	mac.Write([]byte(message))
	return mac.Sum(nil)
}

// sha256Hash returns the hex-encoded SHA-256 digest of body.
func sha256Hash(body []byte) string {
	hash := sha256.Sum256(body)
	return hex.EncodeToString(hash[:])
}
+// +// CONTACT: hello@weaviate.io +// + +package modgenerativeaws + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/generative-aws/config" +) + +func (m *GenerativeAWSModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeAWSModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeAWSModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := config.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-aws/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/generative-aws/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..5acc8c56578833e25168442384442d864c7368d9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-aws/config/class_settings.go @@ -0,0 +1,361 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + serviceProperty = "service" + regionProperty = "region" + modelProperty = "model" + endpointProperty = "endpoint" + targetModelProperty = "targetModel" + targetVariantProperty = "targetVariant" + maxTokenCountProperty = "maxTokenCount" + maxTokensToSampleProperty = "maxTokensToSample" + stopSequencesProperty = "stopSequences" + temperatureProperty = "temperature" + topPProperty = "topP" + topKProperty = "topK" +) + +const ( + Bedrock = "bedrock" + Sagemaker = "sagemaker" +) + +var ( + DefaultTitanMaxTokens = 8192 + DefaultTitanStopSequences = []string{} + DefaultTitanTemperature = 0.0 + DefaultTitanTopP = 1.0 + DefaultService = Bedrock +) + +var ( + DefaultAnthropicMaxTokensToSample = 300 + DefaultAnthropicStopSequences = []string{"\\n\\nHuman:"} + DefaultAnthropicTemperature = 1.0 + DefaultAnthropicTopK = 250 + DefaultAnthropicTopP = 0.999 +) + +var DefaultAI21MaxTokens = 300 + +var ( + DefaultCohereMaxTokens = 100 + DefaultCohereTemperature = 0.8 + DefaultAI21Temperature = 0.7 + DefaultCohereTopP = 1.0 +) + +var ( + DefaultMistralAIMaxTokens = 200 + DefaultMistralAITemperature = 0.5 +) + +var ( + DefaultMetaMaxTokens = 512 + DefaultMetaTemperature = 0.5 +) + +var availableAWSServices = []string{ + DefaultService, + Sagemaker, +} + +var availableBedrockModels = []string{ + "ai21.j2-ultra-v1", + "ai21.j2-mid-v1", + "amazon.titan-text-lite-v1", + "amazon.titan-text-express-v1", + "amazon.titan-text-premier-v1:0", + "anthropic.claude-v2", + "anthropic.claude-v2:1", + "anthropic.claude-instant-v1", + "anthropic.claude-3-sonnet-20240229-v1:0", + "anthropic.claude-3-haiku-20240307-v1:0", + "cohere.command-text-v14", + "cohere.command-light-text-v14", + 
"cohere.command-r-v1:0", + "cohere.command-r-plus-v1:0", + "meta.llama3-8b-instruct-v1:0", + "meta.llama3-70b-instruct-v1:0", + "meta.llama2-13b-chat-v1", + "meta.llama2-70b-chat-v1", + "mistral.mistral-7b-instruct-v0:2", + "mistral.mixtral-8x7b-instruct-v0:1", + "mistral.mistral-large-2402-v1:0", +} + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("generative-aws")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + var errorMessages []string + + service := ic.Service() + if service == "" || !ic.validatAvailableAWSSetting(service, availableAWSServices) { + errorMessages = append(errorMessages, fmt.Sprintf("wrong %s, available services are: %v", serviceProperty, availableAWSServices)) + } + + if isBedrock(service) { + model := ic.Model() + if model != "" && !ic.validateAWSSetting(model, availableBedrockModels) { + errorMessages = append(errorMessages, fmt.Sprintf("wrong %s: %s, available model names are: %v", modelProperty, model, availableBedrockModels)) + } + + maxTokenCount := ic.MaxTokenCount(Bedrock, model) + if maxTokenCount != nil && (*maxTokenCount < 1 || *maxTokenCount > 8192) { + errorMessages = append(errorMessages, fmt.Sprintf("%s has to be an integer value between 1 and 8096", maxTokenCountProperty)) + } + temperature := ic.Temperature(Bedrock, model) + if temperature != nil && (*temperature < 0 || *temperature > 1) { + errorMessages = append(errorMessages, fmt.Sprintf("%s has to be float value between 0 and 1", temperatureProperty)) + } + topP := ic.TopP(Bedrock, model) + if topP != nil && (*topP < 0 || *topP > 1) { + errorMessages = append(errorMessages, 
fmt.Sprintf("%s has to be float value between 0 and 1", topPProperty))
+		}
+
+		endpoint := ic.Endpoint()
+		if endpoint != "" {
+			errorMessages = append(errorMessages, fmt.Sprintf("wrong configuration: %s, not applicable to %s", endpoint, service))
+		}
+	}
+
+	if isSagemaker(service) {
+		endpoint := ic.Endpoint()
+		if endpoint == "" {
+			errorMessages = append(errorMessages, fmt.Sprintf("%s cannot be empty", endpointProperty))
+		}
+		model := ic.Model()
+		if model != "" {
+			errorMessages = append(errorMessages, fmt.Sprintf("wrong configuration: %s, not applicable to %s. did you mean %s", modelProperty, service, targetModelProperty))
+		}
+	}
+
+	if len(errorMessages) > 0 {
+		return fmt.Errorf("%s", strings.Join(errorMessages, ", "))
+	}
+
+	return nil
+}
+
+// validatAvailableAWSSetting is a (misspelled) legacy alias of
+// validateAWSSetting, kept so existing callers keep compiling; it previously
+// duplicated the same membership loop verbatim.
+func (ic *classSettings) validatAvailableAWSSetting(value string, availableValues []string) bool {
+	return ic.validateAWSSetting(value, availableValues)
+}
+
+// validateAWSSetting reports whether value is one of availableValues.
+func (ic *classSettings) validateAWSSetting(value string, availableValues []string) bool {
+	for i := range availableValues {
+		if value == availableValues[i] {
+			return true
+		}
+	}
+	return false
+}
+
+// getStringProperty returns the named class setting or defaultValue when unset.
+func (ic *classSettings) getStringProperty(name, defaultValue string) string {
+	return ic.propertyValuesHelper.GetPropertyAsString(ic.cfg, name, defaultValue)
+}
+
+func (ic *classSettings) getFloatProperty(name string, defaultValue *float64) *float64 {
+	return ic.propertyValuesHelper.GetPropertyAsFloat64(ic.cfg, name, defaultValue)
+}
+
+func (ic *classSettings) getIntProperty(name string, defaultValue *int) *int {
+	wrongVal := -1
+	return ic.propertyValuesHelper.GetPropertyAsIntWithNotExists(ic.cfg, name, &wrongVal, defaultValue)
+}
+
+// getListOfStringsProperty returns the named list setting, an empty list when
+// the stored value is not a []string, or defaultValue when the setting is absent.
+func (ic *classSettings) getListOfStringsProperty(name string, defaultValue []string) *[]string {
+	if ic.cfg == nil {
+		// we would receive a nil-config on cross-class requests, such as Explore{}
+		return &defaultValue
+	}
+
+	model, ok :=
ic.cfg.ClassByModuleName("generative-aws")[name] + if ok { + asStringList, ok := model.([]string) + if ok { + return &asStringList + } + var empty []string + return &empty + } + return &defaultValue +} + +// AWS params +func (ic *classSettings) Service() string { + return ic.getStringProperty(serviceProperty, DefaultService) +} + +func (ic *classSettings) Region() string { + return ic.getStringProperty(regionProperty, "") +} + +func (ic *classSettings) Model() string { + return ic.getStringProperty(modelProperty, "") +} + +func (ic *classSettings) MaxTokenCount(service, model string) *int { + if isBedrock(service) { + if isAmazonModel(model) { + return ic.getIntProperty(maxTokenCountProperty, &DefaultTitanMaxTokens) + } + if isAnthropicModel(model) { + return ic.getIntProperty(maxTokensToSampleProperty, &DefaultAnthropicMaxTokensToSample) + } + if isAI21Model(model) { + return ic.getIntProperty(maxTokenCountProperty, &DefaultAI21MaxTokens) + } + if isCohereModel(model) { + return ic.getIntProperty(maxTokenCountProperty, &DefaultCohereMaxTokens) + } + if isMistralAIModel(model) { + return ic.getIntProperty(maxTokenCountProperty, &DefaultMistralAIMaxTokens) + } + if isMetaModel(model) { + return ic.getIntProperty(maxTokenCountProperty, &DefaultMetaMaxTokens) + } + } + return ic.getIntProperty(maxTokenCountProperty, nil) +} + +func (ic *classSettings) StopSequences(service, model string) []string { + if isBedrock(service) { + if isAmazonModel(model) { + return *ic.getListOfStringsProperty(stopSequencesProperty, DefaultTitanStopSequences) + } + if isAnthropicModel(model) { + return *ic.getListOfStringsProperty(stopSequencesProperty, DefaultAnthropicStopSequences) + } + } + return *ic.getListOfStringsProperty(stopSequencesProperty, nil) +} + +func (ic *classSettings) Temperature(service, model string) *float64 { + if isBedrock(service) { + if isAmazonModel(model) { + return ic.getFloatProperty(temperatureProperty, &DefaultTitanTemperature) + } + if 
isAnthropicModel(model) { + return ic.getFloatProperty(temperatureProperty, &DefaultAnthropicTemperature) + } + if isCohereModel(model) { + return ic.getFloatProperty(temperatureProperty, &DefaultCohereTemperature) + } + if isAI21Model(model) { + return ic.getFloatProperty(temperatureProperty, &DefaultAI21Temperature) + } + if isMistralAIModel(model) { + return ic.getFloatProperty(temperatureProperty, &DefaultMistralAITemperature) + } + if isMetaModel(model) { + return ic.getFloatProperty(temperatureProperty, &DefaultMetaTemperature) + } + } + return ic.getFloatProperty(temperatureProperty, nil) +} + +func (ic *classSettings) TopP(service, model string) *float64 { + if isBedrock(service) { + if isAmazonModel(model) { + return ic.getFloatProperty(topPProperty, &DefaultTitanTopP) + } + if isAnthropicModel(model) { + return ic.getFloatProperty(topPProperty, &DefaultAnthropicTopP) + } + if isCohereModel(model) { + return ic.getFloatProperty(topPProperty, &DefaultCohereTopP) + } + } + return ic.getFloatProperty(topPProperty, nil) +} + +func (ic *classSettings) TopK(service, model string) *int { + if isBedrock(service) { + if isAnthropicModel(model) { + return ic.getIntProperty(topKProperty, &DefaultAnthropicTopK) + } + } + return ic.getIntProperty(topKProperty, nil) +} + +func (ic *classSettings) Endpoint() string { + return ic.getStringProperty(endpointProperty, "") +} + +func (ic *classSettings) TargetModel() string { + return ic.getStringProperty(targetModelProperty, "") +} + +func (ic *classSettings) TargetVariant() string { + return ic.getStringProperty(targetVariantProperty, "") +} + +func isSagemaker(service string) bool { + return service == Sagemaker +} + +func isBedrock(service string) bool { + return service == Bedrock +} + +func isAmazonModel(model string) bool { + return strings.HasPrefix(model, "amazon") +} + +func isAI21Model(model string) bool { + return strings.HasPrefix(model, "ai21") +} + +func isAnthropicModel(model string) bool { + return 
strings.HasPrefix(model, "anthropic") +} + +func isCohereModel(model string) bool { + return strings.HasPrefix(model, "cohere") +} + +func isMistralAIModel(model string) bool { + return strings.HasPrefix(model, "mistral") +} + +func isMetaModel(model string) bool { + return strings.HasPrefix(model, "meta") +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-aws/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-aws/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..3809ee9cb9455c620c5a8358ce8537890c2e647b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-aws/config/class_settings_test.go @@ -0,0 +1,216 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantService string + wantRegion string + wantModel string + wantEndpoint string + wantTargetModel string + wantTargetVariant string + wantMaxTokenCount int + wantStopSequences []string + wantTemperature float64 + wantTopP float64 + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantService: "bedrock", + wantRegion: "", + wantModel: "", + wantMaxTokenCount: 8192, + wantTemperature: 0, + wantTopP: 1, + }, + { + name: "happy flow - Bedrock", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": 
"bedrock", + "region": "us-east-1", + "model": "ai21.j2-ultra-v1", + }, + }, + wantService: "bedrock", + wantRegion: "us-east-1", + wantModel: "ai21.j2-ultra-v1", + wantMaxTokenCount: 8192, + wantStopSequences: nil, + wantTemperature: 0.7, + wantTopP: 1, + }, + { + name: "happy flow - Sagemaker", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": "sagemaker", + "region": "us-east-1", + "endpoint": "my-endpoint-deployment", + "targetModel": "model", + "targetVariant": "variant-1", + }, + }, + wantService: "sagemaker", + wantRegion: "us-east-1", + wantEndpoint: "my-endpoint-deployment", + wantTargetModel: "model", + wantTargetVariant: "variant-1", + }, + { + name: "custom values - Bedrock", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": "bedrock", + "region": "us-east-1", + "model": "amazon.titan-text-lite-v1", + "maxTokenCount": 1, + "stopSequences": []string{"test", "test2"}, + "temperature": 0.2, + "topP": 0, + }, + }, + wantService: "bedrock", + wantRegion: "us-east-1", + wantModel: "amazon.titan-text-lite-v1", + wantMaxTokenCount: 1, + wantStopSequences: []string{"test", "test2"}, + wantTemperature: 0.2, + wantTopP: 0, + }, + { + name: "custom values - Sagemaker", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": "sagemaker", + "region": "us-east-1", + "endpoint": "this-is-my-endpoint", + "targetModel": "my-target-model", + "targetVariant": "my-target¬variant", + }, + }, + wantService: "sagemaker", + wantRegion: "us-east-1", + wantEndpoint: "this-is-my-endpoint", + wantTargetModel: "my-target-model", + wantTargetVariant: "my-target¬variant", + }, + { + name: "wrong temperature", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": "bedrock", + "region": "us-east-1", + "model": "amazon.titan-text-lite-v1", + "temperature": 2, + }, + }, + wantErr: fmt.Errorf("temperature has to be float value between 0 and 1"), + }, + { + name: "wrong maxTokenCount", + cfg: 
fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": "bedrock", + "region": "us-east-1", + "model": "amazon.titan-text-lite-v1", + "maxTokenCount": 9000, + }, + }, + wantErr: fmt.Errorf("maxTokenCount has to be an integer value between 1 and 8096"), + }, + { + name: "wrong topP", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": "bedrock", + "region": "us-east-1", + "model": "amazon.titan-text-lite-v1", + "topP": 2000, + }, + }, + wantErr: fmt.Errorf("topP has to be an integer value between 0 and 1"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.EqualError(t, ic.Validate(nil), tt.wantErr.Error()) + } else { + assert.NoError(t, ic.Validate(nil)) + assert.Equal(t, tt.wantService, ic.Service()) + assert.Equal(t, tt.wantRegion, ic.Region()) + assert.Equal(t, tt.wantModel, ic.Model()) + assert.Equal(t, tt.wantEndpoint, ic.Endpoint()) + assert.Equal(t, tt.wantTargetModel, ic.TargetModel()) + assert.Equal(t, tt.wantTargetVariant, ic.TargetVariant()) + if ic.Temperature(ic.Service(), ic.Model()) != nil { + assert.Equal(t, tt.wantTemperature, *ic.Temperature(ic.Service(), ic.Model())) + } + assert.Equal(t, tt.wantStopSequences, ic.StopSequences(ic.Service(), ic.Model())) + if ic.TopP(ic.Service(), ic.Model()) != nil { + assert.Equal(t, tt.wantTopP, *ic.TopP(ic.Service(), ic.Model())) + } + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) 
PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-aws/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-aws/module.go new file mode 100644 index 0000000000000000000000000000000000000000..0d22e8b16166c79e486c86ba48843f597fe868d2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-aws/module.go @@ -0,0 +1,102 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativeaws + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-aws/clients" + "github.com/weaviate/weaviate/modules/generative-aws/parameters" +) + +const Name = "generative-aws" + +func New() *GenerativeAWSModule { + return &GenerativeAWSModule{} +} + +type GenerativeAWSModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeAWSModule) Name() string { + return Name +} + +func (m *GenerativeAWSModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeAWSModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", 
Name) + } + + return nil +} + +func (m *GenerativeAWSModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + awsAccessKey := m.getAWSAccessKey() + awsSecret := m.getAWSSecretAccessKey() + awsSessionToken := os.Getenv("AWS_SESSION_TOKEN") + client := clients.New(awsAccessKey, awsSecret, awsSessionToken, timeout, logger) + + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(m.generative) + + return nil +} + +func (m *GenerativeAWSModule) getAWSAccessKey() string { + if os.Getenv("AWS_ACCESS_KEY_ID") != "" { + return os.Getenv("AWS_ACCESS_KEY_ID") + } + return os.Getenv("AWS_ACCESS_KEY") +} + +func (m *GenerativeAWSModule) getAWSSecretAccessKey() string { + if os.Getenv("AWS_SECRET_ACCESS_KEY") != "" { + return os.Getenv("AWS_SECRET_ACCESS_KEY") + } + return os.Getenv("AWS_SECRET_KEY") +} + +func (m *GenerativeAWSModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeAWSModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-aws/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-aws/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..e7167c6a6ab482689547544b39b36f31459cd179 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-aws/parameters/graphql.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 
2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "service": &graphql.InputObjectFieldConfig{ + Description: "service", + Type: graphql.String, + }, + "region": &graphql.InputObjectFieldConfig{ + Description: "region", + Type: graphql.String, + }, + "endpoint": &graphql.InputObjectFieldConfig{ + Description: "endpoint", + Type: graphql.String, + }, + "targetModel": &graphql.InputObjectFieldConfig{ + Description: "targetModel", + Type: graphql.String, + }, + "targetVariant": &graphql.InputObjectFieldConfig{ + Description: "targetVariant", + Type: graphql.String, + }, + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + "images": &graphql.InputObjectFieldConfig{ + Description: "images", + Type: graphql.NewList(graphql.String), + }, + "imageProperties": &graphql.InputObjectFieldConfig{ + Description: "imageProperties", + Type: graphql.NewList(graphql.String), + }, + }, + }), + DefaultValue: nil, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-aws/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-aws/parameters/params.go new file mode 100644 index 0000000000000000000000000000000000000000..8d4d966616d22b84c2b1a68032cffa7ddc040ba2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-aws/parameters/params.go @@ -0,0 +1,61 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / 
__/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + Service string + Region string + Endpoint string + TargetModel string + TargetVariant string + Model string + Temperature *float64 + Images []*string + ImageProperties []string +} + +func extract(field *ast.ObjectField) interface{} { + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "service": + out.Service = gqlparser.GetValueAsStringOrEmpty(f) + case "region": + out.Region = gqlparser.GetValueAsStringOrEmpty(f) + case "endpoint": + out.Endpoint = gqlparser.GetValueAsStringOrEmpty(f) + case "targetModel": + out.TargetModel = gqlparser.GetValueAsStringOrEmpty(f) + case "targetVariant": + out.TargetVariant = gqlparser.GetValueAsStringOrEmpty(f) + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "temperature": + out.Temperature = gqlparser.GetValueAsFloat64(f) + case "images": + out.Images = gqlparser.GetValueAsStringPtrArray(f) + case "imageProperties": + out.ImageProperties = gqlparser.GetValueAsStringArray(f) + default: + // do nothing + } + } + } + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-aws/parameters/provider.go b/platform/dbops/binaries/weaviate-src/modules/generative-aws/parameters/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..5661fb4a7fdb3590121534923d4c1047d61cffc9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-aws/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | 
|| __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +const Name = "aws" + +func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty { + return map[string]modulecapabilities.GenerativeProperty{ + Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: nil, ExtractRequestParamsFunction: extract}, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-cohere/clients/cohere.go b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/clients/cohere.go new file mode 100644 index 0000000000000000000000000000000000000000..344919e8b61d6a5240246e3adb67c850be9b5b4c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/clients/cohere.go @@ -0,0 +1,268 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/generative" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-cohere/config" + cohereparams "github.com/weaviate/weaviate/modules/generative-cohere/parameters" +) + +type cohere struct { + apiKey string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *cohere { + return &cohere{ + apiKey: apiKey, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (v *cohere) GenerateSingleResult(ctx context.Context, properties *modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forPrompt, err := generative.MakeSinglePrompt(generative.Text(properties), prompt) + if err != nil { + return nil, err + } + return v.Generate(ctx, cfg, forPrompt, options, debug) +} + +func (v *cohere) GenerateAllResults(ctx context.Context, properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forTask, err := generative.MakeTaskPrompt(generative.Texts(properties), task) + if err != nil { + return nil, err + } + return v.Generate(ctx, cfg, forTask, options, debug) +} + +func (v *cohere) Generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) { + params := v.getParameters(cfg, options) + debugInformation := 
v.getDebugInformation(debug, prompt) + + cohereUrl, err := v.getCohereUrl(ctx, params.BaseURL) + if err != nil { + return nil, errors.Wrap(err, "join Cohere API host and path") + } + input := generateInput{ + Message: prompt, + Model: params.Model, + Temperature: params.Temperature, + MaxTokens: params.MaxTokens, + K: params.K, + P: params.P, + StopSequences: params.StopSequences, + FrequencyPenalty: params.FrequencyPenalty, + PresencePenalty: params.PresencePenalty, + } + + body, err := json.Marshal(input) + if err != nil { + return nil, errors.Wrap(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", cohereUrl, + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + apiKey, err := v.getApiKey(ctx) + if err != nil { + return nil, errors.Wrapf(err, "Cohere API Key") + } + req.Header.Add("Authorization", fmt.Sprintf("BEARER %s", apiKey)) + req.Header.Add("Content-Type", "application/json") + req.Header.Add("Request-Source", "unspecified:weaviate") + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody generateResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if res.StatusCode != 200 { + if resBody.Message != "" { + return nil, errors.Errorf("connection to Cohere API failed with status: %d error: %v", res.StatusCode, resBody.Message) + } + return nil, errors.Errorf("connection to Cohere API failed with status: %d", res.StatusCode) + } + + textResponse := resBody.Text + + return &modulecapabilities.GenerateResponse{ + Result: &textResponse, + Debug: debugInformation, + Params: v.getResponseParams(resBody.Meta), + }, nil +} + +func (v *cohere) getParameters(cfg moduletools.ClassConfig, options interface{}) cohereparams.Params { + settings := config.NewClassSettings(cfg) + + var params cohereparams.Params + if p, ok := options.(cohereparams.Params); ok { + params = p + } + if params.BaseURL == "" { + baseURL := settings.BaseURL() + params.BaseURL = baseURL + } + if params.Model == "" { + model := settings.Model() + params.Model = model + } + if params.Temperature == nil { + temperature := settings.Temperature() + params.Temperature = &temperature + } + if params.K == nil { + k := settings.K() + params.K = &k + } + if len(params.StopSequences) == 0 { + params.StopSequences = settings.StopSequences() + } + if params.MaxTokens == nil { + maxTokens := settings.GetMaxTokensForModel(params.Model) + params.MaxTokens = &maxTokens + } + return params +} + +func (v *cohere) getDebugInformation(debug bool, prompt string) *modulecapabilities.GenerateDebugInformation { + if debug { + return &modulecapabilities.GenerateDebugInformation{ + Prompt: prompt, + } + } + return nil +} + +func (v *cohere) getResponseParams(meta *meta) map[string]interface{} { + if meta != nil { + return map[string]interface{}{cohereparams.Name: map[string]interface{}{"meta": meta}} + } + return nil +} + +func GetResponseParams(result map[string]interface{}) *responseParams { + if params, ok := result[cohereparams.Name].(map[string]interface{}); ok { + if meta, ok := params["meta"].(*meta); ok { + return 
&responseParams{Meta: meta} + } + } + return nil +} + +func (v *cohere) getCohereUrl(ctx context.Context, baseURL string) (string, error) { + passedBaseURL := baseURL + if headerBaseURL := modulecomponents.GetValueFromContext(ctx, "X-Cohere-Baseurl"); headerBaseURL != "" { + passedBaseURL = headerBaseURL + } + return url.JoinPath(passedBaseURL, "/v1/chat") +} + +func (v *cohere) getApiKey(ctx context.Context) (string, error) { + if apiKey := modulecomponents.GetValueFromContext(ctx, "X-Cohere-Api-Key"); apiKey != "" { + return apiKey, nil + } + if v.apiKey != "" { + return v.apiKey, nil + } + return "", errors.New("no api key found " + + "neither in request header: X-Cohere-Api-Key " + + "nor in environment variable under COHERE_APIKEY") +} + +type generateInput struct { + ChatHistory []message `json:"chat_history,omitempty"` + Message string `json:"message"` + Model string `json:"model"` + Temperature *float64 `json:"temperature,omitempty"` + MaxTokens *int `json:"max_tokens,omitempty"` + K *int `json:"k,omitempty"` + P *float64 `json:"p,omitempty"` + StopSequences []string `json:"stop_sequences,omitempty"` + FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"` + PresencePenalty *float64 `json:"presence_penalty,omitempty"` +} + +type message struct { + Role string `json:"role"` + Message string `json:"message"` +} + +type generateResponse struct { + Text string `json:"text"` + // When an error occurs then the error message object is being returned with an error message + // https://docs.cohere.com/reference/errors + Message string `json:"message"` + Meta *meta `json:"meta,omitempty"` +} + +type meta struct { + ApiVersion *apiVersion `json:"api_version,omitempty"` + BilledUnits *billedUnits `json:"billed_units,omitempty"` + Tokens *tokens `json:"tokens,omitempty"` + Warnings []string `json:"warnings,omitempty"` +} + +type apiVersion struct { + Version *string `json:"version,omitempty"` + IsDeprecated *bool `json:"is_deprecated,omitempty"` + IsExperimental 
*bool `json:"is_experimental,omitempty"` +} + +type billedUnits struct { + InputTokens *float64 `json:"input_tokens,omitempty"` + OutputTokens *float64 `json:"output_tokens,omitempty"` + SearchUnits *float64 `json:"search_units,omitempty"` + Classifications *float64 `json:"classifications,omitempty"` +} + +type tokens struct { + InputTokens *float64 `json:"input_tokens,omitempty"` + OutputTokens *float64 `json:"output_tokens,omitempty"` +} + +type responseParams struct { + Meta *meta `json:"meta,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-cohere/clients/cohere_meta.go b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/clients/cohere_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..c08fabbe8cbab4689b178ced8b586c5589136045 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/clients/cohere_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *cohere) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Generative Search - Cohere", + "documentationHref": "https://docs.cohere.com/reference/chat", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-cohere/clients/cohere_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/clients/cohere_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..cf0ac1cc1d73e1e37f7116eb39568c4e1cf4d18f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/clients/cohere_meta_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["name"] + assert.True(t, metaModel != nil) + documentationHref := meta["documentationHref"] + assert.True(t, documentationHref != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + 
w.Write([]byte(f.metaInfo()))
+}
+
+func (f *testMetaHandler) metaInfo() string {
+	return `{
+	"hostname": "http://127.0.0.1:8080",
+	"modules": {
+		"generative-cohere": {
+			"documentationHref": "URL_HERE",
+			"name": "Cohere Generative Module"
+		}
+	},
+	"version": "1.16.0"
+}`
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-cohere/clients/cohere_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/clients/cohere_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..b2acf42c950e49e98f0dc462ed15b08299475e08
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/clients/cohere_test.go
@@ -0,0 +1,173 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+//  CONTACT: hello@weaviate.io
+//
+
+package clients
+
+import (
+	"context"
+	"encoding/json"
+	"io"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+	"time"
+
+	"github.com/sirupsen/logrus"
+	"github.com/sirupsen/logrus/hooks/test"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"github.com/weaviate/weaviate/entities/modulecapabilities"
+	"github.com/weaviate/weaviate/entities/schema"
+	"github.com/weaviate/weaviate/usecases/config"
+)
+
+func nullLogger() logrus.FieldLogger {
+	l, _ := test.NewNullLogger()
+	return l
+}
+
+func TestGetAnswer(t *testing.T) {
+	props := []*modulecapabilities.GenerateProperties{{Text: map[string]string{"prop": "My name is john"}}}
+
+	tests := []struct {
+		name           string
+		answer         generateResponse
+		timeout        time.Duration
+		expectedResult string
+	}{
+		{
+			name: "when the server has a successful answer",
+			answer: generateResponse{
+				Text: "John",
+			},
+			expectedResult: "John",
+		},
+		{
+			name: "when the server has an error",
+			answer: generateResponse{
+				Message:
"some error from the server", + }, + }, + { + name: "when the server does not respond in time", + answer: generateResponse{Message: "context deadline exceeded"}, + timeout: time.Second, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + handler := &testAnswerHandler{ + t: t, + answer: test.answer, + timeout: test.timeout, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", test.timeout, nullLogger()) + + cfg := &fakeClassConfig{baseURL: server.URL} + res, err := c.GenerateAllResults(context.Background(), props, "What is my name?", nil, false, cfg) + + if test.answer.Message != "" { + assert.Contains(t, err.Error(), test.answer.Message) + } else { + assert.Equal(t, test.expectedResult, *res.Result) + } + }) + } + + t.Run("when X-Cohere-BaseURL header is passed", func(t *testing.T) { + c := New("apiKey", 5*time.Second, nullLogger()) + + baseURL := "http://default-url.com" + ctxWithValue := context.WithValue(context.Background(), + "X-Cohere-Baseurl", []string{"http://base-url-passed-in-header.com"}) + + buildURL, err := c.getCohereUrl(ctxWithValue, baseURL) + require.NoError(t, err) + assert.Equal(t, "http://base-url-passed-in-header.com/v1/chat", buildURL) + + buildURL, err = c.getCohereUrl(context.TODO(), baseURL) + require.NoError(t, err) + assert.Equal(t, "http://default-url.com/v1/chat", buildURL) + }) +} + +type testAnswerHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + answer generateResponse + timeout time.Duration +} + +func (f *testAnswerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/v1/chat", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + time.Sleep(f.timeout) + + if f.answer.Message != "" { + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := 
io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +type fakeClassConfig struct { + baseURL string +} + +func (cfg *fakeClassConfig) Tenant() string { + return "" +} + +func (cfg *fakeClassConfig) Class() map[string]interface{} { + return nil +} + +func (cfg *fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + settings := map[string]interface{}{ + "baseURL": cfg.baseURL, + } + return settings +} + +func (cfg *fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-cohere/config.go b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/config.go new file mode 100644 index 0000000000000000000000000000000000000000..25d226ef3744a694cb55211e4dd19dd9022088ec --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modgenerativecohere + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/generative-cohere/config" +) + +func (m *GenerativeCohereModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeCohereModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeCohereModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := config.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-cohere/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..c27ca44826333f001c058f76e0cd8577b6e3f491 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/config/class_settings.go @@ -0,0 +1,131 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + baseURLProperty = "baseURL" + modelProperty = "model" + temperatureProperty = "temperature" + maxTokensProperty = "maxTokens" + kProperty = "k" + stopSequencesProperty = "stopSequences" +) + +var availableCohereModels = []string{ + "command-r-plus", "command-r", "command-xlarge-beta", + "command-xlarge", "command-medium", "command-xlarge-nightly", "command-medium-nightly", "xlarge", "medium", + "command", "command-light", "command-nightly", "command-light-nightly", "base", "base-light", +} + +// note it might not like this -- might want int values for e.g. MaxTokens +var ( + DefaultBaseURL = "https://api.cohere.ai" + DefaultCohereModel = "command-r" + DefaultCohereTemperature float64 = 0 + DefaultCohereMaxTokens = 2048 + DefaultCohereK = 0 + DefaultCohereStopSequences = []string{} +) + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("generative-cohere")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + model := ic.getStringProperty(modelProperty, DefaultCohereModel) + if model == nil || !ic.validateModel(*model) { + return errors.Errorf("wrong Cohere model name, available model names are: %v", availableCohereModels) + } + + return nil +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) *string { + asString := 
ic.propertyValuesHelper.GetPropertyAsStringWithNotExists(ic.cfg, name, "", defaultValue) + return &asString +} + +func (ic *classSettings) getIntProperty(name string, defaultValue *int) *int { + wrongVal := -1 + return ic.propertyValuesHelper.GetPropertyAsIntWithNotExists(ic.cfg, name, &wrongVal, defaultValue) +} + +func (ic *classSettings) getFloat64Property(name string, defaultValue *float64) *float64 { + wrongVal := float64(-1) + return ic.propertyValuesHelper.GetPropertyAsFloat64WithNotExists(ic.cfg, name, &wrongVal, defaultValue) +} + +func (ic *classSettings) getListOfStringsProperty(name string, defaultValue []string) *[]string { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return &defaultValue + } + + model, ok := ic.cfg.ClassByModuleName("generative-cohere")[name] + if ok { + asStringList, ok := model.([]string) + if ok { + return &asStringList + } + var empty []string + return &empty + } + return &defaultValue +} + +func (ic *classSettings) GetMaxTokensForModel(model string) int { + return DefaultCohereMaxTokens +} + +func (ic *classSettings) validateModel(model string) bool { + return basesettings.ValidateSetting(model, availableCohereModels) +} + +func (ic *classSettings) BaseURL() string { + return *ic.getStringProperty(baseURLProperty, DefaultBaseURL) +} + +func (ic *classSettings) Model() string { + return *ic.getStringProperty(modelProperty, DefaultCohereModel) +} + +func (ic *classSettings) MaxTokens() int { + return *ic.getIntProperty(maxTokensProperty, &DefaultCohereMaxTokens) +} + +func (ic *classSettings) Temperature() float64 { + return *ic.getFloat64Property(temperatureProperty, &DefaultCohereTemperature) +} + +func (ic *classSettings) K() int { + return *ic.getIntProperty(kProperty, &DefaultCohereK) +} + +func (ic *classSettings) StopSequences() []string { + return *ic.getListOfStringsProperty(stopSequencesProperty, DefaultCohereStopSequences) +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/generative-cohere/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..0e48ee5239f1f52ff7584179a507925dbafbdf3f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/config/class_settings_test.go @@ -0,0 +1,158 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantModel string + wantMaxTokens int + wantTemperature float64 + wantK int + wantStopSequences []string + wantBaseURL string + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantModel: "command-r", + wantMaxTokens: 2048, + wantTemperature: 0, + wantK: 0, + wantStopSequences: []string{}, + wantBaseURL: "https://api.cohere.ai", + wantErr: nil, + }, + { + name: "everything non default configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "command-xlarge", + "maxTokens": 2048, + "temperature": 1, + "k": 2, + "stopSequences": []string{"stop1", "stop2"}, + }, + }, + wantModel: "command-xlarge", + wantMaxTokens: 2048, + wantTemperature: 1, + wantK: 2, + wantStopSequences: []string{"stop1", "stop2"}, + wantBaseURL: "https://api.cohere.ai", + wantErr: nil, + }, + { + name: "wrong model 
configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "wrong-model", + }, + }, + wantErr: errors.Errorf("wrong Cohere model name, available model names are: " + + "[command-r-plus command-r command-xlarge-beta command-xlarge command-medium command-xlarge-nightly " + + "command-medium-nightly xlarge medium command command-light command-nightly command-light-nightly base base-light]"), + }, + { + name: "default settings with command-light-nightly", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "command-light-nightly", + }, + }, + wantModel: "command-light-nightly", + wantMaxTokens: 2048, + wantTemperature: 0, + wantK: 0, + wantStopSequences: []string{}, + wantBaseURL: "https://api.cohere.ai", + wantErr: nil, + }, + { + name: "default settings with command-light-nightly and baseURL", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "command-light-nightly", + "baseURL": "http://custom-url.com", + }, + }, + wantModel: "command-light-nightly", + wantMaxTokens: 2048, + wantTemperature: 0, + wantK: 0, + wantStopSequences: []string{}, + wantBaseURL: "http://custom-url.com", + wantErr: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.Equal(t, tt.wantErr.Error(), ic.Validate(nil).Error()) + } else { + assert.NoError(t, ic.Validate(nil)) + assert.Equal(t, tt.wantModel, ic.Model()) + assert.Equal(t, tt.wantMaxTokens, ic.MaxTokens()) + assert.Equal(t, tt.wantTemperature, ic.Temperature()) + assert.Equal(t, tt.wantK, ic.K()) + assert.Equal(t, tt.wantStopSequences, ic.StopSequences()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) 
map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-cohere/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/module.go new file mode 100644 index 0000000000000000000000000000000000000000..1231822878c2f57a1e50db1ae8a61708b8bfefc6 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/module.go @@ -0,0 +1,86 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modgenerativecohere + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-cohere/clients" + "github.com/weaviate/weaviate/modules/generative-cohere/parameters" +) + +const Name = "generative-cohere" + +func New() *GenerativeCohereModule { + return &GenerativeCohereModule{} +} + +type GenerativeCohereModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeCohereModule) Name() string { + return Name +} + +func (m *GenerativeCohereModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeCohereModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", Name) + } + + return nil +} + +func (m *GenerativeCohereModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("COHERE_APIKEY") + + client := clients.New(apiKey, timeout, logger) + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(m.generative) + + return nil +} + +func (m *GenerativeCohereModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeCohereModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// verify we implement the modules.Module interface +var ( 
+ _ = modulecapabilities.Module(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-cohere/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..550ae4e68561c5b3f65e2be212027a14b887d4a3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/parameters/graphql.go @@ -0,0 +1,104 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "baseURL": &graphql.InputObjectFieldConfig{ + Description: "baseURL", + Type: graphql.String, + }, + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + "maxTokens": &graphql.InputObjectFieldConfig{ + Description: "maxTokens", + Type: graphql.Int, + }, + "k": &graphql.InputObjectFieldConfig{ + Description: "n", + Type: graphql.Int, + }, + "p": &graphql.InputObjectFieldConfig{ + Description: "n", + Type: graphql.Float, + }, + "stopSequences": &graphql.InputObjectFieldConfig{ + Description: "stopSequences", + Type: graphql.NewList(graphql.String), + }, + "frequencyPenalty": 
&graphql.InputObjectFieldConfig{ + Description: "frequencyPenalty", + Type: graphql.Float, + }, + "presencePenalty": &graphql.InputObjectFieldConfig{ + Description: "presencePenalty", + Type: graphql.Float, + }, + }, + }), + DefaultValue: nil, + } +} + +func output(prefix string) *graphql.Field { + return &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sFields", prefix, Name), + Fields: graphql.Fields{ + "meta": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sUsageMetadataFields", prefix, Name), + Fields: graphql.Fields{ + "api_version": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sApiVersionFields", prefix, Name), + Fields: graphql.Fields{ + "version": &graphql.Field{Type: graphql.String}, + "is_deprecated": &graphql.Field{Type: graphql.Boolean}, + "is_experimental": &graphql.Field{Type: graphql.Boolean}, + }, + })}, + "billed_units": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sBilledUnitsFields", prefix, Name), + Fields: graphql.Fields{ + "input_tokens": &graphql.Field{Type: graphql.Float}, + "output_tokens": &graphql.Field{Type: graphql.Float}, + "search_units": &graphql.Field{Type: graphql.Float}, + "classifications": &graphql.Field{Type: graphql.Float}, + }, + })}, + "tokens": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sTokensFields", prefix, Name), + Fields: graphql.Fields{ + "input_tokens": &graphql.Field{Type: graphql.Float}, + "output_tokens": &graphql.Field{Type: graphql.Float}, + }, + })}, + "warnings": &graphql.Field{Type: graphql.NewList(graphql.String)}, + }, + })}, + }, + })} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-cohere/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/parameters/params.go new file mode 100644 index 
0000000000000000000000000000000000000000..ea45c121483ae190e7984af075b3eec15cad0009 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/parameters/params.go @@ -0,0 +1,61 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + BaseURL string + Model string + Temperature *float64 + MaxTokens *int + K *int + P *float64 + StopSequences []string + FrequencyPenalty *float64 + PresencePenalty *float64 +} + +func extract(field *ast.ObjectField) interface{} { + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "baseURL": + out.BaseURL = gqlparser.GetValueAsStringOrEmpty(f) + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "temperature": + out.Temperature = gqlparser.GetValueAsFloat64(f) + case "maxTokens": + out.MaxTokens = gqlparser.GetValueAsInt(f) + case "k": + out.K = gqlparser.GetValueAsInt(f) + case "p": + out.P = gqlparser.GetValueAsFloat64(f) + case "stopSequences": + out.StopSequences = gqlparser.GetValueAsStringArray(f) + case "frequencyPenalty": + out.FrequencyPenalty = gqlparser.GetValueAsFloat64(f) + case "presencePenalty": + out.PresencePenalty = gqlparser.GetValueAsFloat64(f) + default: + // do nothing + } + } + } + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-cohere/parameters/provider.go b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/parameters/provider.go new file mode 100644 index 
0000000000000000000000000000000000000000..a9db7e16e1c21cdd6664547835bd5c9ea0d3d493 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-cohere/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +const Name = "cohere" + +func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty { + return map[string]modulecapabilities.GenerativeProperty{ + Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: output, ExtractRequestParamsFunction: extract}, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-databricks/clients/databricks.go b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/clients/databricks.go new file mode 100644 index 0000000000000000000000000000000000000000..243388ba2abb6701c73d8eb348a290a89b1c909e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/clients/databricks.go @@ -0,0 +1,343 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "strconv" + "strings" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + generativecomponents "github.com/weaviate/weaviate/usecases/modulecomponents/generative" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-databricks/config" + databricksparams "github.com/weaviate/weaviate/modules/generative-databricks/parameters" +) + +func buildEndpointFn(endpoint string) (string, error) { + if endpoint == "" { + return "", fmt.Errorf("endpoint cannot be empty") + } + return endpoint, nil +} + +type databricks struct { + databricksToken string + buildEndpoint func(endpoint string) (string, error) + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(databricksToken string, timeout time.Duration, logger logrus.FieldLogger) *databricks { + return &databricks{ + databricksToken: databricksToken, + httpClient: &http.Client{ + Timeout: timeout, + }, + buildEndpoint: buildEndpointFn, + logger: logger, + } +} + +func (v *databricks) GenerateSingleResult(ctx context.Context, properties *modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forPrompt, err := generativecomponents.MakeSinglePrompt(generativecomponents.Text(properties), prompt) + if err != nil { + return nil, err + } + return v.Generate(ctx, cfg, forPrompt, options, debug) +} + +func (v *databricks) GenerateAllResults(ctx context.Context, properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forTask, err := 
generativecomponents.MakeTaskPrompt(generativecomponents.Texts(properties), task) + if err != nil { + return nil, err + } + return v.Generate(ctx, cfg, forTask, options, debug) +} + +func (v *databricks) Generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) { + params := v.getParameters(cfg, options) + debugInformation := v.getDebugInformation(debug, prompt) + + oaiUrl, err := v.buildDatabricksEndpoint(ctx, params.Endpoint) + if err != nil { + return nil, errors.Wrap(err, "url join path") + } + + input, err := v.generateInput(prompt, params) + if err != nil { + return nil, errors.Wrap(err, "generate input") + } + + body, err := json.Marshal(input) + if err != nil { + return nil, errors.Wrap(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", oaiUrl, + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + apiKey, err := v.getApiKey(ctx) + if err != nil { + return nil, errors.Wrapf(err, "Databricks Token") + } + req.Header.Add(v.getApiKeyHeaderAndValue(apiKey)) + req.Header.Add("Content-Type", "application/json") + if userAgent := modulecomponents.GetValueFromContext(ctx, "X-Databricks-User-Agent"); userAgent != "" { + req.Header.Add("User-Agent", userAgent) + } + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody generateResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if res.StatusCode != 200 || resBody.Error != nil { + return nil, v.getError(res.StatusCode, resBody.Error) + } + + responseParams := v.getResponseParams(resBody.Usage) + if len(resBody.Choices) > 0 && resBody.Choices[0].Text != "" { + textResponse := resBody.Choices[0].Text + trimmedResponse := strings.Trim(textResponse, "\n") + return &modulecapabilities.GenerateResponse{ + Result: &trimmedResponse, + Debug: debugInformation, + Params: responseParams, + }, nil + } + + if len(resBody.Choices) > 0 && resBody.Choices[0].Message != nil { + message := resBody.Choices[0].Message + textResponse := message.Content + trimmedResponse := strings.Trim(textResponse, "\n") + return &modulecapabilities.GenerateResponse{ + Result: &trimmedResponse, + Debug: debugInformation, + Params: responseParams, + }, nil + } + + return &modulecapabilities.GenerateResponse{ + Result: nil, + Debug: debugInformation, + }, nil +} + +func (v *databricks) getParameters(cfg moduletools.ClassConfig, options interface{}) databricksparams.Params { + settings := config.NewClassSettings(cfg) + + var params databricksparams.Params + if p, ok := options.(databricksparams.Params); ok { + params = p + } + + if params.Endpoint == "" { + params.Endpoint = settings.Endpoint() + } + if params.Temperature == nil { + temperature := settings.Temperature() + params.Temperature = &temperature + } + if params.TopP == nil { + topP := settings.TopP() + params.TopP = &topP + } + if params.MaxTokens == nil { + maxTokens := settings.MaxTokens() + params.MaxTokens = maxTokens + } + + return params +} + +func (v *databricks) getDebugInformation(debug bool, prompt string) *modulecapabilities.GenerateDebugInformation { + if debug { + return &modulecapabilities.GenerateDebugInformation{ + Prompt: prompt, + } + } + return nil +} + +func (v *databricks) getResponseParams(usage *usage) map[string]interface{} { + if usage != nil { + return map[string]interface{}{databricksparams.Name: map[string]interface{}{"usage": usage}} + } + return nil +} + +func 
GetResponseParams(result map[string]interface{}) *responseParams { + if params, ok := result[databricksparams.Name].(map[string]interface{}); ok { + if usage, ok := params["usage"].(*usage); ok { + return &responseParams{Usage: usage} + } + } + return nil +} + +func (v *databricks) buildDatabricksEndpoint(ctx context.Context, endpoint string) (string, error) { + if headerEndpoint := modulecomponents.GetValueFromContext(ctx, "X-Databricks-Endpoint"); headerEndpoint != "" { + return headerEndpoint, nil + } + return v.buildEndpoint(endpoint) +} + +func (v *databricks) generateInput(prompt string, params databricksparams.Params) (generateInput, error) { + var input generateInput + messages := []message{{ + Role: "user", + Content: prompt, + }} + + input = generateInput{ + Messages: messages, + Stream: false, + Logprobs: params.Logprobs, + TopLogprobs: params.TopLogprobs, + MaxTokens: params.MaxTokens, + N: params.N, + Stop: params.Stop, + Temperature: params.Temperature, + TopP: params.TopP, + } + + return input, nil +} + +func (v *databricks) getError(statusCode int, resBodyError *databricksApiError) error { + endpoint := "Databricks Foundation Model API" + if resBodyError != nil { + return fmt.Errorf("connection to: %s failed with status: %d error: %v", endpoint, statusCode, resBodyError.Message) + } + return fmt.Errorf("connection to: %s failed with status: %d", endpoint, statusCode) +} + +func (v *databricks) getApiKeyHeaderAndValue(apiKey string) (string, string) { + return "Authorization", fmt.Sprintf("Bearer %s", apiKey) +} + +func (v *databricks) getApiKey(ctx context.Context) (string, error) { + var apiKey, envVarValue, envVar string + + apiKey = "X-Databricks-Token" + envVar = "DATABRICKS_TOKEN" + envVarValue = v.databricksToken + + return v.getApiKeyFromContext(ctx, apiKey, envVarValue, envVar) +} + +func (v *databricks) getApiKeyFromContext(ctx context.Context, apiKey, envVarValue, envVar string) (string, error) { + if apiKeyValue := 
modulecomponents.GetValueFromContext(ctx, apiKey); apiKeyValue != "" { + return apiKeyValue, nil + } + if envVarValue != "" { + return envVarValue, nil + } + return "", fmt.Errorf("no api key found neither in request header: %s nor in environment variable under %s", apiKey, envVar) +} + +type generateInput struct { + Prompt string `json:"prompt,omitempty"` + Messages []message `json:"messages,omitempty"` + Stream bool `json:"stream,omitempty"` + Model string `json:"model,omitempty"` + Logprobs *bool `json:"logprobs,omitempty"` + TopLogprobs *int `json:"top_logprobs,omitempty"` + MaxTokens *int `json:"max_tokens,omitempty"` + N *int `json:"n,omitempty"` + Stop []string `json:"stop,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"top_p,omitempty"` +} + +type message struct { + Role string `json:"role"` + Content string `json:"content"` + Name string `json:"name,omitempty"` +} + +type generateResponse struct { + Choices []choice + Usage *usage `json:"usage,omitempty"` + Error *databricksApiError `json:"error,omitempty"` +} + +type choice struct { + FinishReason string + Index float32 + Logprobs string + Text string `json:"text,omitempty"` + Message *message `json:"message,omitempty"` +} + +type databricksApiError struct { + Message string `json:"message"` + ErrorCode databricksCode `json:"error_code"` +} + +type usage struct { + PromptTokens *int `json:"prompt_tokens,omitempty"` + CompletionTokens *int `json:"completion_tokens,omitempty"` + TotalTokens *int `json:"total_tokens,omitempty"` +} + +type databricksCode string + +func (c *databricksCode) String() string { + if c == nil { + return "" + } + return string(*c) +} + +func (c *databricksCode) UnmarshalJSON(data []byte) (err error) { + if number, err := strconv.Atoi(string(data)); err == nil { + str := strconv.Itoa(number) + *c = databricksCode(str) + return nil + } + var str string + err = json.Unmarshal(data, &str) + if err != nil { + return err + } + *c = 
databricksCode(str) + return nil +} + +type responseParams struct { + Usage *usage `json:"usage,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-databricks/clients/databricks_meta.go b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/clients/databricks_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..614a13e27bbf575b83e703febd77fe03674247eb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/clients/databricks_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *databricks) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Generative Search - Databricks", + "documentationHref": "https://docs.databricks.com/en/machine-learning/foundation-models/api-reference.html#completion-task", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-databricks/clients/databricks_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/clients/databricks_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..91224a320a58f33ad3b9933cba4f1db5c6a5f892 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/clients/databricks_meta_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["name"] + assert.True(t, metaModel != nil) + documentationHref := meta["documentationHref"] + assert.True(t, documentationHref != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "hostname": "http://127.0.0.1:8080", + "modules": { + "generative-databricks": { + "documentationHref": "https://docs.databricks.com/en/machine-learning/foundation-models/api-reference.html#completion-task", + "name": "Generative Search - Databricks" + } + }, + "version": "1.16.0" +}` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-databricks/clients/databricks_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/clients/databricks_test.go new file mode 100644 index 0000000000000000000000000000000000000000..45d434ff124aea504faadf5824d3c8e759976ef3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/clients/databricks_test.go @@ -0,0 +1,198 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ 
+// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/modulecapabilities" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func fakeBuildUrl(serverURL string) (string, error) { + return serverURL, nil +} + +func TestGetAnswer(t *testing.T) { + props := []*modulecapabilities.GenerateProperties{{Text: map[string]string{"prop": "My name is john"}}} + t.Run("when the server has a successful answer ", func(t *testing.T) { + handler := &testAnswerHandler{ + t: t, + answer: generateResponse{ + Choices: []choice{{ + FinishReason: "test", + Index: 0, + Logprobs: "", + Text: "John", + }}, + Error: nil, + }, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("databricksToken", 0, nullLogger()) + c.buildEndpoint = func(endpoint string) (string, error) { + return fakeBuildUrl(server.URL) + } + + expected := modulecapabilities.GenerateResponse{ + Result: ptString("John"), + } + + res, err := c.GenerateAllResults(context.Background(), props, "What is my name?", nil, false, nil) + + assert.Nil(t, err) + assert.Equal(t, expected, *res) + }) + + t.Run("when X-Databricks-Endpoint header is passed", func(t *testing.T) { + c := New("databricksToken", 0, nullLogger()) + + endpoint := "http://default" + ctxWithValue := context.WithValue(context.Background(), + "X-Databricks-Endpoint", []string{"http://base-url-passed-in-header.com"}) + + buildURL, err := c.buildDatabricksEndpoint(ctxWithValue, endpoint) + require.NoError(t, err) + assert.Equal(t, "http://base-url-passed-in-header.com", buildURL) + + buildURL, err = 
c.buildDatabricksEndpoint(context.TODO(), endpoint) + require.NoError(t, err) + assert.Equal(t, "http://default", buildURL) + + buildURL, err = c.buildDatabricksEndpoint(context.TODO(), "") + require.Error(t, err) + assert.ErrorContains(t, err, "endpoint cannot be empty") + assert.Equal(t, "", buildURL) + }) + + t.Run("when X-Databricks-User-Agent header is passed", func(t *testing.T) { + userAgent := "weaviate+spark_connector" + handler := &testAnswerHandler{ + t: t, + answer: generateResponse{ + Choices: []choice{{ + FinishReason: "test", + Index: 0, + Logprobs: "", + Text: "John", + }}, + Error: nil, + }, + userAgent: userAgent, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("databricksToken", 0, nullLogger()) + c.buildEndpoint = func(endpoint string) (string, error) { + return fakeBuildUrl(server.URL) + } + + expected := modulecapabilities.GenerateResponse{ + Result: ptString("John"), + } + + ctxWithValue := context.WithValue(context.Background(), + "X-Databricks-User-Agent", []string{userAgent}) + + res, err := c.GenerateAllResults(ctxWithValue, props, "What is my name?", nil, false, nil) + + assert.Nil(t, err) + assert.Equal(t, expected, *res) + }) +} + +func TestDatabricksApiErrorDecode(t *testing.T) { + t.Run("getModelStringQuery", func(t *testing.T) { + type args struct { + response []byte + } + tests := []struct { + name string + args args + want string + }{ + { + name: "Error code as string text", + args: args{ + response: []byte(`{"message": "Human-readable error message.", "error_code": "Error code"}`), + }, + want: "Error code", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var got *databricksApiError + err := json.Unmarshal(tt.args.response, &got) + require.NoError(t, err) + + if got.ErrorCode.String() != tt.want { + t.Errorf("databricksApiError.ErrorCode = %v, want %v", got.ErrorCode, tt.want) + } + }) + } + }) +} + +type testAnswerHandler struct { + t *testing.T + // the test handler 
will report as not ready before the time has passed + answer generateResponse + userAgent string +} + +func (f *testAnswerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.userAgent != "" { + assert.Equal(f.t, f.userAgent, r.UserAgent()) + } + if f.answer.Error != nil && f.answer.Error.Message != "" { + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func ptString(in string) *string { + return &in +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-databricks/config.go b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/config.go new file mode 100644 index 0000000000000000000000000000000000000000..bf1fb4770bd20b279bd4069ae4edaee030b76252 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modgenerativedatabricks + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/generative-databricks/config" +) + +func (m *GenerativeDatabricksModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeDatabricksModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeDatabricksModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := config.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-databricks/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..c9514d847e76dccefae459ed2a49f3666e761a93 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/config/class_settings.go @@ -0,0 +1,118 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "math" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + temperatureProperty = "temperature" + maxTokensProperty = "maxTokens" + + topPProperty = "topP" + topKProperty = "topK" +) + +var ( + DefaultDatabricksTemperature = 1.0 + DefaultDatabricksTopP = 1.0 + DefaultDatabricksMaxTokens = 8192.0 + DefaultDatabricksTopK = math.MaxInt64 +) + +type ClassSettings interface { + MaxTokens() *int + Temperature() float64 + TopP() float64 + TopK() int + Validate(class *models.Class) error + Endpoint() string +} + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) ClassSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("generative-databricks")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + temperature := ic.getFloatProperty(temperatureProperty, &DefaultDatabricksTemperature) + if temperature == nil || (*temperature < 0 || *temperature > 1) { + return errors.Errorf("Wrong temperature configuration, values are between 0.0 and 1.0") + } + + maxTokens := ic.getIntProperty(maxTokensProperty, nil) + if maxTokens != nil && *maxTokens <= 0 { + return errors.Errorf("Wrong maxTokens configuration, values should be greater than zero or nil") + } + + topP := ic.getFloatProperty(topPProperty, &DefaultDatabricksTopP) + if topP == nil || (*topP < 0 || *topP > 5) { + return errors.Errorf("Wrong topP configuration, values are should have a minimal value of 1 and max of 5") + } + + topK := 
ic.getIntProperty(topKProperty, &DefaultDatabricksTopK) + if topK != nil && (*topK <= 0) { + return errors.Errorf("Wrong topK configuration, values should be greater than zero or nil") + } + + return nil +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) *string { + asString := ic.propertyValuesHelper.GetPropertyAsStringWithNotExists(ic.cfg, name, "", defaultValue) + return &asString +} + +func (ic *classSettings) getFloatProperty(name string, defaultValue *float64) *float64 { + wrongVal := float64(-1.0) + return ic.propertyValuesHelper.GetPropertyAsFloat64WithNotExists(ic.cfg, name, &wrongVal, defaultValue) +} + +func (ic *classSettings) getIntProperty(name string, defaultValue *int) *int { + wrongVal := -1 + return ic.propertyValuesHelper.GetPropertyAsIntWithNotExists(ic.cfg, name, &wrongVal, defaultValue) +} + +func (ic *classSettings) MaxTokens() *int { + return ic.getIntProperty(maxTokensProperty, nil) +} + +func (ic *classSettings) Temperature() float64 { + return *ic.getFloatProperty(temperatureProperty, &DefaultDatabricksTemperature) +} + +func (ic *classSettings) TopP() float64 { + return *ic.getFloatProperty(topPProperty, &DefaultDatabricksTopP) +} + +func (ic *classSettings) TopK() int { + return *ic.getIntProperty(topKProperty, &DefaultDatabricksTopK) +} + +func (ic *classSettings) Endpoint() string { + return *ic.getStringProperty("endpoint", "") +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-databricks/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..94716089d87a0c61bbfe7f4403186cc0d5ddaa5b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/config/class_settings_test.go @@ -0,0 +1,180 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ 
+// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "math" + "testing" + + // "github.com/pkg/errors" + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + maxTokens42 := 42 + + tests := []struct { + name string + cfg moduletools.ClassConfig + wantMaxTokens *int + wantTemperature float64 + wantTopP float64 + wantTopK int + wantEndpoint string + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantMaxTokens: nil, + wantTemperature: 1.0, + wantTopP: 1, + wantTopK: math.MaxInt64, + wantEndpoint: "", + wantErr: nil, + }, + { + name: "Happy flow", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "endpoint": "https://foo.bar.com", + }, + }, + wantMaxTokens: nil, + wantTemperature: 1.0, + wantTopP: 1, + wantTopK: math.MaxInt64, + wantEndpoint: "https://foo.bar.com", + wantErr: nil, + }, + { + name: "Everything non default configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "endpoint": "https://foo.bar.com", + "maxTokens": 42, + "temperature": 0.5, + "topP": 3, + "topK": 1, + }, + }, + wantMaxTokens: &maxTokens42, + wantTemperature: 0.5, + wantTopP: 3, + wantTopK: 1, + wantEndpoint: "https://foo.bar.com", + wantErr: nil, + }, + { + name: "Wrong maxTokens configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "endpoint": "https://foo.bar.com", + "maxTokens": true, + }, + }, + wantErr: errors.Errorf("Wrong maxTokens configuration, values should be greater than zero or nil"), + }, + { + name: "Wrong temperature configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + 
"endpoint": "https://foo.bar.com", + "temperature": true, + }, + }, + wantErr: errors.Errorf("Wrong temperature configuration, values are between 0.0 and 1.0"), + }, + { + name: "Wrong topP configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "endpoint": "https://foo.bar.com", + "topP": true, + }, + }, + wantErr: errors.Errorf("Wrong topP configuration, values are should have a minimal value of 1 and max of 5"), + }, + { + name: "Wrong topK configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "endpoint": "https://foo.bar.com", + "topK": true, + }, + }, + wantErr: errors.Errorf("Wrong topK configuration, values should be greater than zero or nil"), + }, + { + name: "Empty endpoint", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "endpoint": "", + }, + }, + wantErr: errors.Errorf("endpoint cannot be empty"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.Error(t, tt.wantErr, ic.Validate(nil)) + } else { + assert.NoError(t, ic.Validate(nil)) + assert.Equal(t, tt.wantMaxTokens, ic.MaxTokens()) + assert.Equal(t, tt.wantTemperature, ic.Temperature()) + assert.Equal(t, tt.wantTopP, ic.TopP()) + assert.Equal(t, tt.wantTopK, ic.TopK()) + assert.Equal(t, tt.wantEndpoint, ic.Endpoint()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) 
Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-databricks/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/module.go new file mode 100644 index 0000000000000000000000000000000000000000..b930e2e16616ad0cca5f39cf0c048d337897412d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/module.go @@ -0,0 +1,85 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativedatabricks + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-databricks/clients" + "github.com/weaviate/weaviate/modules/generative-databricks/parameters" +) + +const Name = "generative-databricks" + +func New() *GenerativeDatabricksModule { + return &GenerativeDatabricksModule{} +} + +type GenerativeDatabricksModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeDatabricksModule) Name() string { + return Name +} + +func (m *GenerativeDatabricksModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeDatabricksModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", Name) 
+ } + return nil +} + +func (m *GenerativeDatabricksModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + databricksToken := os.Getenv("DATABRICKS_TOKEN") + + client := clients.New(databricksToken, timeout, logger) + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(m.generative) + + return nil +} + +func (m *GenerativeDatabricksModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeDatabricksModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-databricks/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..1955a5fdf450075b6cea6062ec38bd072d03a3f8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/parameters/graphql.go @@ -0,0 +1,86 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "frequencyPenalty": &graphql.InputObjectFieldConfig{ + Description: "frequencyPenalty", + Type: graphql.Float, + }, + "logprobs": &graphql.InputObjectFieldConfig{ + Description: "logprobs", + Type: graphql.Boolean, + }, + "topLogprobs": &graphql.InputObjectFieldConfig{ + Description: "topLogprobs", + Type: graphql.Int, + }, + "maxTokens": &graphql.InputObjectFieldConfig{ + Description: "maxTokens", + Type: graphql.Int, + }, + "n": &graphql.InputObjectFieldConfig{ + Description: "n", + Type: graphql.Int, + }, + "presencePenalty": &graphql.InputObjectFieldConfig{ + Description: "presencePenalty", + Type: graphql.Float, + }, + "stop": &graphql.InputObjectFieldConfig{ + Description: "stop", + Type: graphql.NewList(graphql.String), + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + "topP": &graphql.InputObjectFieldConfig{ + Description: "topP", + Type: graphql.Float, + }, + }, + }), + DefaultValue: nil, + } +} + +func output(prefix string) *graphql.Field { + return &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sFields", prefix, Name), + Fields: graphql.Fields{ + "usage": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sUsageMetadataFields", prefix, Name), + Fields: graphql.Fields{ + "prompt_tokens": &graphql.Field{Type: graphql.Int}, + "completion_tokens": &graphql.Field{Type: graphql.Int}, + "total_tokens": 
&graphql.Field{Type: graphql.Int}, + }, + })}, + }, + })} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-databricks/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/parameters/params.go new file mode 100644 index 0000000000000000000000000000000000000000..a3dac905535a1b01aee5da720335d88bcf777320 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/parameters/params.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + Endpoint string + Model string + FrequencyPenalty *float64 + Logprobs *bool + TopLogprobs *int + MaxTokens *int + N *int + PresencePenalty *float64 + Stop []string + Temperature *float64 + TopP *float64 +} + +func extract(field *ast.ObjectField) interface{} { + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "Endpoint": + out.Endpoint = gqlparser.GetValueAsStringOrEmpty(f) + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "frequencyPenalty": + out.FrequencyPenalty = gqlparser.GetValueAsFloat64(f) + case "logprobs": + out.Logprobs = gqlparser.GetValueAsBool(f) + case "topLogprobs": + out.TopLogprobs = gqlparser.GetValueAsInt(f) + case "maxTokens": + out.MaxTokens = gqlparser.GetValueAsInt(f) + case "n": + out.N = gqlparser.GetValueAsInt(f) + case "presencePenalty": + out.PresencePenalty = gqlparser.GetValueAsFloat64(f) + case "stop": + out.Stop = gqlparser.GetValueAsStringArray(f) + case "temperature": + 
out.Temperature = gqlparser.GetValueAsFloat64(f) + case "topP": + out.TopP = gqlparser.GetValueAsFloat64(f) + default: + // do nothing + } + } + } + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-databricks/parameters/provider.go b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/parameters/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..d752771b74bfba8385019aa20296838113ef0cef --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-databricks/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +const Name = "databricks" + +func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty { + return map[string]modulecapabilities.GenerativeProperty{ + Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: output, ExtractRequestParamsFunction: extract}, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-dummy/clients/dummy.go b/platform/dbops/binaries/weaviate-src/modules/generative-dummy/clients/dummy.go new file mode 100644 index 0000000000000000000000000000000000000000..c0dd6352a7fe024adabb7c5c3d288ea7960d5daf --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-dummy/clients/dummy.go @@ -0,0 +1,60 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "fmt" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/generative" +) + +type dummy struct { + logger logrus.FieldLogger +} + +func New(logger logrus.FieldLogger) *dummy { + return &dummy{ + logger: logger, + } +} + +func (v *dummy) GenerateSingleResult(ctx context.Context, properties *modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forPrompt, err := generative.MakeSinglePrompt(generative.Text(properties), prompt) + if err != nil { + return nil, err + } + return v.Generate(ctx, cfg, forPrompt, options, debug) +} + +func (v *dummy) GenerateAllResults(ctx context.Context, properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forTask, err := generative.MakeTaskPrompt(generative.Texts(properties), task) + if err != nil { + return nil, err + } + return v.Generate(ctx, cfg, forTask, options, debug) +} + +func (v *dummy) Generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) { + cls := cfg.ClassByModuleName("generative-dummy") + settings := "" + for key, val := range cls { + settings += fmt.Sprintf("%s=%s,", key, val) + } + result := "You want me to generate something based on the following prompt: " + prompt + ". With settings: " + settings + ". I'm sorry, I'm just a dummy and can't generate anything." 
+ return &modulecapabilities.GenerateResponse{ + Result: &result, + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-dummy/clients/dummy_meta.go b/platform/dbops/binaries/weaviate-src/modules/generative-dummy/clients/dummy_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..30ef42338f2d902cc4ff140a5254e7984319afba --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-dummy/clients/dummy_meta.go @@ -0,0 +1,18 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *dummy) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Generative Search - Dummy", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-dummy/config.go b/platform/dbops/binaries/weaviate-src/modules/generative-dummy/config.go new file mode 100644 index 0000000000000000000000000000000000000000..54d52d7a81bc9d05e5eaa6c83f3542aff9e70a8e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-dummy/config.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modgenerativedummy + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *GenerativeDummyModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeDummyModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeDummyModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-dummy/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-dummy/module.go new file mode 100644 index 0000000000000000000000000000000000000000..b752244ef59a0f97701a7b372e2fa4bc3efb4dd4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-dummy/module.go @@ -0,0 +1,77 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modgenerativedummy + +import ( + "context" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-dummy/clients" +) + +const Name = "generative-dummy" + +func New() *GenerativeDummyModule { + return &GenerativeDummyModule{} +} + +type GenerativeDummyModule struct { + generative generativeClient +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeDummyModule) Name() string { + return Name +} + +func (m *GenerativeDummyModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeDummyModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", Name) + } + return nil +} + +func (m *GenerativeDummyModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + m.generative = clients.New(logger) + return nil +} + +func (m *GenerativeDummyModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeDummyModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return map[string]modulecapabilities.GenerativeProperty{"dummy": {Client: m.generative}} +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/clients/friendliai.go 
b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/clients/friendliai.go new file mode 100644 index 0000000000000000000000000000000000000000..5ab03dee2f83792008bdcdc148ef9cb676ecde2e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/clients/friendliai.go @@ -0,0 +1,242 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/generative" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-friendliai/config" + friendliparams "github.com/weaviate/weaviate/modules/generative-friendliai/parameters" +) + +type friendliai struct { + apiKey string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *friendliai { + return &friendliai{ + apiKey: apiKey, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (v *friendliai) GenerateSingleResult(ctx context.Context, properties *modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forPrompt, err := generative.MakeSinglePrompt(generative.Text(properties), prompt) + if err != nil { + return nil, err + } + return v.Generate(ctx, cfg, forPrompt, options, debug) +} + +func (v *friendliai) GenerateAllResults(ctx context.Context, 
properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forTask, err := generative.MakeTaskPrompt(generative.Texts(properties), task) + if err != nil { + return nil, err + } + return v.Generate(ctx, cfg, forTask, options, debug) +} + +func (v *friendliai) Generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) { + params := v.getParameters(cfg, options) + debugInformation := v.getDebugInformation(debug, prompt) + + friendliUrl := v.getFriendliUrl(ctx, params.BaseURL) + friendliPrompt := []map[string]string{ + {"role": "user", "content": prompt}, + } + + input := generateInput{ + Messages: friendliPrompt, + Model: params.Model, + MaxTokens: params.MaxTokens, + Temperature: params.Temperature, + } + + body, err := json.Marshal(input) + if err != nil { + return nil, errors.Wrap(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", friendliUrl, + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + apiKey, err := v.getApiKey(ctx) + if err != nil { + return nil, errors.Wrapf(err, "FriendliAI API Key") + } + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey)) + req.Header.Add("Content-Type", "application/json") + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody generateResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if res.StatusCode == 404 { + return nil, errors.Errorf("Wrong model name, you can check available FriendliAI models from https://docs.friendli.ai/guides/serverless_endpoints/openai_compatibility#model-supports") + } + + if res.StatusCode != 200 || resBody.Error != nil { + if resBody.Error != nil { + return nil, errors.Errorf("connection to FriendliAI API failed with status: %d error: %v", res.StatusCode, resBody.Error.Message) + } + return nil, errors.Errorf("connection to FriendliAI API failed with status: %d", res.StatusCode) + } + + textResponse := resBody.Choices[0].Message.Content + + return &modulecapabilities.GenerateResponse{ + Result: &textResponse, + Debug: debugInformation, + Params: v.getResponseParams(resBody.Usage), + }, nil +} + +func (v *friendliai) getParameters(cfg moduletools.ClassConfig, options interface{}) friendliparams.Params { + settings := config.NewClassSettings(cfg) + + var params friendliparams.Params + if p, ok := options.(friendliparams.Params); ok { + params = p + } + if params.BaseURL == "" { + params.BaseURL = settings.BaseURL() + } + if params.Model == "" { + params.Model = settings.Model() + } + if params.Temperature == nil { + temperature := settings.Temperature() + params.Temperature = &temperature + } + if params.MaxTokens == nil { + maxTokens := settings.MaxTokens() + params.MaxTokens = &maxTokens + } + return params +} + +func (v *friendliai) getDebugInformation(debug bool, prompt string) *modulecapabilities.GenerateDebugInformation { + if debug { + return &modulecapabilities.GenerateDebugInformation{ + Prompt: prompt, + } + } + return nil +} + +func (v *friendliai) getResponseParams(usage *usage) map[string]interface{} { + if usage != nil { + return map[string]interface{}{friendliparams.Name: map[string]interface{}{"usage": usage}} + } + return nil +} + +func GetResponseParams(result map[string]interface{}) *responseParams { + if params, ok := 
result[friendliparams.Name].(map[string]interface{}); ok { + if usage, ok := params["usage"].(*usage); ok { + return &responseParams{Usage: usage} + } + } + return nil +} + +func (v *friendliai) getFriendliUrl(ctx context.Context, baseURL string) string { + passedBaseURL := baseURL + if headerBaseURL := modulecomponents.GetValueFromContext(ctx, "X-Friendli-Baseurl"); headerBaseURL != "" { + passedBaseURL = headerBaseURL + } + return fmt.Sprintf("%s/v1/chat/completions", passedBaseURL) +} + +func (v *friendliai) getApiKey(ctx context.Context) (string, error) { + if apiKey := modulecomponents.GetValueFromContext(ctx, "X-Friendli-Token"); apiKey != "" { + return apiKey, nil + } + if v.apiKey != "" { + return v.apiKey, nil + } + return "", errors.New("no api key found " + + "neither in request header: X-Friendli-Token " + + "nor in environment variable under FRIENDLI_TOKEN") +} + +type generateInput struct { + Model string `json:"model"` + Messages []map[string]string `json:"messages"` + MaxTokens *int `json:"max_tokens"` + Temperature *float64 `json:"temperature,omitempty"` +} + +type Message struct { + Role string `json:"role"` + Content string `json:"content"` +} + +type Choice struct { + Message Message `json:"message"` + Index int `json:"index"` + FinishReason string `json:"finish_reason"` +} + +type generateResponse struct { + Choices []Choice + Usage *usage `json:"usage,omitempty"` + Created int64 `json:"created"` + + Error *friendliApiError `json:"error,omitempty"` +} + +type usage struct { + PromptTokens *int `json:"prompt_tokens,omitempty"` + CompletionTokens *int `json:"completion_tokens,omitempty"` + TotalTokens *int `json:"total_tokens,omitempty"` +} + +type friendliApiError struct { + Message string `json:"message"` +} + +type responseParams struct { + Usage *usage `json:"usage,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/clients/friendliai_meta.go 
b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/clients/friendliai_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..2a6ac19c43a566e55ecefa6e9561398d21716f34 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/clients/friendliai_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *friendliai) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Generative Search - FriendliAI", + "documentationHref": "https://docs.friendli.ai/openapi/create-chat-completions", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/clients/friendliai_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/clients/friendliai_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..2fe55dd57a62e50a32c3d2762f2df9eb35eb4862 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/clients/friendliai_meta_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["name"] + assert.True(t, metaModel != nil) + documentationHref := meta["documentationHref"] + assert.True(t, documentationHref != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "hostname": "http://127.0.0.1:8080", + "modules": { + "generative-friendliai": { + "documentationHref": "URL_HERE", + "name": "FriendliAI Generative Module" + } + }, + "version": "1.1.0" +}` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/clients/friendliai_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/clients/friendliai_test.go new file mode 100644 index 0000000000000000000000000000000000000000..3c91615909fb3305877b667faa47b4ea9c6ac803 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/clients/friendliai_test.go @@ -0,0 +1,178 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate 
B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func TestGetAnswer(t *testing.T) { + props := []*modulecapabilities.GenerateProperties{{Text: map[string]string{"prop": "My name is john"}}} + + tests := []struct { + name string + answer generateResponse + timeout time.Duration + expectedResult string + }{ + { + name: "when the server has a successful answer", + answer: generateResponse{ + Choices: []Choice{ + { + Message: Message{ + Content: "John", + }, + }, + }, + Error: nil, + }, + expectedResult: "John", + }, + { + name: "when the server has an error", + answer: generateResponse{ + Error: &friendliApiError{ + Message: "some error from the server", + }, + }, + }, + { + name: "when the server does not respond in time", + answer: generateResponse{Error: &friendliApiError{Message: "context deadline exceeded"}}, + timeout: time.Second, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + handler := &testAnswerHandler{ + t: t, + answer: test.answer, + timeout: test.timeout, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", test.timeout, nullLogger()) + + settings := &fakeClassConfig{baseURL: server.URL} + res, err := c.GenerateAllResults(context.Background(), props, "What is my name?", nil, false, settings) + + if test.answer.Error != nil { + assert.Contains(t, err.Error(), test.answer.Error.Message) + } else { + assert.Equal(t, test.expectedResult, *res.Result) + 
} + }) + } + t.Run("when X-Friendli-Baseurl header is passed", func(t *testing.T) { + c := New("apiKey", 5*time.Second, nullLogger()) + baseUrl := "https://inference.friendli.ai" + + ctxWithValue := context.WithValue(context.Background(), + "X-Friendli-Baseurl", []string{"https://inference.friendli.ai/dedicated"}) + buildURL := c.getFriendliUrl(ctxWithValue, baseUrl) + assert.Equal(t, "https://inference.friendli.ai/dedicated/v1/chat/completions", buildURL) + + buildURL = c.getFriendliUrl(context.Background(), baseUrl) + assert.Equal(t, "https://inference.friendli.ai/v1/chat/completions", buildURL) + }) +} + +type testAnswerHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + answer generateResponse + timeout time.Duration +} + +func (f *testAnswerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/v1/chat/completions", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + time.Sleep(f.timeout) + + if f.answer.Error != nil && f.answer.Error.Message != "" { + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +type fakeClassConfig struct { + baseURL string +} + +func (cfg *fakeClassConfig) Tenant() string { + return "" +} + +func (cfg *fakeClassConfig) Class() map[string]interface{} { + return nil +} + +func (cfg *fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + settings := map[string]interface{}{ + "baseURL": cfg.baseURL, + } + return settings +} + +func (cfg *fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) 
TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/config.go b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..b7cac2a6375e8b8309c43bc2036b8566213f121e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativefriendliai + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/generative-friendliai/config" +) + +func (m *GenerativeFriendliAIModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeFriendliAIModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeFriendliAIModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := config.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/config/class_settings.go new file mode 100644 
index 0000000000000000000000000000000000000000..1bed44133da23bd4eca27bd827d21fe0127bc467 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/config/class_settings.go @@ -0,0 +1,91 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + baseURLProperty = "baseURL" + headerURLProperty = "X-Friendli-Baseurl" + modelProperty = "model" + temperatureProperty = "temperature" + maxTokensProperty = "maxTokens" +) + +// note it might not like this -- might want int values for e.g. MaxTokens +var ( + DefaultBaseURL = "https://inference.friendli.ai" + DefaultFriendliModel = "meta-llama-3.1-70b-instruct" + DefaultFriendliTemperature float64 = 0 + DefaultFriendliMaxTokens = 2048 +) + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("generative-friendliai")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + return nil +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) *string { + asString := ic.propertyValuesHelper.GetPropertyAsStringWithNotExists(ic.cfg, name, "", defaultValue) + return &asString +} + +func (ic *classSettings) getIntProperty(name string, 
defaultValue *int) *int { + wrongVal := -1 + return ic.propertyValuesHelper.GetPropertyAsIntWithNotExists(ic.cfg, name, &wrongVal, defaultValue) +} + +func (ic *classSettings) getFloat64Property(name string, defaultValue *float64) *float64 { + wrongVal := float64(-1) + return ic.propertyValuesHelper.GetPropertyAsFloat64WithNotExists(ic.cfg, name, &wrongVal, defaultValue) +} + +func (ic *classSettings) GetMaxTokensForModel(model string) int { + return DefaultFriendliMaxTokens +} + +func (ic *classSettings) BaseURL() string { + baseURL := *ic.getStringProperty(baseURLProperty, DefaultBaseURL) + if headerURL := *ic.getStringProperty(headerURLProperty, ""); headerURL != "" { + baseURL = headerURL + } + return baseURL +} + +func (ic *classSettings) Model() string { + return *ic.getStringProperty(modelProperty, DefaultFriendliModel) +} + +func (ic *classSettings) MaxTokens() int { + return *ic.getIntProperty(maxTokensProperty, &DefaultFriendliMaxTokens) +} + +func (ic *classSettings) Temperature() float64 { + return *ic.getFloat64Property(temperatureProperty, &DefaultFriendliTemperature) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..00c487cf42f3bbe4e0fd603cd1d666dc181a99de --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/config/class_settings_test.go @@ -0,0 +1,133 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantModel string + wantMaxTokens int + wantTemperature float64 + wantBaseURL string + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantModel: "meta-llama-3.1-70b-instruct", + wantMaxTokens: 2048, + wantTemperature: 0, + wantBaseURL: "https://inference.friendli.ai", + wantErr: nil, + }, + { + name: "everything non default configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "meta-llama-3.1-8b-instruct", + "maxTokens": 1024, + "temperature": 1, + }, + }, + wantModel: "meta-llama-3.1-8b-instruct", + wantMaxTokens: 1024, + wantTemperature: 1, + wantBaseURL: "https://inference.friendli.ai", + wantErr: nil, + }, + { + name: "newly supported serverless model", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "new-serverless-model", + }, + }, + wantModel: "new-serverless-model", + wantMaxTokens: 2048, + wantTemperature: 0, + wantBaseURL: "https://inference.friendli.ai", + wantErr: nil, + }, + { + name: "custom dedicated model", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "my-dedicated-model", + "X-Friendli-Baseurl": "https://inference.friendli.ai/dedicated", + }, + }, + wantModel: "my-dedicated-model", + wantMaxTokens: 2048, + wantTemperature: 0, + wantBaseURL: "https://inference.friendli.ai/dedicated", + wantErr: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.Equal(t, tt.wantErr.Error(), ic.Validate(nil).Error()) + } 
else { + assert.NoError(t, ic.Validate(nil)) + assert.Equal(t, tt.wantModel, ic.Model()) + assert.Equal(t, tt.wantMaxTokens, ic.MaxTokens()) + assert.Equal(t, tt.wantTemperature, ic.Temperature()) + assert.Equal(t, tt.wantBaseURL, ic.BaseURL()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..30acce16f7aa8a01925a7ae19b0b5eb234ffcf09 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/module.go @@ -0,0 +1,86 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modgenerativefriendliai + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-friendliai/clients" + "github.com/weaviate/weaviate/modules/generative-friendliai/parameters" +) + +const Name = "generative-friendliai" + +func New() *GenerativeFriendliAIModule { + return &GenerativeFriendliAIModule{} +} + +type GenerativeFriendliAIModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeFriendliAIModule) Name() string { + return Name +} + +func (m *GenerativeFriendliAIModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeFriendliAIModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", Name) + } + + return nil +} + +func (m *GenerativeFriendliAIModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("FRIENDLI_TOKEN") + + client := clients.New(apiKey, timeout, logger) + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(m.generative) + + return nil +} + +func (m *GenerativeFriendliAIModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeFriendliAIModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// 
verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..6b9def6846b767a01bdbed4dd61fb1bfcc173ed0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/parameters/graphql.go @@ -0,0 +1,70 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "baseURL": &graphql.InputObjectFieldConfig{ + Description: "baseURL", + Type: graphql.String, + }, + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "maxTokens": &graphql.InputObjectFieldConfig{ + Description: "maxTokens", + Type: graphql.Int, + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + "n": &graphql.InputObjectFieldConfig{ + Description: "n", + Type: graphql.Int, + }, + "topP": &graphql.InputObjectFieldConfig{ + Description: "topP", + Type: graphql.Float, + }, + }, + }), + DefaultValue: nil, + } +} + +func output(prefix string) *graphql.Field { + return 
&graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sFields", prefix, Name), + Fields: graphql.Fields{ + "usage": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sUsageMetadataFields", prefix, Name), + Fields: graphql.Fields{ + "prompt_tokens": &graphql.Field{Type: graphql.Int}, + "completion_tokens": &graphql.Field{Type: graphql.Int}, + "total_tokens": &graphql.Field{Type: graphql.Int}, + }, + })}, + }, + })} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/parameters/params.go new file mode 100644 index 0000000000000000000000000000000000000000..1e423ed9b987dccfc2d5592949c4191548575294 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/parameters/params.go @@ -0,0 +1,50 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + BaseURL string + Model string + MaxTokens *int + Temperature *float64 + N *int + TopP *float64 +} + +func extract(field *ast.ObjectField) interface{} { + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "maxTokens": + out.MaxTokens = gqlparser.GetValueAsInt(f) + case "temperature": + out.Temperature = gqlparser.GetValueAsFloat64(f) + case "n": + out.N = gqlparser.GetValueAsInt(f) + case "topP": + out.TopP = gqlparser.GetValueAsFloat64(f) + default: + // do nothing + } + } + } + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/parameters/provider.go b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/parameters/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..41a0b85ad5584e6c68b5a6f4c0e16e18266b20c9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-friendliai/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +const Name = "friendliai" + +func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty { + return map[string]modulecapabilities.GenerativeProperty{ + Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: output, ExtractRequestParamsFunction: extract}, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-google/clients/google.go b/platform/dbops/binaries/weaviate-src/modules/generative-google/clients/google.go new file mode 100644 index 0000000000000000000000000000000000000000..97f1c90c2b1c54ac5190a71bd14e89acc60ee075 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-google/clients/google.go @@ -0,0 +1,731 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-google/config" + googleparams "github.com/weaviate/weaviate/modules/generative-google/parameters" + "github.com/weaviate/weaviate/usecases/modulecomponents/apikey" + "github.com/weaviate/weaviate/usecases/modulecomponents/generative" +) + +type harmCategory string + +var ( + // Category is unspecified. + HarmCategoryUnspecified harmCategory = "HARM_CATEGORY_UNSPECIFIED" + // Negative or harmful comments targeting identity and/or protected attribute. 
+ HarmCategoryDerogatory harmCategory = "HARM_CATEGORY_DEROGATORY" + // Content that is rude, disrepspectful, or profane. + HarmCategoryToxicity harmCategory = "HARM_CATEGORY_TOXICITY" + // Describes scenarios depictng violence against an individual or group, or general descriptions of gore. + HarmCategoryViolence harmCategory = "HARM_CATEGORY_VIOLENCE" + // Contains references to sexual acts or other lewd content. + HarmCategorySexual harmCategory = "HARM_CATEGORY_SEXUAL" + // Promotes unchecked medical advice. + HarmCategoryMedical harmCategory = "HARM_CATEGORY_MEDICAL" + // Dangerous content that promotes, facilitates, or encourages harmful acts. + HarmCategoryDangerous harmCategory = "HARM_CATEGORY_DANGEROUS" + // Harassment content. + HarmCategoryHarassment harmCategory = "HARM_CATEGORY_HARASSMENT" + // Hate speech and content. + HarmCategoryHate_speech harmCategory = "HARM_CATEGORY_HATE_SPEECH" + // Sexually explicit content. + HarmCategorySexually_explicit harmCategory = "HARM_CATEGORY_SEXUALLY_EXPLICIT" + // Dangerous content. + HarmCategoryDangerous_content harmCategory = "HARM_CATEGORY_DANGEROUS_CONTENT" +) + +type harmBlockThreshold string + +var ( + // Threshold is unspecified. + HarmBlockThresholdUnspecified harmBlockThreshold = "HARM_BLOCK_THRESHOLD_UNSPECIFIED" + // Content with NEGLIGIBLE will be allowed. + BlockLowAndAbove harmBlockThreshold = "BLOCK_LOW_AND_ABOVE" + // Content with NEGLIGIBLE and LOW will be allowed. + BlockMediumAndAbove harmBlockThreshold = "BLOCK_MEDIUM_AND_ABOVE" + // Content with NEGLIGIBLE, LOW, and MEDIUM will be allowed. + BlockOnlyHigh harmBlockThreshold = "BLOCK_ONLY_HIGH" + // All content will be allowed. + BlockNone harmBlockThreshold = "BLOCK_NONE" +) + +type harmProbability string + +var ( + // Probability is unspecified. + HARM_PROBABILITY_UNSPECIFIED harmProbability = "HARM_PROBABILITY_UNSPECIFIED" + // Content has a negligible chance of being unsafe. 
+ NEGLIGIBLE harmProbability = "NEGLIGIBLE" + // Content has a low chance of being unsafe. + LOW harmProbability = "LOW" + // Content has a medium chance of being unsafe. + MEDIUM harmProbability = "MEDIUM" + // Content has a high chance of being unsafe. + HIGH harmProbability = "HIGH" +) + +var ( + FINISH_REASON_UNSPECIFIED_EXPLANATION = "The finish reason is unspecified." + FINISH_REASON_STOP_EXPLANATION = "Natural stop point of the model or provided stop sequence." + FINISH_REASON_MAX_TOKENS_EXPLANATION = "The maximum number of tokens as specified in the request was reached." + FINISH_REASON_SAFETY_EXPLANATION = "The token generation was stopped as the response was flagged for safety reasons. NOTE: When streaming the Candidate.content will be empty if content filters blocked the output." + FINISH_REASON_RECITATION_EXPLANATION = "The token generation was stopped as the response was flagged for unauthorized citations." + FINISH_REASON_OTHER_EXPLANATION = "All other reasons that stopped the token generation" + FINISH_REASON_BLOCKLIST_EXPLANATION = "The token generation was stopped as the response was flagged for the terms which are included from the terminology blocklist." + FINISH_REASON_PROHIBITED_CONTENT_EXPLANATION = "The token generation was stopped as the response was flagged for the prohibited contents." + FINISH_REASON_SPII_EXPLANATION = "The token generation was stopped as the response was flagged for Sensitive Personally Identifiable Information (SPII) contents." 
// buildURL resolves the REST endpoint for a generate call.
//
// AI Studio (Generative Language API):
//   - gemini models:   v1beta models/{model}:generateContent
//   - everything else: the legacy v1beta2 chat-bison-001 generateMessage
//
// Vertex AI:
//   - gemini models:   regional {region}-aiplatform generateContent endpoint
//   - everything else: the publisher :predict endpoint on apiEndpoint (the
//     location path segment is fixed to us-central1)
func buildURL(useGenerativeAI bool, apiEndpoint, projectID, modelID, region string) string {
	isGemini := strings.HasPrefix(modelID, "gemini")
	if useGenerativeAI {
		// Generative AI endpoints, for more context check out these links:
		// https://developers.generativeai.google/models/language#model_variations
		// https://developers.generativeai.google/api/rest/generativelanguage/models/generateMessage
		if isGemini {
			return fmt.Sprintf("https://generativelanguage.googleapis.com/v1beta/models/%s:generateContent", modelID)
		}
		return "https://generativelanguage.googleapis.com/v1beta2/models/chat-bison-001:generateMessage"
	}
	if isGemini {
		// Vertex AI support for Gemini models:
		// https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/gemini
		return fmt.Sprintf("https://%s-aiplatform.googleapis.com/v1/projects/%s/locations/%s/publishers/google/models/%s:generateContent",
			region, projectID, region, modelID)
	}
	return fmt.Sprintf("https://%s/v1/projects/%s/locations/us-central1/publishers/google/models/%s:predict",
		apiEndpoint, projectID, modelID)
}
// generate performs one full request/response cycle against a Google
// generative endpoint: it resolves effective parameters, builds the endpoint
// URL and request payload, issues the POST, and dispatches the body to the
// parser matching the endpoint/model combination.
func (v *google) generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, imageProperties []map[string]*string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) {
	// Merge per-request options with class-level settings (request options win).
	params := v.getParameters(cfg, options, imageProperties)
	debugInformation := v.getDebugInformation(debug, prompt)

	useGenerativeAIEndpoint := v.useGenerativeAIEndpoint(params.ApiEndpoint)
	// An explicit endpoint ID (custom deployed model) overrides the model name
	// in the URL.
	modelID := params.Model
	if params.EndpointID != "" {
		modelID = params.EndpointID
	}

	endpointURL := v.buildUrlFn(useGenerativeAIEndpoint, params.ApiEndpoint, params.ProjectID, modelID, params.Region)
	input := v.getPayload(useGenerativeAIEndpoint, prompt, params)

	body, err := json.Marshal(input)
	if err != nil {
		return nil, errors.Wrap(err, "marshal body")
	}

	req, err := http.NewRequestWithContext(ctx, "POST", endpointURL,
		bytes.NewReader(body))
	if err != nil {
		return nil, errors.Wrap(err, "create POST request")
	}

	apiKey, err := v.getApiKey(ctx, useGenerativeAIEndpoint)
	if err != nil {
		return nil, errors.Wrapf(err, "Google API Key")
	}
	req.Header.Add("Content-Type", "application/json")
	// AI Studio authenticates with an API-key header; Vertex AI uses an OAuth
	// bearer token.
	if useGenerativeAIEndpoint {
		req.Header.Add("x-goog-api-key", apiKey)
	} else {
		req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey))
	}

	res, err := v.httpClient.Do(req)
	if err != nil {
		return nil, errors.Wrap(err, "send POST request")
	}
	defer res.Body.Close()

	bodyBytes, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, errors.Wrap(err, "read response body")
	}

	// Gemini models share the generateContent response schema on both endpoint
	// families; non-gemini responses differ between AI Studio (generateMessage)
	// and Vertex AI (predict).
	if useGenerativeAIEndpoint {
		if strings.HasPrefix(modelID, "gemini") {
			return v.parseGenerateContentResponse(res.StatusCode, bodyBytes, debugInformation)
		}
		return v.parseGenerateMessageResponse(res.StatusCode, bodyBytes, debugInformation)
	}
	if strings.HasPrefix(modelID, "gemini") {
		return v.parseGenerateContentResponse(res.StatusCode, bodyBytes, debugInformation)
	}
	return v.parseResponse(res.StatusCode, bodyBytes, debugInformation)
}
// getParameters builds the effective request parameters: it starts from the
// per-request options (if any were passed) and fills every unset field from
// the class-level module settings, so request-time values always win.
func (v *google) getParameters(cfg moduletools.ClassConfig, options interface{}, imagePropertiesArray []map[string]*string) googleparams.Params {
	settings := config.NewClassSettings(cfg)

	var params googleparams.Params
	if p, ok := options.(googleparams.Params); ok {
		params = p
	}

	if params.ApiEndpoint == "" {
		params.ApiEndpoint = settings.ApiEndpoint()
	}
	if params.ProjectID == "" {
		params.ProjectID = settings.ProjectID()
	}
	if params.EndpointID == "" {
		params.EndpointID = settings.EndpointID()
	}
	if params.Region == "" {
		params.Region = settings.Region()
	}
	if params.Model == "" {
		// Fall back to the legacy modelId setting when model is not configured.
		model := settings.Model()
		if model == "" {
			model = settings.ModelID()
		}
		params.Model = model
	}
	if params.Temperature == nil {
		temperature := settings.Temperature()
		params.Temperature = &temperature
	}
	if params.TopP == nil {
		topP := settings.TopP()
		params.TopP = &topP
	}
	if params.TopK == nil {
		topK := settings.TopK()
		params.TopK = &topK
	}
	if params.MaxTokens == nil {
		maxTokens := settings.TokenLimit()
		params.MaxTokens = &maxTokens
	}

	// Resolve which of the supplied blob properties should be sent as images.
	params.Images = generative.ParseImageProperties(params.Images, params.ImageProperties, imagePropertiesArray)

	return params
}
generateContentResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + + if err := v.checkResponse(statusCode, resBody.Error); err != nil { + return nil, err + } + + if len(resBody.Candidates) > 0 { + if len(resBody.Candidates[0].Content.Parts) > 0 { + var params map[string]interface{} + if resBody.UsageMetadata != nil { + params = v.getResponseParams(map[string]interface{}{ + "usageMetadata": resBody.UsageMetadata, + }) + } + return v.getGenerateResponse(resBody.Candidates[0].Content.Parts[0].Text, params, debug) + } + return nil, fmt.Errorf("%s", v.decodeFinishReason(resBody.Candidates[0].FinishReason)) + } + + return &modulecapabilities.GenerateResponse{ + Result: nil, + Debug: debug, + }, nil +} + +func (v *google) parseResponse(statusCode int, bodyBytes []byte, debug *modulecapabilities.GenerateDebugInformation) (*modulecapabilities.GenerateResponse, error) { + var resBody generateResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if err := v.checkResponse(statusCode, resBody.Error); err != nil { + return nil, err + } + + if len(resBody.Predictions) > 0 && len(resBody.Predictions[0].Candidates) > 0 { + var params map[string]interface{} + if resBody.Metadata != nil { + params = v.getResponseParams(map[string]interface{}{ + "metadata": resBody.Metadata, + }) + } + return v.getGenerateResponse(resBody.Predictions[0].Candidates[0].Content, params, debug) + } + + return &modulecapabilities.GenerateResponse{ + Result: nil, + Debug: debug, + }, nil +} + +func (v *google) getResponseParams(params map[string]interface{}) map[string]interface{} { + return map[string]interface{}{googleparams.Name: params} +} + +func GetResponseParams(result map[string]interface{}) *responseParams { + if params, ok := result[googleparams.Name].(map[string]interface{}); ok { + responseParams := &responseParams{} + if metadata, ok := params["metadata"].(*metadata); ok { + responseParams.Metadata = metadata + } + if usageMetadata, ok := params["usageMetadata"].(*usageMetadata); ok { + responseParams.UsageMetadata = usageMetadata + } + return responseParams + } + return nil +} + +func (v *google) getGenerateResponse(content *string, params map[string]interface{}, debug *modulecapabilities.GenerateDebugInformation) (*modulecapabilities.GenerateResponse, error) { + if content != nil && *content != "" { + trimmedResponse := strings.Trim(*content, "\n") + return &modulecapabilities.GenerateResponse{ + Result: &trimmedResponse, + Params: params, + Debug: debug, + }, nil + } + + return &modulecapabilities.GenerateResponse{ + Result: nil, + Debug: debug, + }, nil +} + +func (v *google) checkResponse(statusCode int, googleApiError *googleApiError) error { + if statusCode != 200 || googleApiError != nil { + if googleApiError != nil { + return fmt.Errorf("connection to Google failed with status: %v error: %v", + statusCode, googleApiError.Message) + } + return fmt.Errorf("connection to Google 
// getPayload assembles the request body matching the target endpoint: gemini
// models always get a generateContent payload; otherwise AI Studio gets a
// generateMessage payload and Vertex AI gets a predict payload. Returns one
// of generateContentRequest, generateMessageRequest, or generateInput.
func (v *google) getPayload(useGenerativeAI bool, prompt string, params googleparams.Params) any {
	if useGenerativeAI {
		if strings.HasPrefix(params.Model, "gemini") {
			return v.getGeminiPayload(prompt, params)
		}
		// Legacy AI Studio chat payload; exactly one candidate is requested.
		input := generateMessageRequest{
			Prompt: &generateMessagePrompt{
				Messages: []generateMessage{
					{
						Content: &prompt,
					},
				},
			},
			Temperature:    params.Temperature,
			TopP:           params.TopP,
			TopK:           params.TopK,
			CandidateCount: 1,
		}
		return input
	}
	if strings.HasPrefix(params.Model, "gemini") {
		return v.getGeminiPayload(prompt, params)
	}
	// Vertex AI predict payload for non-gemini chat models.
	input := generateInput{
		Instances: []instance{
			{
				Messages: []message{
					{
						Content: prompt,
					},
				},
			},
		},
		Parameters: parameters{
			Temperature:      params.Temperature,
			MaxOutputTokens:  params.MaxTokens,
			TopP:             params.TopP,
			TopK:             params.TopK,
			StopSequences:    params.StopSequences,
			PresencePenalty:  params.PresencePenalty,
			FrequencyPenalty: params.FrequencyPenalty,
		},
	}
	return input
}
Category: HarmCategoryHate_speech, + Threshold: BlockMediumAndAbove, + }, + { + Category: HarmCategoryDangerous_content, + Threshold: BlockMediumAndAbove, + }, + }, + } + return input +} + +func (v *google) getApiKey(ctx context.Context, useGenerativeAIEndpoint bool) (string, error) { + return v.googleApiKey.GetApiKey(ctx, v.apiKey, useGenerativeAIEndpoint, v.useGoogleAuth) +} + +func (v *google) decodeFinishReason(reason string) string { + switch reason { + case FINISH_REASON_UNSPECIFIED: + return FINISH_REASON_UNSPECIFIED_EXPLANATION + case FINISH_REASON_STOP: + return FINISH_REASON_STOP_EXPLANATION + case FINISH_REASON_MAX_TOKENS: + return FINISH_REASON_MAX_TOKENS_EXPLANATION + case FINISH_REASON_SAFETY: + return FINISH_REASON_SAFETY_EXPLANATION + case FINISH_REASON_RECITATION: + return FINISH_REASON_RECITATION_EXPLANATION + case FINISH_REASON_OTHER: + return FINISH_REASON_OTHER_EXPLANATION + case FINISH_REASON_BLOCKLIST: + return FINISH_REASON_BLOCKLIST_EXPLANATION + case FINISH_REASON_PROHIBITED_CONTENT: + return FINISH_REASON_PROHIBITED_CONTENT_EXPLANATION + case FINISH_REASON_SPII: + return FINISH_REASON_SPII_EXPLANATION + default: + return fmt.Sprintf("unregonized finis reason: %s", reason) + } +} + +type generateInput struct { + Instances []instance `json:"instances,omitempty"` + Parameters parameters `json:"parameters"` +} + +type instance struct { + Context string `json:"context,omitempty"` + Messages []message `json:"messages,omitempty"` + Examples []example `json:"examples,omitempty"` +} + +type message struct { + Author string `json:"author"` + Content string `json:"content"` +} + +type example struct { + Input string `json:"input"` + Output string `json:"output"` +} + +type parameters struct { + Temperature *float64 `json:"temperature,omitempty"` + MaxOutputTokens *int `json:"maxOutputTokens,omitempty"` + TopP *float64 `json:"topP,omitempty"` + TopK *int `json:"topK,omitempty"` + StopSequences []string `json:"stopSequences,omitempty"` + 
PresencePenalty *float64 `json:"presencePenalty,omitempty"` + FrequencyPenalty *float64 `json:"frequencyPenalty,omitempty"` +} + +type generateResponse struct { + Predictions []prediction `json:"predictions,omitempty"` + Metadata *metadata `json:"metadata,omitempty"` + Error *googleApiError `json:"error,omitempty"` + DeployedModelId string `json:"deployedModelId,omitempty"` + Model string `json:"model,omitempty"` + ModelDisplayName string `json:"modelDisplayName,omitempty"` + ModelVersionId string `json:"modelVersionId,omitempty"` +} + +type prediction struct { + Candidates []candidate `json:"candidates,omitempty"` + SafetyAttributes *[]safetyAttributes `json:"safetyAttributes,omitempty"` +} + +type metadata struct { + TokenMetadata *tokenMetadata `json:"tokenMetadata,omitempty"` +} + +type tokenMetadata struct { + InputTokenCount *tokenCount `json:"inputTokenCount,omitempty"` + OutputTokenCount *tokenCount `json:"outputTokenCount,omitempty"` +} + +type tokenCount struct { + TotalBillableCharacters int64 `json:"totalBillableCharacters,omitempty"` + TotalTokens int64 `json:"totalTokens,omitempty"` +} + +type candidate struct { + Author *string `json:"author,omitempty"` + Content *string `json:"content,omitempty"` +} + +type safetyAttributes struct { + Scores []float64 `json:"scores,omitempty"` + Blocked *bool `json:"blocked,omitempty"` + Categories []string `json:"categories,omitempty"` +} + +type googleApiError struct { + Code int `json:"code"` + Message string `json:"message"` + Status string `json:"status"` +} + +type generateMessageRequest struct { + Prompt *generateMessagePrompt `json:"prompt,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"topP,omitempty"` + TopK *int `json:"topK,omitempty"` + CandidateCount int `json:"candidateCount"` // default 1 +} + +type generateMessagePrompt struct { + Context string `json:"prompt,omitempty"` + Examples []generateExample `json:"examples,omitempty"` + Messages []generateMessage 
`json:"messages,omitempty"` +} + +type generateMessage struct { + Author string `json:"author,omitempty"` + Content *string `json:"content,omitempty"` + CitationMetadata *generateCitationMetadata `json:"citationMetadata,omitempty"` +} + +type generateCitationMetadata struct { + CitationSources []generateCitationSource `json:"citationSources,omitempty"` +} + +type generateCitationSource struct { + StartIndex int `json:"startIndex,omitempty"` + EndIndex int `json:"endIndex,omitempty"` + URI string `json:"uri,omitempty"` + License string `json:"license,omitempty"` +} + +type generateExample struct { + Input *generateMessage `json:"input,omitempty"` + Output *generateMessage `json:"output,omitempty"` +} + +type generateMessageResponse struct { + Candidates []generateMessage `json:"candidates,omitempty"` + Messages []generateMessage `json:"messages,omitempty"` + Filters []contentFilter `json:"filters,omitempty"` + Error *googleApiError `json:"error,omitempty"` +} + +type contentFilter struct { + Reason string `json:"reason,omitempty"` + Message string `json:"message,omitempty"` +} + +type generateContentRequest struct { + Contents []content `json:"contents,omitempty"` + SafetySettings []safetySetting `json:"safetySettings,omitempty"` + GenerationConfig *generationConfig `json:"generationConfig,omitempty"` +} + +type content struct { + Parts []part `json:"parts,omitempty"` + Role string `json:"role,omitempty"` +} + +type part struct { + Text *string `json:"text,omitempty"` + InlineData *inlineData `json:"inline_data,omitempty"` +} + +type inlineData struct { + MimeType string `json:"mime_type,omitempty"` + Data *string `json:"data,omitempty"` +} + +type safetySetting struct { + Category harmCategory `json:"category,omitempty"` + Threshold harmBlockThreshold `json:"threshold,omitempty"` +} + +type generationConfig struct { + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"topP,omitempty"` + TopK *int `json:"topK,omitempty"` + MaxOutputTokens 
*int `json:"maxOutputTokens,omitempty"` + PresencePenalty *float64 `json:"presencePenalty,omitempty"` + FrequencyPenalty *float64 `json:"frequencyPenalty,omitempty"` + StopSequences []string `json:"stopSequences,omitempty"` + CandidateCount int `json:"candidateCount,omitempty"` +} + +type generateContentResponse struct { + Candidates []generateContentCandidate `json:"candidates,omitempty"` + UsageMetadata *usageMetadata `json:"usageMetadata,omitempty"` + PromptFeedback *promptFeedback `json:"promptFeedback,omitempty"` + Error *googleApiError `json:"error,omitempty"` +} + +type generateContentCandidate struct { + Content contentResponse `json:"content,omitempty"` + FinishReason string `json:"finishReason,omitempty"` + Index int `json:"index,omitempty"` + SafetyRatings []safetyRating `json:"safetyRatings,omitempty"` +} + +type contentResponse struct { + Parts []part `json:"parts,omitempty"` + Role string `json:"role,omitempty"` +} + +type promptFeedback struct { + SafetyRatings []safetyRating `json:"safetyRatings,omitempty"` +} + +type safetyRating struct { + Category harmCategory `json:"category,omitempty"` + Probability harmProbability `json:"probability,omitempty"` + Blocked *bool `json:"blocked,omitempty"` +} + +type usageMetadata struct { + PromptTokenCount int `json:"promptTokenCount"` + CandidatesTokenCount int `json:"candidatesTokenCount"` + TotalTokenCount int `json:"totalTokenCount"` +} + +type responseParams struct { + Metadata *metadata `json:"metadata,omitempty"` + UsageMetadata *usageMetadata `json:"usageMetadata,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-google/clients/google_meta.go b/platform/dbops/binaries/weaviate-src/modules/generative-google/clients/google_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..775234da6391054d67b78b6276708b6b5dc76d70 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-google/clients/google_meta.go @@ -0,0 +1,19 @@ +// _ _ +// 
__ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *google) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Generative Search - Google", + "documentationHref": "https://cloud.google.com/vertex-ai/docs/generative-ai/chat/test-chat-prompts", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-google/clients/google_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-google/clients/google_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1605e3d9dc0d41d8a325ad6f2c8e346cae41e465 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-google/clients/google_meta_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, false, 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["name"] + assert.True(t, metaModel != nil) + documentationHref := meta["documentationHref"] + assert.True(t, documentationHref != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "hostname": "http://127.0.0.1:8080", + "modules": { + "generative-google": { + "documentationHref": "to be announced", + "name": "Google Generative Module" + } + }, + "version": "1.16.0" +}` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-google/clients/google_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-google/clients/google_test.go new file mode 100644 index 0000000000000000000000000000000000000000..de0cb7901eed16014b9d83ff22fb3d720191294d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-google/clients/google_test.go @@ -0,0 +1,146 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. 
All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/modulecapabilities" + googleparams "github.com/weaviate/weaviate/modules/generative-google/parameters" + "github.com/weaviate/weaviate/usecases/modulecomponents/apikey" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func TestGetAnswer(t *testing.T) { + t.Run("when the server has a successful answer ", func(t *testing.T) { + prompt := "John" + handler := &testAnswerHandler{ + t: t, + answer: generateResponse{ + Predictions: []prediction{ + { + Candidates: []candidate{ + { + Content: &prompt, + }, + }, + }, + }, + Error: nil, + }, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := &google{ + apiKey: "apiKey", + httpClient: &http.Client{}, + googleApiKey: apikey.NewGoogleApiKey(), + buildUrlFn: func(useGenerativeAI bool, apiEndpoint, projectID, modelID, region string) string { + return server.URL + }, + logger: nullLogger(), + } + + params := googleparams.Params{ApiEndpoint: server.URL} + props := []*modulecapabilities.GenerateProperties{{Text: map[string]string{"prop": "My name is john"}}} + expected := modulecapabilities.GenerateResponse{ + Result: ptString("John"), + } + + res, err := c.GenerateAllResults(context.Background(), props, "What is my name?", params, false, nil) + + assert.Nil(t, err) + assert.Equal(t, expected, *res) + }) + + t.Run("when the server has a an error", func(t *testing.T) { + server := httptest.NewServer(&testAnswerHandler{ + t: t, + answer: generateResponse{ + Error: &googleApiError{ + Message: "some error from the server", + }, + }, + }) + defer server.Close() + + c := &google{ + apiKey: "apiKey", + 
// ptString returns a pointer to a copy of the given string; a test helper for
// building *string literals inline.
func ptString(in string) *string {
	out := in
	return &out
}
\ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativegoogle + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/generative-google/config" +) + +func (m *GenerativeGoogleModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeGoogleModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeGoogleModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := config.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-google/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/generative-google/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..59380cdb2ef5c6ec2b5fd21dc8670a411a94b60b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-google/config/class_settings.go @@ -0,0 +1,246 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + apiEndpointProperty = "apiEndpoint" + projectIDProperty = "projectId" + endpointIDProperty = "endpointId" + regionProperty = "region" + modelIDProperty = "modelId" + modelProperty = "model" + temperatureProperty = "temperature" + tokenLimitProperty = "tokenLimit" + topPProperty = "topP" + topKProperty = "topK" +) + +var ( + DefaultGoogleApiEndpoint = "us-central1-aiplatform.googleapis.com" + DefaultGoogleModel = "chat-bison" + DefaultGoogleRegion = "us-central1" + DefaultGoogleTemperature = 1.0 + DefaultTokenLimit = 1024 + DefaultTokenLimitGemini1_0 = 2048 + DefaultTokenLimitGemini1_0_Vision = 2048 + DefaultTokenLimitGemini1_5 = 8192 + DefaultGoogleTopP = 0.95 + DefaultGoogleTopK = 40 + DefaulGenerativeAIApiEndpoint = "generativelanguage.googleapis.com" + DefaulGenerativeAIModelID = "chat-bison-001" +) + +var supportedVertexAIModels = []string{ + DefaultGoogleModel, + "chat-bison-32k", + "chat-bison@002", + "chat-bison-32k@002", + "chat-bison@001", + "gemini-1.5-pro-preview-0514", + "gemini-1.5-pro-preview-0409", + "gemini-1.5-flash-preview-0514", + "gemini-1.0-pro-002", + "gemini-1.0-pro-001", + "gemini-1.0-pro", +} + +var supportedGenerativeAIModels = []string{ + // chat-bison-001 + DefaulGenerativeAIModelID, + "gemini-pro", + "gemini-ultra", + "gemini-1.5-flash-latest", + "gemini-1.5-pro-latest", +} + +type ClassSettings interface { + Validate(class *models.Class) error + // Module settings + ApiEndpoint() string + ProjectID() string + EndpointID() string + Region() string + + ModelID() string + Model() string + // parameters + // 0.0 - 1.0 + Temperature() float64 + // 1 - 1024 / 2048 Gemini 1.0 / 8192 Gemini 1.5 + TokenLimit() int + // 1 - + TopK() 
int + // 0.0 - 1.0 + TopP() float64 +} + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) ClassSettings { + return &classSettings{ + cfg: cfg, + propertyValuesHelper: basesettings.NewPropertyValuesHelperWithAltNames("generative-google", []string{"generative-palm"}), + } +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + var errorMessages []string + + apiEndpoint := ic.ApiEndpoint() + projectID := ic.ProjectID() + if apiEndpoint != DefaulGenerativeAIApiEndpoint && projectID == "" { + errorMessages = append(errorMessages, fmt.Sprintf("%s cannot be empty", projectIDProperty)) + } + temperature := ic.Temperature() + if temperature < 0 || temperature > 1 { + errorMessages = append(errorMessages, fmt.Sprintf("%s has to be float value between 0 and 1", temperatureProperty)) + } + tokenLimit := ic.TokenLimit() + if tokenLimit < 1 || tokenLimit > ic.getDefaultTokenLimit(ic.ModelID()) { + errorMessages = append(errorMessages, fmt.Sprintf("%s has to be an integer value between 1 and %v", tokenLimitProperty, ic.getDefaultTokenLimit(ic.ModelID()))) + } + topK := ic.TopK() + if topK < 1 { + errorMessages = append(errorMessages, fmt.Sprintf("%s has to be an integer value above or equal 1", topKProperty)) + } + topP := ic.TopP() + if topP < 0 || topP > 1 { + errorMessages = append(errorMessages, fmt.Sprintf("%s has to be float value between 0 and 1", topPProperty)) + } + // Google MakerSuite + availableModels := append(supportedGenerativeAIModels, supportedVertexAIModels...) 
+ model := ic.ModelID() + if apiEndpoint == DefaulGenerativeAIApiEndpoint && !contains(availableModels, model) { + errorMessages = append(errorMessages, fmt.Sprintf("%s is not supported available models are: %+v", model, availableModels)) + } + + if len(errorMessages) > 0 { + return fmt.Errorf("%s", strings.Join(errorMessages, ", ")) + } + + return nil +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) string { + return ic.propertyValuesHelper.GetPropertyAsString(ic.cfg, name, defaultValue) +} + +func (ic *classSettings) getFloatProperty(name string, defaultValue float64) float64 { + asFloat64 := ic.propertyValuesHelper.GetPropertyAsFloat64(ic.cfg, name, &defaultValue) + return *asFloat64 +} + +func (ic *classSettings) getIntProperty(name string, defaultValue int) int { + asInt := ic.propertyValuesHelper.GetPropertyAsInt(ic.cfg, name, &defaultValue) + return *asInt +} + +func (ic *classSettings) getApiEndpoint(projectID string) string { + if projectID == "" { + return DefaulGenerativeAIApiEndpoint + } + return DefaultGoogleApiEndpoint +} + +func (ic *classSettings) getDefaultModel(apiEndpoint string) string { + if apiEndpoint == DefaulGenerativeAIApiEndpoint { + return DefaulGenerativeAIModelID + } + return DefaultGoogleModel +} + +func (ic *classSettings) getDefaultTokenLimit(model string) int { + if strings.HasPrefix(model, "gemini-1.5") { + return DefaultTokenLimitGemini1_5 + } + if strings.HasPrefix(model, "gemini-1.0") || strings.HasPrefix(model, "gemini-pro") { + if strings.Contains(model, "vision") { + return DefaultTokenLimitGemini1_0_Vision + } + return DefaultTokenLimitGemini1_0 + } + return DefaultTokenLimit +} + +// Google params +func (ic *classSettings) ApiEndpoint() string { + return ic.getStringProperty(apiEndpointProperty, ic.getApiEndpoint(ic.ProjectID())) +} + +func (ic *classSettings) ProjectID() string { + return ic.getStringProperty(projectIDProperty, "") +} + +func (ic *classSettings) EndpointID() string { + return 
ic.getStringProperty(endpointIDProperty, "") +} + +func (ic *classSettings) ModelID() string { + return ic.getStringProperty(modelIDProperty, ic.getDefaultModel(ic.ApiEndpoint())) +} + +func (ic *classSettings) Model() string { + return ic.getStringProperty(modelProperty, "") +} + +func (ic *classSettings) Region() string { + return ic.getStringProperty(regionProperty, DefaultGoogleRegion) +} + +// parameters + +// 0.0 - 1.0 +func (ic *classSettings) Temperature() float64 { + return ic.getFloatProperty(temperatureProperty, DefaultGoogleTemperature) +} + +// 1 - 1024 +func (ic *classSettings) TokenLimit() int { + return ic.getIntProperty(tokenLimitProperty, ic.getDefaultTokenLimit(ic.ModelID())) +} + +// 1 - 40 +func (ic *classSettings) TopK() int { + return ic.getIntProperty(topKProperty, DefaultGoogleTopK) +} + +// 0.0 - 1.0 +func (ic *classSettings) TopP() float64 { + return ic.getFloatProperty(topPProperty, DefaultGoogleTopP) +} + +func contains[T comparable](s []T, e T) bool { + for _, v := range s { + if v == e { + return true + } + } + return false +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-google/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-google/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..bac2370ad95bb0f7890fc1fdcaf2de0551d45600 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-google/config/class_settings_test.go @@ -0,0 +1,249 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "fmt" + "testing" + + "github.com/pkg/errors" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantApiEndpoint string + wantProjectID string + wantModelID string + wantModel string + wantTemperature float64 + wantTokenLimit int + wantTopK int + wantTopP float64 + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantApiEndpoint: "generativelanguage.googleapis.com", + wantProjectID: "", + wantModelID: "chat-bison-001", + wantModel: "", + wantTemperature: 1.0, + wantTokenLimit: 1024, + wantTopK: 40, + wantTopP: 0.95, + wantErr: nil, + }, + { + name: "happy flow", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "projectId": "projectId", + }, + }, + wantApiEndpoint: "us-central1-aiplatform.googleapis.com", + wantProjectID: "projectId", + wantModelID: "chat-bison", + wantTemperature: 1.0, + wantTokenLimit: 1024, + wantTopK: 40, + wantTopP: 0.95, + wantErr: nil, + }, + { + name: "custom values", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "apiEndpoint": "google.com", + "projectId": "cloud-project", + "modelId": "model-id", + "temperature": 0.25, + "tokenLimit": 254, + "topK": 30, + "topP": 0.97, + }, + }, + wantApiEndpoint: "google.com", + wantProjectID: "cloud-project", + wantModelID: "model-id", + wantTemperature: 0.25, + wantTokenLimit: 254, + wantTopK: 30, + wantTopP: 0.97, + wantErr: nil, + }, + { + name: "wrong temperature", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "projectId": "cloud-project", + "temperature": 2, + }, + }, + wantErr: errors.Errorf("temperature has to be float value between 0 and 1"), 
+ }, + { + name: "wrong tokenLimit", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "projectId": "cloud-project", + "tokenLimit": 2000, + }, + }, + wantErr: errors.Errorf("tokenLimit has to be an integer value between 1 and 1024"), + }, + { + name: "wrong topP", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "projectId": "cloud-project", + "topP": 3, + }, + }, + wantErr: errors.Errorf("topP has to be float value between 0 and 1"), + }, + { + name: "wrong all", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "temperature": 2, + "tokenLimit": 2000, + "topK": 2000, + "topP": 3, + }, + }, + wantErr: errors.Errorf("temperature has to be float value between 0 and 1, " + + "tokenLimit has to be an integer value between 1 and 1024, " + + "topP has to be float value between 0 and 1"), + }, + { + name: "Generative AI", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "apiEndpoint": "generativelanguage.googleapis.com", + }, + }, + wantApiEndpoint: "generativelanguage.googleapis.com", + wantProjectID: "", + wantModelID: "chat-bison-001", + wantTemperature: 1.0, + wantTokenLimit: 1024, + wantTopK: 40, + wantTopP: 0.95, + wantErr: nil, + }, + { + name: "Generative AI with model", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "apiEndpoint": "generativelanguage.googleapis.com", + "modelId": "chat-bison-001", + }, + }, + wantApiEndpoint: "generativelanguage.googleapis.com", + wantProjectID: "", + wantModelID: "chat-bison-001", + wantTemperature: 1.0, + wantTokenLimit: 1024, + wantTopK: 40, + wantTopP: 0.95, + wantErr: nil, + }, + { + name: "Generative AI with gemini-ultra model", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "apiEndpoint": "generativelanguage.googleapis.com", + "modelId": "gemini-ultra", + }, + }, + wantApiEndpoint: "generativelanguage.googleapis.com", + wantProjectID: "", + wantModelID: "gemini-ultra", + wantTemperature: 1.0, + wantTokenLimit: 1024, + 
wantTopK: 40, + wantTopP: 0.95, + wantErr: nil, + }, + { + name: "Generative AI with not supported model", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "apiEndpoint": "generativelanguage.googleapis.com", + "modelId": "unsupported-model", + }, + }, + wantErr: fmt.Errorf("unsupported-model is not supported available models are: " + + "[chat-bison-001 gemini-pro gemini-ultra gemini-1.5-flash-latest gemini-1.5-pro-latest chat-bison chat-bison-32k chat-bison@002 chat-bison-32k@002 chat-bison@001 gemini-1.5-pro-preview-0514 gemini-1.5-pro-preview-0409 gemini-1.5-flash-preview-0514 gemini-1.0-pro-002 gemini-1.0-pro-001 gemini-1.0-pro]"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.EqualError(t, ic.Validate(nil), tt.wantErr.Error()) + } else { + assert.NoError(t, ic.Validate(nil)) + assert.Equal(t, tt.wantApiEndpoint, ic.ApiEndpoint()) + assert.Equal(t, tt.wantProjectID, ic.ProjectID()) + assert.Equal(t, tt.wantModelID, ic.ModelID()) + assert.Equal(t, tt.wantModel, ic.Model()) + assert.Equal(t, tt.wantTemperature, ic.Temperature()) + assert.Equal(t, tt.wantTokenLimit, ic.TokenLimit()) + assert.Equal(t, tt.wantTopK, ic.TopK()) + assert.Equal(t, tt.wantTopP, ic.TopP()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/generative-google/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-google/module.go new file mode 100644 index 0000000000000000000000000000000000000000..d588956817a9f46f3722a46f2f6f29fad751153e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-google/module.go @@ -0,0 +1,96 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativegoogle + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + entcfg "github.com/weaviate/weaviate/entities/config" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-google/clients" + "github.com/weaviate/weaviate/modules/generative-google/parameters" +) + +const ( + Name = "generative-google" + LegacyName = "generative-palm" +) + +func New() *GenerativeGoogleModule { + return &GenerativeGoogleModule{} +} + +type GenerativeGoogleModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeGoogleModule) Name() string { + return Name +} + +func (m *GenerativeGoogleModule) AltNames() []string { + return []string{LegacyName} +} + +func (m *GenerativeGoogleModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeGoogleModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, 
params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", Name) + } + return nil +} + +func (m *GenerativeGoogleModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("GOOGLE_APIKEY") + if apiKey == "" { + apiKey = os.Getenv("PALM_APIKEY") + } + useGoogleAuth := entcfg.Enabled(os.Getenv("USE_GOOGLE_AUTH")) + client := clients.New(apiKey, useGoogleAuth, timeout, logger) + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(m.generative) + return nil +} + +func (m *GenerativeGoogleModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeGoogleModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) + _ = modulecapabilities.ModuleHasAltNames(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-google/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-google/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..49d50ecf764e1fdac706936baa20f3980ff99d3a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-google/parameters/graphql.go @@ -0,0 +1,126 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "apiEndpoint": &graphql.InputObjectFieldConfig{ + Description: "apiEndpoint", + Type: graphql.String, + }, + "projectId": &graphql.InputObjectFieldConfig{ + Description: "projectId", + Type: graphql.String, + }, + "endpointId": &graphql.InputObjectFieldConfig{ + Description: "endpointId", + Type: graphql.String, + }, + "region": &graphql.InputObjectFieldConfig{ + Description: "region", + Type: graphql.String, + }, + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + "topP": &graphql.InputObjectFieldConfig{ + Description: "topP", + Type: graphql.Float, + }, + "topK": &graphql.InputObjectFieldConfig{ + Description: "topK", + Type: graphql.Int, + }, + "maxTokens": &graphql.InputObjectFieldConfig{ + Description: "maxTokens", + Type: graphql.Int, + }, + "presencePenalty": &graphql.InputObjectFieldConfig{ + Description: "presencePenalty", + Type: graphql.Float, + }, + "frequencyPenalty": &graphql.InputObjectFieldConfig{ + Description: "frequencyPenalty", + Type: graphql.Float, + }, + "stopSequences": &graphql.InputObjectFieldConfig{ + Description: "stopSequences", + Type: graphql.NewList(graphql.String), + }, + "images": &graphql.InputObjectFieldConfig{ + Description: "images", + Type: graphql.NewList(graphql.String), + }, + "imageProperties": &graphql.InputObjectFieldConfig{ + Description: "imageProperties", + Type: graphql.NewList(graphql.String), + }, + }, + }), + DefaultValue: nil, + } +} + 
+func output(prefix string) *graphql.Field { + return &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sFields", prefix, Name), + Fields: graphql.Fields{ + "usageMetadata": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sUsageMetadataFields", prefix, Name), + Fields: graphql.Fields{ + "promptTokenCount": &graphql.Field{Type: graphql.Int}, + "candidatesTokenCount": &graphql.Field{Type: graphql.Int}, + "totalTokenCount": &graphql.Field{Type: graphql.Int}, + }, + })}, + "metadata": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sMetadataFields", prefix, Name), + Fields: graphql.Fields{ + "tokenMetadata": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sTokenMetadataFields", prefix, Name), + Fields: graphql.Fields{ + "inputTokenCount": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sMetadataInputTokenCountFields", prefix, Name), + Fields: graphql.Fields{ + "totalBillableCharacters": &graphql.Field{Type: graphql.Int}, + "totalTokens": &graphql.Field{Type: graphql.Int}, + }, + })}, + "outputTokenCount": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sMetadataOutputTokenCountFields", prefix, Name), + Fields: graphql.Fields{ + "totalBillableCharacters": &graphql.Field{Type: graphql.Int}, + "totalTokens": &graphql.Field{Type: graphql.Int}, + }, + })}, + }, + })}, + }, + })}, + }, + })} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-google/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-google/parameters/params.go new file mode 100644 index 0000000000000000000000000000000000000000..afc0c50b6f7adbe49bef6907b1f741ca5a2de03a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-google/parameters/params.go @@ -0,0 +1,76 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / 
_ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + ApiEndpoint string + ProjectID string + EndpointID string + Region string + Model string + Temperature *float64 + MaxTokens *int + TopP *float64 + TopK *int + StopSequences []string + PresencePenalty *float64 + FrequencyPenalty *float64 + Images []*string + ImageProperties []string +} + +func extract(field *ast.ObjectField) interface{} { + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "apiEndpoint": + out.ApiEndpoint = gqlparser.GetValueAsStringOrEmpty(f) + case "projectId": + out.ProjectID = gqlparser.GetValueAsStringOrEmpty(f) + case "endpointId": + out.EndpointID = gqlparser.GetValueAsStringOrEmpty(f) + case "region": + out.Region = gqlparser.GetValueAsStringOrEmpty(f) + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "temperature": + out.Temperature = gqlparser.GetValueAsFloat64(f) + case "maxTokens": + out.MaxTokens = gqlparser.GetValueAsInt(f) + case "topP": + out.TopP = gqlparser.GetValueAsFloat64(f) + case "topK": + out.TopK = gqlparser.GetValueAsInt(f) + case "stopSequences": + out.StopSequences = gqlparser.GetValueAsStringArray(f) + case "presencePenalty": + out.PresencePenalty = gqlparser.GetValueAsFloat64(f) + case "frequencyPenalty": + out.FrequencyPenalty = gqlparser.GetValueAsFloat64(f) + case "images": + out.Images = gqlparser.GetValueAsStringPtrArray(f) + case "imageProperties": + out.ImageProperties = gqlparser.GetValueAsStringArray(f) + default: + // do nothing + } + } + } + return out +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/generative-google/parameters/provider.go b/platform/dbops/binaries/weaviate-src/modules/generative-google/parameters/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..cbb92fc000f46c7a0da761b3561cf61b284c8d99 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-google/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +const Name = "google" + +func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty { + return map[string]modulecapabilities.GenerativeProperty{ + Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: output, ExtractRequestParamsFunction: extract}, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-mistral/clients/mistral.go b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/clients/mistral.go new file mode 100644 index 0000000000000000000000000000000000000000..477968a3db41336877e6d4e403522acae8d7ae4b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/clients/mistral.go @@ -0,0 +1,248 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/generative" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-mistral/config" + mistralparams "github.com/weaviate/weaviate/modules/generative-mistral/parameters" +) + +type mistral struct { + apiKey string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *mistral { + return &mistral{ + apiKey: apiKey, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (v *mistral) GenerateSingleResult(ctx context.Context, properties *modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forPrompt, err := generative.MakeSinglePrompt(generative.Text(properties), prompt) + if err != nil { + return nil, err + } + return v.Generate(ctx, cfg, forPrompt, options, debug) +} + +func (v *mistral) GenerateAllResults(ctx context.Context, properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forTask, err := generative.MakeTaskPrompt(generative.Texts(properties), task) + if err != nil { + return nil, err + } + return v.Generate(ctx, cfg, forTask, options, debug) +} + +func (v *mistral) Generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) { + params := v.getParameters(cfg, options) + debugInformation := 
v.getDebugInformation(debug, prompt) + + mistralUrl, err := v.getMistralUrl(ctx, params.BaseURL) + if err != nil { + return nil, errors.Wrap(err, "join Mistral API host and path") + } + + message := Message{ + Role: "user", + Content: prompt, + } + + input := generateInput{ + Messages: []Message{message}, + Model: params.Model, + Temperature: params.Temperature, + TopP: params.TopP, + MaxTokens: params.MaxTokens, + } + + body, err := json.Marshal(input) + if err != nil { + return nil, errors.Wrap(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", mistralUrl, + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + apiKey, err := v.getApiKey(ctx) + if err != nil { + return nil, errors.Wrapf(err, "Mistral API Key") + } + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey)) + req.Header.Add("Content-Type", "application/json") + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody generateResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if res.StatusCode != 200 || resBody.Error != nil { + if resBody.Error != nil { + return nil, errors.Errorf("connection to Mistral API failed with status: %d error: %v", res.StatusCode, resBody.Error.Message) + } + return nil, errors.Errorf("connection to Mistral API failed with status: %d", res.StatusCode) + } + + textResponse := resBody.Choices[0].Message.Content /* NOTE(review): assumes Choices is non-empty on a 200 response — would panic otherwise; confirm API contract */ + + return &modulecapabilities.GenerateResponse{ + Result: &textResponse, + Debug: debugInformation, + Params: v.getResponseParams(resBody.Usage), + }, nil +} + +func (v *mistral) getResponseParams(usage *usage) map[string]interface{} { + if usage != nil { + return map[string]interface{}{mistralparams.Name: map[string]interface{}{"usage": usage}} + } + return nil +} + +func GetResponseParams(result map[string]interface{}) *responseParams { /* re-extracts the usage block stored by getResponseParams */ + if params, ok := result[mistralparams.Name].(map[string]interface{}); ok { + if usage, ok := params["usage"].(*usage); ok { + return &responseParams{Usage: usage} + } + } + return nil +} + +func (v *mistral) getParameters(cfg moduletools.ClassConfig, options interface{}) mistralparams.Params { /* request-level options override class-config defaults */ + settings := config.NewClassSettings(cfg) + + var params mistralparams.Params + if p, ok := options.(mistralparams.Params); ok { + params = p + } + if params.BaseURL == "" { + params.BaseURL = settings.BaseURL() + } + if params.Model == "" { + model := settings.Model() + params.Model = model + } + if params.Temperature == nil { + temperature := settings.Temperature() + params.Temperature = &temperature + } + if params.MaxTokens == nil { + maxTokens := settings.MaxTokens() + params.MaxTokens = &maxTokens + } + return params +} + +func (v *mistral) getDebugInformation(debug bool, prompt string) *modulecapabilities.GenerateDebugInformation { + if debug { + return &modulecapabilities.GenerateDebugInformation{ + Prompt: prompt, + } + } + return nil +} + +func (v *mistral) getMistralUrl(ctx context.Context, baseURL string) (string, error) { + 
passedBaseURL := baseURL + if headerBaseURL := modulecomponents.GetValueFromContext(ctx, "X-Mistral-Baseurl"); headerBaseURL != "" { + passedBaseURL = headerBaseURL + } + return url.JoinPath(passedBaseURL, "/v1/chat/completions") +} + +func (v *mistral) getApiKey(ctx context.Context) (string, error) { + if apiKey := modulecomponents.GetValueFromContext(ctx, "X-Mistral-Api-Key"); apiKey != "" { + return apiKey, nil + } + if v.apiKey != "" { + return v.apiKey, nil + } + return "", errors.New("no api key found " + + "neither in request header: X-Mistral-Api-Key " + + "nor in environment variable under MISTRAL_APIKEY") +} + +type generateInput struct { + Model string `json:"model"` + Messages []Message `json:"messages"` + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + MaxTokens *int `json:"max_tokens,omitempty"` +} + +type generateResponse struct { + Choices []Choice + Usage *usage `json:"usage,omitempty"` + Error *mistralApiError `json:"error,omitempty"` +} + +type Choice struct { + Index int `json:"index"` + Message Message `json:"message"` + FinishReason string `json:"finish_reason"` + Logprobs *string `json:"logprobs"` +} + +type Message struct { + Role string `json:"role"` + Content string `json:"content"` +} + +// mistralApiError models the error payload returned by the Mistral API. +// NOTE(review): only the "message" field is decoded; confirm no other fields are needed. +type mistralApiError struct { + Message string `json:"message"` +} + +type usage struct { + PromptTokens *int `json:"prompt_tokens,omitempty"` + CompletionTokens *int `json:"completion_tokens,omitempty"` + TotalTokens *int `json:"total_tokens,omitempty"` +} + +type responseParams struct { + Usage *usage `json:"usage,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-mistral/clients/mistral_meta.go b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/clients/mistral_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..25a26477c4addce13c9945ceeb32e0b0beb44a2b --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/clients/mistral_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *mistral) MetaInfo() (map[string]interface{}, error) { /* static module metadata; no network call */ + return map[string]interface{}{ + "name": "Generative Search - Mistral", + "documentationHref": "https://docs.mistral.ai/api/", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-mistral/clients/mistral_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/clients/mistral_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..92b8acf81347c0e4c9d05d62995acd3654e01dce --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/clients/mistral_meta_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + meta, err := c.MetaInfo() /* MetaInfo is static — the stub server is not actually contacted */ + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["name"] + assert.True(t, metaModel != nil) + documentationHref := meta["documentationHref"] + assert.True(t, documentationHref != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } /* NOTE(review): no return here, so the body below is still written after a 503 */ + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { /* canned /meta payload; fixed: "documentationHref" value was missing its opening quote, making the fixture invalid JSON */ + return `{ + "hostname": "http://127.0.0.1:8080", + "modules": { + "generative-mistral": { + "documentationHref": "URL_HERE", + "name": "Mistral Generative Module" + } + }, + "version": "1.24.2" +}` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-mistral/clients/mistral_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/clients/mistral_test.go new file mode 100644 index 0000000000000000000000000000000000000000..153f5755f90ce0ee4f59e53c7b2d6c47fc1e2bd6 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/clients/mistral_test.go @@ -0,0 +1,181 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func TestGetAnswer(t *testing.T) { + props := []*modulecapabilities.GenerateProperties{{Text: map[string]string{"prop": "My name is john"}}} + + tests := []struct { + name string + answer generateResponse + timeout time.Duration + expectedResult string + }{ + { + name: "when the server has a successful answer", + answer: generateResponse{ + Choices: []Choice{ + { + Message: Message{ + Content: "John", + }, + }, + }, + Error: nil, + }, + expectedResult: "John", + }, + { + name: "when the server has an error", + answer: generateResponse{ + Error: &mistralApiError{ + Message: "some error from the server", + }, + }, + }, + { + name: "when the server does not respond in time", + answer: generateResponse{Error: &mistralApiError{Message: "context deadline exceeded"}}, + timeout: time.Second, /* NOTE(review): handler sleeps exactly the client timeout — timing-sensitive */ + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + handler := &testAnswerHandler{ + t: t, + answer: test.answer, + timeout: test.timeout, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", test.timeout, nullLogger()) + + cfg := &fakeClassConfig{baseURL: server.URL} + res, err := c.GenerateAllResults(context.Background(), props, "What is my name?", nil, false, cfg) + + if test.answer.Error != nil { + assert.Contains(t, err.Error(), test.answer.Error.Message) + } else { + assert.Equal(t, test.expectedResult, *res.Result) + } + }) + } + t.Run("when 
X-Mistral-BaseURL header is passed", func(t *testing.T) { + c := New("apiKey", 5*time.Second, nullLogger()) + + baseURL := "http://default-url.com" + ctxWithValue := context.WithValue(context.Background(), + "X-Mistral-Baseurl", []string{"http://base-url-passed-in-header.com"}) + + buildURL, err := c.getMistralUrl(ctxWithValue, baseURL) + require.NoError(t, err) + assert.Equal(t, "http://base-url-passed-in-header.com/v1/chat/completions", buildURL) + + buildURL, err = c.getMistralUrl(context.TODO(), baseURL) + require.NoError(t, err) + assert.Equal(t, "http://default-url.com/v1/chat/completions", buildURL) + }) +} + +type testAnswerHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + answer generateResponse + timeout time.Duration +} + +func (f *testAnswerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { /* echoes the configured answer; responds 500 plus the error body when an error is configured */ + assert.Equal(f.t, "/v1/chat/completions", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + time.Sleep(f.timeout) + + if f.answer.Error != nil && f.answer.Error.Message != "" { + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +type fakeClassConfig struct { + baseURL string +} + +func (cfg *fakeClassConfig) Tenant() string { + return "" +} + +func (cfg *fakeClassConfig) Class() map[string]interface{} { + return nil +} + +func (cfg *fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + settings := map[string]interface{}{ + "baseURL": cfg.baseURL, + } + return settings +} + +func (cfg *fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) 
TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-mistral/config.go b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/config.go new file mode 100644 index 0000000000000000000000000000000000000000..258b3b6a2fb185648c54a6d089f997d8d1aa2ea8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativemistral + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/generative-mistral/config" +) + +func (m *GenerativeMistralModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeMistralModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeMistralModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := config.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-mistral/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/config/class_settings.go new file mode 100644 index 
0000000000000000000000000000000000000000..69276fb3bc53ebbd5951102ffa0fbe707aa0457d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/config/class_settings.go @@ -0,0 +1,102 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + baseURLProperty = "baseURL" + modelProperty = "model" + temperatureProperty = "temperature" + maxTokensProperty = "maxTokens" +) + +var availableMistralModels = []string{ + "open-mistral-7b", "mistral-tiny-2312", "mistral-tiny", "open-mixtral-8x7b", + "mistral-small-2312", "mistral-small", "mistral-small-2402", "mistral-small-latest", + "mistral-medium-latest", "mistral-medium-2312", "mistral-medium", "mistral-large-latest", + "mistral-large-2402", +} + +// Default values applied when the class configuration omits a 
setting. +var ( + DefaultBaseURL = "https://api.mistral.ai" + DefaultMistralModel = "open-mistral-7b" + DefaultMistralTemperature float64 = 0 + DefaultMistralMaxTokens = 2048 +) + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("generative-mistral")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + model := ic.getStringProperty(modelProperty, DefaultMistralModel) + if model == nil || !ic.validateModel(*model) { + return errors.Errorf("wrong Mistral model name, available model names are: %v", availableMistralModels) + } + + return nil +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) *string { + asString := ic.propertyValuesHelper.GetPropertyAsStringWithNotExists(ic.cfg, name, "", defaultValue) + return &asString +} + +func (ic *classSettings) getIntProperty(name string, defaultValue *int) *int { + wrongVal := -1 + return ic.propertyValuesHelper.GetPropertyAsIntWithNotExists(ic.cfg, name, &wrongVal, defaultValue) +} + +func (ic *classSettings) getFloat64Property(name string, defaultValue *float64) *float64 { + wrongVal := float64(-1) + return ic.propertyValuesHelper.GetPropertyAsFloat64WithNotExists(ic.cfg, name, &wrongVal, defaultValue) +} + +func (ic *classSettings) GetMaxTokensForModel(model string) int { /* per-model caps not implemented; returns the global default regardless of model */ + return DefaultMistralMaxTokens +} + +func (ic *classSettings) validateModel(model string) bool { + return basesettings.ValidateSetting(model, availableMistralModels) +} + +func (ic *classSettings) BaseURL() string { + return *ic.getStringProperty(baseURLProperty, DefaultBaseURL) +} + +func (ic *classSettings) Model() string { + return 
*ic.getStringProperty(modelProperty, DefaultMistralModel) +} + +func (ic *classSettings) MaxTokens() int { + return *ic.getIntProperty(maxTokensProperty, &DefaultMistralMaxTokens) +} + +func (ic *classSettings) Temperature() float64 { + return *ic.getFloat64Property(temperatureProperty, &DefaultMistralTemperature) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-mistral/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1ccd5767052e9100101662188849ce88f19791b2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/config/class_settings_test.go @@ -0,0 +1,144 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { /* table-driven: checks Validate plus the derived getter values */ + tests := []struct { + name string + cfg moduletools.ClassConfig + wantModel string + wantMaxTokens int + wantTemperature float64 + wantBaseURL string + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantModel: "open-mistral-7b", + wantMaxTokens: 2048, + wantTemperature: 0, + wantBaseURL: "https://api.mistral.ai", + wantErr: nil, + }, + { + name: "everything non default configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "mistral-medium", + "maxTokens": 50, + "temperature": 1, + }, + }, + wantModel: "mistral-medium", + wantMaxTokens: 50, + wantTemperature: 1, + wantBaseURL: "https://api.mistral.ai", + wantErr: nil, + }, + { + name: "wrong model configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "wrong-model", + }, + }, + wantErr: fmt.Errorf("wrong Mistral model name, available model names are: " + + "[open-mistral-7b mistral-tiny-2312 mistral-tiny open-mixtral-8x7b mistral-small-2312 mistral-small mistral-small-2402 mistral-small-latest mistral-medium-latest mistral-medium-2312 mistral-medium mistral-large-latest mistral-large-2402]"), + }, + { + name: "default settings with open-mistral-7b", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "open-mistral-7b", + }, + }, + wantModel: "open-mistral-7b", + wantMaxTokens: 2048, + wantTemperature: 0, + wantBaseURL: "https://api.mistral.ai", + wantErr: nil, + }, + { + name: "default settings with mistral-medium and baseURL", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "mistral-medium", + 
"baseURL": "http://custom-url.com", + }, + }, + wantModel: "mistral-medium", + wantMaxTokens: 2048, + wantTemperature: 0, + wantBaseURL: "http://custom-url.com", + wantErr: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.Equal(t, tt.wantErr.Error(), ic.Validate(nil).Error()) + } else { + assert.NoError(t, ic.Validate(nil)) + assert.Equal(t, tt.wantModel, ic.Model()) + assert.Equal(t, tt.wantMaxTokens, ic.MaxTokens()) + assert.Equal(t, tt.wantTemperature, ic.Temperature()) + assert.Equal(t, tt.wantBaseURL, ic.BaseURL()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-mistral/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/module.go new file mode 100644 index 0000000000000000000000000000000000000000..3874cad504339932c0703feea8b4a68942e8a0e4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/module.go @@ -0,0 +1,86 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modgenerativemistral + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-mistral/clients" + "github.com/weaviate/weaviate/modules/generative-mistral/parameters" +) + +const Name = "generative-mistral" + +func New() *GenerativeMistralModule { + return &GenerativeMistralModule{} +} + +type GenerativeMistralModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeMistralModule) Name() string { + return Name +} + +func (m *GenerativeMistralModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeMistralModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", Name) + } + + return nil +} + +func (m *GenerativeMistralModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("MISTRAL_APIKEY") /* optional — requests may instead pass the X-Mistral-Api-Key header */ + + client := clients.New(apiKey, timeout, logger) + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(m.generative) + + return nil +} + +func (m *GenerativeMistralModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeMistralModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// verify we implement the modules.Module 
interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-mistral/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..e34c017afb4e6a52b39c1162bad73fa31686ed0b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/parameters/graphql.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { /* GraphQL input object exposing the per-request Mistral options */ + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "baseURL": &graphql.InputObjectFieldConfig{ + Description: "baseURL", + Type: graphql.String, + }, + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + "topP": &graphql.InputObjectFieldConfig{ + Description: "topP", + Type: graphql.Float, + }, + "maxTokens": &graphql.InputObjectFieldConfig{ + Description: "maxTokens", + Type: graphql.Int, + }, + }, + }), + DefaultValue: nil, + } +} + +func output(prefix string) *graphql.Field { + return &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sFields", prefix, Name), + Fields: graphql.Fields{ + 
"usage": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sUsageMetadataFields", prefix, Name), + Fields: graphql.Fields{ + "prompt_tokens": &graphql.Field{Type: graphql.Int}, + "completion_tokens": &graphql.Field{Type: graphql.Int}, + "total_tokens": &graphql.Field{Type: graphql.Int}, + }, + })}, + }, + })} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-mistral/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/parameters/params.go new file mode 100644 index 0000000000000000000000000000000000000000..00382dc8c7fc89868e00c6ed9b5bc6021c38372b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/parameters/params.go @@ -0,0 +1,49 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + BaseURL string + Model string + Temperature *float64 + TopP *float64 + MaxTokens *int +} + +func extract(field *ast.ObjectField) interface{} { /* maps GraphQL option fields onto Params; unknown fields are ignored */ + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "baseURL": + out.BaseURL = gqlparser.GetValueAsStringOrEmpty(f) + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "temperature": + out.Temperature = gqlparser.GetValueAsFloat64(f) + case "topP": + out.TopP = gqlparser.GetValueAsFloat64(f) + case "maxTokens": + out.MaxTokens = gqlparser.GetValueAsInt(f) + default: + // do nothing + } + } + } + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-mistral/parameters/provider.go b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/parameters/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..831b3c84ccf89f839cc21521c27edd2303c85488 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-mistral/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +const Name = "mistral" + +func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty { /* wires the GraphQL input/output schema and extractor for this module */ + return map[string]modulecapabilities.GenerativeProperty{ + Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: output, ExtractRequestParamsFunction: extract}, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/clients/nvidia.go b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/clients/nvidia.go new file mode 100644 index 0000000000000000000000000000000000000000..87c91b6e80a37846a3919375c0b4de96fab3cb1f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/clients/nvidia.go @@ -0,0 +1,242 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-nvidia/config" + nvidiaparams "github.com/weaviate/weaviate/modules/generative-nvidia/parameters" + "github.com/weaviate/weaviate/usecases/modulecomponents/generative" +) + +type nvidia struct { + apiKey string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *nvidia { + return &nvidia{ + apiKey: apiKey, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (v *nvidia) GenerateSingleResult(ctx context.Context, properties *modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forPrompt, err := generative.MakeSinglePrompt(generative.Text(properties), prompt) + if err != nil { + return nil, err + } + return v.generate(ctx, cfg, forPrompt, options, debug) +} + +func (v *nvidia) GenerateAllResults(ctx context.Context, properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forTask, err := generative.MakeTaskPrompt(generative.Texts(properties), task) + if err != nil { + return nil, err + } + return v.generate(ctx, cfg, forTask, options, debug) +} + +func (v *nvidia) generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) { + params := v.getParameters(cfg, options) + debugInformation := 
v.getDebugInformation(debug, prompt) + + nvidiaUrl := v.getNvidiaUrl(ctx, params.BaseURL) + input := v.getRequest(prompt, params) + + body, err := json.Marshal(input) + if err != nil { + return nil, errors.Wrap(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", nvidiaUrl, + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + apiKey, err := v.getApiKey(ctx) + if err != nil { + return nil, errors.Wrapf(err, "NVIDIA API Key") + } + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey)) + req.Header.Add("Content-Type", "application/json") + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + if res.StatusCode != 200 { + var resBody generateResponseError + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body error: got: %v", string(bodyBytes))) + } + return nil, errors.Errorf("connection to NVIDIA API failed with status: %d error: %s: %s", res.StatusCode, resBody.Title, resBody.Detail) + } + + var resBody generateResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + textResponse := resBody.Choices[0].Message.Content /* NOTE(review): assumes Choices is non-empty on a 200 response — would panic otherwise; confirm API contract */ + + return &modulecapabilities.GenerateResponse{ + Result: &textResponse, + Debug: debugInformation, + Params: v.getResponseParams(resBody.Usage), + }, nil +} + +func (v *nvidia) getRequest(prompt string, params nvidiaparams.Params) generateInput { + return generateInput{ + Model: params.Model, + Messages: []message{{Role: "user", Content: prompt}}, + Temperature: params.Temperature, + TopP: params.TopP, + MaxTokens: params.MaxTokens, + } +} + +func (v *nvidia) getParameters(cfg moduletools.ClassConfig, options interface{}) nvidiaparams.Params { /* request-level options override class-config defaults */ + settings := config.NewClassSettings(cfg) + + var params nvidiaparams.Params + if p, ok := options.(nvidiaparams.Params); ok { + params = p + } + if params.BaseURL == "" { + params.BaseURL = settings.BaseURL() + } + if params.Model == "" { + params.Model = settings.Model() + } + if params.Temperature == nil { + params.Temperature = settings.Temperature() + } + if params.MaxTokens == nil { + params.MaxTokens = settings.MaxTokens() + } + if params.TopP == nil { + params.TopP = settings.TopP() + } + return params +} + +func (v *nvidia) getDebugInformation(debug bool, prompt string) *modulecapabilities.GenerateDebugInformation { + if debug { + return &modulecapabilities.GenerateDebugInformation{ + Prompt: prompt, + } + } + return nil +} + +func (v *nvidia) getResponseParams(usage *usage) map[string]interface{} { + if usage != nil { + return map[string]interface{}{nvidiaparams.Name: map[string]interface{}{"usage": usage}} + } + return nil +} + +func GetResponseParams(result map[string]interface{}) *responseParams { + if params, ok := result[nvidiaparams.Name].(map[string]interface{}); ok { + if usage, ok := params["usage"].(*usage); ok { + return &responseParams{Usage: usage} + } + } + return nil +} + +func (v *nvidia) getNvidiaUrl(ctx context.Context, baseURL string) string { + passedBaseURL := baseURL + if headerBaseURL := 
modulecomponents.GetValueFromContext(ctx, "X-Nvidia-Baseurl"); headerBaseURL != "" { + passedBaseURL = headerBaseURL + } + return fmt.Sprintf("%s/v1/chat/completions", passedBaseURL) /* NOTE(review): the mistral client builds this with url.JoinPath — consider matching for consistency */ +} + +func (v *nvidia) getApiKey(ctx context.Context) (string, error) { + if apiKey := modulecomponents.GetValueFromContext(ctx, "X-Nvidia-Api-Key"); apiKey != "" { + return apiKey, nil + } + if v.apiKey != "" { + return v.apiKey, nil + } + return "", errors.New("no api key found " + + "neither in request header: X-Nvidia-Api-Key " + + "nor in environment variable under NVIDIA_APIKEY") +} + +type generateInput struct { + Model string `json:"model"` + Messages []message `json:"messages,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + MaxTokens *int `json:"max_tokens,omitempty"` +} + +type message struct { + Role string `json:"role"` + Content string `json:"content"` +} + +type generateResponseError struct { + Status int `json:"status,omitempty"` + Title string `json:"title,omitempty"` + Detail string `json:"detail,omitempty"` +} + +type generateResponse struct { + Choices []choice `json:"choices,omitempty"` + Usage *usage `json:"usage,omitempty"` + Created int64 `json:"created"` +} + +type choice struct { + Message message `json:"message"` + Index int `json:"index"` + FinishReason string `json:"finish_reason"` +} + +type usage struct { + PromptTokens *int `json:"prompt_tokens,omitempty"` + TotalTokens *int `json:"total_tokens,omitempty"` + CompletionTokens *int `json:"completion_tokens,omitempty"` +} + +type responseParams struct { + Usage *usage `json:"usage,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/clients/nvidia_meta.go b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/clients/nvidia_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..06d9f167c48c8a8842e997c7bc981f81fe416a43 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/clients/nvidia_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *nvidia) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Generative Search - NVIDIA", + "documentationHref": "https://docs.api.nvidia.com/nim/reference/llm-apis", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/clients/nvidia_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/clients/nvidia_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9a84a2fc4aec2545869b705eb3c2b93f8f1ec810 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/clients/nvidia_test.go @@ -0,0 +1,174 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func TestGetAnswer(t *testing.T) { + tests := []struct { + name string + answer generateResponse + errorResponse *generateResponseError + timeout time.Duration + expectedResult string + }{ + { + name: "when the server has a successful answer", + answer: generateResponse{ + Choices: []choice{ + { + Message: message{ + Role: "user", + Content: "John", + }, + }, + }, + }, + expectedResult: "John", + }, + { + name: "when the server has an error", + errorResponse: &generateResponseError{ + Status: 402, + Title: "Payment Required", + Detail: "Account 'x': Cloud credits expired - Please contact NVIDIA representatives", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + handler := &testAnswerHandler{ + t: t, + answer: tt.answer, + errorResponse: tt.errorResponse, + timeout: tt.timeout, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", tt.timeout, nullLogger()) + + settings := &fakeClassConfig{baseURL: server.URL} + props := []*modulecapabilities.GenerateProperties{{Text: map[string]string{"prop": "My name is john"}}} + res, err := c.GenerateAllResults(context.Background(), props, "What is my name?", nil, false, settings) + + if tt.errorResponse != nil { + assert.Contains(t, err.Error(), tt.errorResponse.Title, tt.errorResponse.Detail) + } else { + assert.Equal(t, tt.expectedResult, *res.Result) + } + }) + } + t.Run("when X-Nvidia-BaseURL header is passed", func(t *testing.T) { + c := New("apiKey", 5*time.Second, nullLogger()) + 
baseUrl := "https://integrate.api.nvidia.com" + + ctxWithValue := context.WithValue(context.Background(), + "X-Nvidia-BaseURL", []string{"https://integrate.api.nvidia.com"}) + buildURL := c.getNvidiaUrl(ctxWithValue, baseUrl) + assert.Equal(t, "https://integrate.api.nvidia.com/v1/chat/completions", buildURL) + + buildURL = c.getNvidiaUrl(context.Background(), baseUrl) + assert.Equal(t, "https://integrate.api.nvidia.com/v1/chat/completions", buildURL) + }) +} + +type testAnswerHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + answer generateResponse + errorResponse *generateResponseError + timeout time.Duration +} + +func (f *testAnswerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + time.Sleep(f.timeout) + + if f.errorResponse != nil { + outBytes, err := json.Marshal(f.errorResponse) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusPaymentRequired) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +type fakeClassConfig struct { + baseURL string +} + +func (cfg *fakeClassConfig) Tenant() string { + return "" +} + +func (cfg *fakeClassConfig) Class() map[string]interface{} { + return nil +} + +func (cfg *fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + settings := map[string]interface{}{ + "baseURL": cfg.baseURL, + } + return settings +} + +func (cfg *fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +func 
nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/config.go b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/config.go new file mode 100644 index 0000000000000000000000000000000000000000..472bf4ad10935cec74b4c78105e48a85dad3ca6f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativenvidia + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/generative-nvidia/config" +) + +func (m *GenerativeNvidiaModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeNvidiaModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeNvidiaModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := config.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..06980a5d9e271379b0f855a63afa3e1d348f1f2e --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/config/class_settings.go @@ -0,0 +1,69 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + baseURLProperty = "baseURL" + modelProperty = "model" + temperatureProperty = "temperature" + topPProperty = "topP" + maxTokensProperty = "maxTokens" +) + +var ( + DefaultBaseURL = "https://integrate.api.nvidia.com" + DefaultNvidiaModel = "nvidia/llama-3.1-nemotron-51b-instruct" +) + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("generative-nvidia")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + return nil +} + +func (ic *classSettings) BaseURL() string { + return ic.propertyValuesHelper.GetPropertyAsString(ic.cfg, baseURLProperty, DefaultBaseURL) +} + +func (ic *classSettings) Model() string { + return ic.propertyValuesHelper.GetPropertyAsString(ic.cfg, modelProperty, DefaultNvidiaModel) +} + +func (ic *classSettings) Temperature() *float64 { + return ic.propertyValuesHelper.GetPropertyAsFloat64(ic.cfg, temperatureProperty, nil) +} + +func (ic *classSettings) TopP() *float64 { + return 
ic.propertyValuesHelper.GetPropertyAsFloat64(ic.cfg, topPProperty, nil) +} + +func (ic *classSettings) MaxTokens() *int { + return ic.propertyValuesHelper.GetPropertyAsInt(ic.cfg, maxTokensProperty, nil) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ebcb2cdb63302ff6e7091a514893b24ff775c5c7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/config/class_settings_test.go @@ -0,0 +1,116 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantBaseURL string + wantModel string + wantTemperature *float64 + wantTopP *float64 + wantMaxTokens *int + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantBaseURL: "https://integrate.api.nvidia.com", + wantModel: "nvidia/llama-3.1-nemotron-51b-instruct", + wantErr: nil, + }, + { + name: "everything non default configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "baseURL": "http://url.com", + "model": "nvidia/llama-3.1-nemoguard-8b-topic-control", + "temperature": 0.5, + "topP": 1, + "maxTokens": 1024, + }, + }, + wantBaseURL: "http://url.com", + wantModel: 
"nvidia/llama-3.1-nemoguard-8b-topic-control", + wantTemperature: ptFloat64(0.5), + wantTopP: ptFloat64(1), + wantMaxTokens: ptInt(1024), + wantErr: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.Equal(t, tt.wantErr.Error(), ic.Validate(nil).Error()) + } else { + assert.NoError(t, ic.Validate(nil)) + assert.Equal(t, tt.wantModel, ic.Model()) + assert.Equal(t, tt.wantMaxTokens, ic.MaxTokens()) + assert.Equal(t, tt.wantTemperature, ic.Temperature()) + assert.Equal(t, tt.wantBaseURL, ic.BaseURL()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +func ptInt(in int) *int { + return &in +} + +func ptFloat64(in float64) *float64 { + return &in +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/module.go new file mode 100644 index 0000000000000000000000000000000000000000..bb4d7259e14d0e7ad56f20c92e0f1bb26096a274 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/module.go @@ -0,0 +1,86 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. 
All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativenvidia + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-nvidia/clients" + "github.com/weaviate/weaviate/modules/generative-nvidia/parameters" +) + +const Name = "generative-nvidia" + +func New() *GenerativeNvidiaModule { + return &GenerativeNvidiaModule{} +} + +type GenerativeNvidiaModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeNvidiaModule) Name() string { + return Name +} + +func (m *GenerativeNvidiaModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeNvidiaModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", Name) + } + + return nil +} + +func (m *GenerativeNvidiaModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("NVIDIA_APIKEY") + + client := clients.New(apiKey, timeout, logger) + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(m.generative) + + return nil +} + +func (m *GenerativeNvidiaModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeNvidiaModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// verify we implement the 
modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..e34c017afb4e6a52b39c1162bad73fa31686ed0b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/parameters/graphql.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "baseURL": &graphql.InputObjectFieldConfig{ + Description: "baseURL", + Type: graphql.String, + }, + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + "topP": &graphql.InputObjectFieldConfig{ + Description: "topP", + Type: graphql.Float, + }, + "maxTokens": &graphql.InputObjectFieldConfig{ + Description: "maxTokens", + Type: graphql.Int, + }, + }, + }), + DefaultValue: nil, + } +} + +func output(prefix string) *graphql.Field { + return &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sFields", prefix, Name), + Fields: 
graphql.Fields{ + "usage": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sUsageMetadataFields", prefix, Name), + Fields: graphql.Fields{ + "prompt_tokens": &graphql.Field{Type: graphql.Int}, + "completion_tokens": &graphql.Field{Type: graphql.Int}, + "total_tokens": &graphql.Field{Type: graphql.Int}, + }, + })}, + }, + })} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/parameters/params.go new file mode 100644 index 0000000000000000000000000000000000000000..00382dc8c7fc89868e00c6ed9b5bc6021c38372b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/parameters/params.go @@ -0,0 +1,49 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + BaseURL string + Model string + Temperature *float64 + TopP *float64 + MaxTokens *int +} + +func extract(field *ast.ObjectField) interface{} { + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "baseURL": + out.BaseURL = gqlparser.GetValueAsStringOrEmpty(f) + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "temperature": + out.Temperature = gqlparser.GetValueAsFloat64(f) + case "topP": + out.TopP = gqlparser.GetValueAsFloat64(f) + case "maxTokens": + out.MaxTokens = gqlparser.GetValueAsInt(f) + default: + // do nothing + } + } + } + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/parameters/provider.go b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/parameters/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..1c3b512a6aa91c2f73b1657273c17346f15cd19c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-nvidia/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +const Name = "nvidia" + +func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty { + return map[string]modulecapabilities.GenerativeProperty{ + Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: output, ExtractRequestParamsFunction: extract}, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-octoai/clients/octoai.go b/platform/dbops/binaries/weaviate-src/modules/generative-octoai/clients/octoai.go new file mode 100644 index 0000000000000000000000000000000000000000..28263c07305fa7de8ec44aca17e518fef6d6a5ce --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-octoai/clients/octoai.go @@ -0,0 +1,40 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" +) + +type octoai struct{} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *octoai { + return &octoai{} +} + +func (v *octoai) GenerateSingleResult(ctx context.Context, properties *modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + return nil, errors.New("OctoAI is permanently shut down") +} + +func (v *octoai) GenerateAllResults(ctx context.Context, properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + return nil, errors.New("OctoAI is permanently shut down") +} + +func (v *octoai) Generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) { + return nil, errors.New("OctoAI is permanently shut down") +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-octoai/clients/octoai_meta.go b/platform/dbops/binaries/weaviate-src/modules/generative-octoai/clients/octoai_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..2a0ad1c987882306b0a32aa7fb52b64ccd0b4f58 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-octoai/clients/octoai_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *octoai) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Generative Search - OctoAI (deprecated)", + "documentationHref": "https://octo.ai/docs/text-gen-solution/getting-started", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-octoai/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-octoai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..e69758316bf6adb717c78bc07c19da6af60e7307 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-octoai/module.go @@ -0,0 +1,86 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativeoctoai + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-octoai/clients" + "github.com/weaviate/weaviate/modules/generative-octoai/parameters" +) + +const Name = "generative-octoai" + +func New() *GenerativeOctoAIModule { + return &GenerativeOctoAIModule{} +} + +type GenerativeOctoAIModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeOctoAIModule) Name() string { + return Name +} + +func (m *GenerativeOctoAIModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeOctoAIModule) Init(ctx 
context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", Name) + } + + return nil +} + +func (m *GenerativeOctoAIModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("OCTOAI_APIKEY") + + client := clients.New(apiKey, timeout, logger) + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(m.generative) + + return nil +} + +func (m *GenerativeOctoAIModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeOctoAIModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-octoai/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-octoai/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..6b9def6846b767a01bdbed4dd61fb1bfcc173ed0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-octoai/parameters/graphql.go @@ -0,0 +1,70 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "baseURL": &graphql.InputObjectFieldConfig{ + Description: "baseURL", + Type: graphql.String, + }, + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "maxTokens": &graphql.InputObjectFieldConfig{ + Description: "maxTokens", + Type: graphql.Int, + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + "n": &graphql.InputObjectFieldConfig{ + Description: "n", + Type: graphql.Int, + }, + "topP": &graphql.InputObjectFieldConfig{ + Description: "topP", + Type: graphql.Float, + }, + }, + }), + DefaultValue: nil, + } +} + +func output(prefix string) *graphql.Field { + return &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sFields", prefix, Name), + Fields: graphql.Fields{ + "usage": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sUsageMetadataFields", prefix, Name), + Fields: graphql.Fields{ + "prompt_tokens": &graphql.Field{Type: graphql.Int}, + "completion_tokens": &graphql.Field{Type: graphql.Int}, + "total_tokens": &graphql.Field{Type: graphql.Int}, + }, + })}, + }, + })} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-octoai/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-octoai/parameters/params.go new file mode 100644 index 0000000000000000000000000000000000000000..a6175be1736b6d54d698f037d789bd389daf9e10 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-octoai/parameters/params.go 
@@ -0,0 +1,52 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + BaseURL string + Model string + MaxTokens *int + Temperature *float64 + N *int + TopP *float64 +} + +func extract(field *ast.ObjectField) interface{} { + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "baseURL": + out.BaseURL = gqlparser.GetValueAsStringOrEmpty(f) + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "maxTokens": + out.MaxTokens = gqlparser.GetValueAsInt(f) + case "temperature": + out.Temperature = gqlparser.GetValueAsFloat64(f) + case "n": + out.N = gqlparser.GetValueAsInt(f) + case "topP": + out.TopP = gqlparser.GetValueAsFloat64(f) + default: + // do nothing + } + } + } + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-octoai/parameters/provider.go b/platform/dbops/binaries/weaviate-src/modules/generative-octoai/parameters/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..ca3fa621f3c480463ea625ddbbd2739b4087a496 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-octoai/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +const Name = "octoai" + +func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty { + return map[string]modulecapabilities.GenerativeProperty{ + Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: output, ExtractRequestParamsFunction: extract}, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-ollama/clients/ollama.go b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/clients/ollama.go new file mode 100644 index 0000000000000000000000000000000000000000..ff350567e9d79bea39da3376ef86d61a1c4bad11 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/clients/ollama.go @@ -0,0 +1,186 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ollama + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/weaviate/weaviate/modules/generative-ollama/config" + ollamaparams "github.com/weaviate/weaviate/modules/generative-ollama/parameters" + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/generative" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" +) + +type ollama struct { + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(timeout time.Duration, logger logrus.FieldLogger) *ollama { + return &ollama{ + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (v *ollama) GenerateSingleResult(ctx context.Context, properties *modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forPrompt, err := generative.MakeSinglePrompt(generative.Text(properties), prompt) + if err != nil { + return nil, err + } + return v.generate(ctx, cfg, forPrompt, generative.Blobs([]*modulecapabilities.GenerateProperties{properties}), options, debug) +} + +func (v *ollama) GenerateAllResults(ctx context.Context, properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forTask, err := generative.MakeTaskPrompt(generative.Texts(properties), task) + if err != nil { + return nil, err + } + return v.generate(ctx, cfg, forTask, generative.Blobs(properties), options, debug) +} + +func (v *ollama) generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, imageProperties []map[string]*string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, 
error) { + params := v.getParameters(cfg, options, imageProperties) + debugInformation := v.getDebugInformation(debug, prompt) + + ollamaUrl := v.getOllamaUrl(ctx, params.ApiEndpoint) + input := generateInput{ + Model: params.Model, + Prompt: prompt, + Stream: false, + } + if params.Temperature != nil { + input.Options = &generateOptions{Temperature: params.Temperature} + } + if len(params.Images) > 0 { + input.Images = params.Images + } + + body, err := json.Marshal(input) + if err != nil { + return nil, errors.Wrap(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", ollamaUrl, + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + req.Header.Add("Content-Type", "application/json") + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody generateResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if resBody.Error != "" { + return nil, errors.Errorf("connection to Ollama API failed with error: %s", resBody.Error) + } + + if res.StatusCode != 200 { + return nil, fmt.Errorf("connection to Ollama API failed with status: %d", res.StatusCode) + } + + textResponse := resBody.Response + + return &modulecapabilities.GenerateResponse{ + Result: &textResponse, + Debug: debugInformation, + }, nil +} + +func (v *ollama) getParameters(cfg moduletools.ClassConfig, options interface{}, imagePropertiesArray []map[string]*string) ollamaparams.Params { + settings := config.NewClassSettings(cfg) + + var params ollamaparams.Params + if p, ok := options.(ollamaparams.Params); ok { + params = p + } + if params.ApiEndpoint == "" { + params.ApiEndpoint = settings.ApiEndpoint() + } + if params.Model == "" { + params.Model = settings.Model() + } + + params.Images = generative.ParseImageProperties(params.Images, params.ImageProperties, imagePropertiesArray) + + return params +} + +func (v *ollama) getDebugInformation(debug bool, prompt string) *modulecapabilities.GenerateDebugInformation { + if debug { + return &modulecapabilities.GenerateDebugInformation{ + Prompt: prompt, + } + } + return nil +} + +func (v *ollama) getOllamaUrl(ctx context.Context, baseURL string) string { + passedBaseURL := baseURL + if headerBaseURL := modulecomponents.GetValueFromContext(ctx, "X-Ollama-BaseURL"); headerBaseURL != "" { + passedBaseURL = headerBaseURL + } + return fmt.Sprintf("%s/api/generate", passedBaseURL) +} + +type generateInput struct { + Model string `json:"model"` + Prompt string `json:"prompt"` + Stream bool `json:"stream"` + Options *generateOptions `json:"options,omitempty"` + Images []*string `json:"images,omitempty"` +} + +type generateOptions struct { + Temperature *float64 `json:"temperature,omitempty"` +} + +// The entire response for an error ends up looking different, may want to add omitempty everywhere. 
+type generateResponse struct { + Model string `json:"model,omitempty"` + CreatedAt string `json:"created_at,omitempty"` + Response string `json:"response,omitempty"` + Done bool `json:"done,omitempty"` + Context []int `json:"context,omitempty"` + TotalDuration int `json:"total_duration,omitempty"` + LoadDuration int `json:"load_duration,omitempty"` + PromptEvalDuration int `json:"prompt_eval_duration,omitempty"` + EvalCount int `json:"eval_count,omitempty"` + EvalDuration int `json:"eval_duration,omitempty"` + Error string `json:"error,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-ollama/clients/ollama_meta.go b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/clients/ollama_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..03fbe0ea73d5fca7bdab64f128df3809f1b2af7e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/clients/ollama_meta.go @@ -0,0 +1,21 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ollama + +func (v *ollama) MetaInfo() (map[string]interface{}, error) { + // Or this could be attached to the container + // Needs to be to get the model path + return map[string]interface{}{ + "name": "Generative Search - Ollama", + "documentationHref": "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-ollama/clients/ollama_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/clients/ollama_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5c63142cb9584f80dd435da8a0b3dc3aa6113002 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/clients/ollama_test.go @@ -0,0 +1,157 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ollama + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func TestGetAnswer(t *testing.T) { + props := []*modulecapabilities.GenerateProperties{{Text: map[string]string{"prop": "My name is john"}}} + + tests := []struct { + name string + answer generateResponse + timeout time.Duration + expectedResult string + }{ + { + name: "when the server has a successful aner", + answer: generateResponse{ + Response: "Test test", + }, + expectedResult: "Test test", + }, + { + name: "when the server has a an error", + answer: generateResponse{ + Error: "some error from the server", + }, + }, + { + name: "when the server does not respond in time", + answer: generateResponse{Error: "context deadline exceeded"}, + timeout: time.Second, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + handler := &testAnswerHandler{ + t: t, + answer: test.answer, + timeout: test.timeout, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New(test.timeout, nullLogger()) + + settings := &fakeClassConfig{apiEndpoint: server.URL} + res, err := c.GenerateAllResults(context.Background(), props, "What is my name?", nil, false, settings) + + if test.answer.Error != "" { + assert.Contains(t, err.Error(), test.answer.Error) + } else { + assert.Equal(t, test.expectedResult, *res.Result) + } + }) + } +} + +type testAnswerHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + answer generateResponse + 
timeout time.Duration +} + +func (f *testAnswerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/api/generate", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + time.Sleep(f.timeout) + + if f.answer.Error != "" { + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +type fakeClassConfig struct { + apiEndpoint string +} + +func (cfg *fakeClassConfig) Tenant() string { + return "" +} + +func (cfg *fakeClassConfig) Class() map[string]interface{} { + return nil +} + +func (cfg *fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + settings := map[string]interface{}{ + "apiEndpoint": cfg.apiEndpoint, + } + return settings +} + +func (cfg *fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (cfg *fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-ollama/config.go b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/config.go new file mode 100644 index 0000000000000000000000000000000000000000..987f4ce971a159f8f39aac5451478d35ecadfb12 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/config.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 
Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativeollama + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *GenerativeOllamaModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeOllamaModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeOllamaModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-ollama/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..0cd59d5931f9031330fd9853fbe9f8b2fb4e1011 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/config/class_settings.go @@ -0,0 +1,65 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + apiEndpointProperty = "apiEndpoint" + modelProperty = "model" +) + +const ( + DefaultApiEndpoint = "http://localhost:11434" + DefaultModel = "llama3" +) + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("generative-ollama")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + if ic.ApiEndpoint() == "" { + return errors.New("apiEndpoint cannot be empty") + } + model := ic.Model() + if model == "" { + return errors.New("model cannot be empty") + } + return nil +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) string { + return ic.propertyValuesHelper.GetPropertyAsString(ic.cfg, name, defaultValue) +} + +func (ic *classSettings) ApiEndpoint() string { + return ic.getStringProperty(apiEndpointProperty, DefaultApiEndpoint) +} + +func (ic *classSettings) Model() string { + return ic.getStringProperty(modelProperty, DefaultModel) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-ollama/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..3142925ec6e105960dc777669b1363e9dd3efb7b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/config/class_settings_test.go @@ -0,0 +1,108 @@ +// _ 
_ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantApiEndpoint string + wantModel string + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantApiEndpoint: "http://localhost:11434", + wantModel: "llama3", + wantErr: nil, + }, + { + name: "everything non default configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "mistral", + }, + }, + wantApiEndpoint: "http://localhost:11434", + wantModel: "mistral", + wantErr: nil, + }, + { + name: "empty model", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "", + }, + }, + wantErr: errors.New("model cannot be empty"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + err := ic.Validate(nil) + require.Error(t, err) + assert.Equal(t, tt.wantErr.Error(), err.Error()) + } else { + assert.NoError(t, ic.Validate(nil)) + assert.Equal(t, tt.wantModel, ic.Model()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + 
return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-ollama/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/module.go new file mode 100644 index 0000000000000000000000000000000000000000..1ed45015dba811d370eb18dbfea763b8b8a2304a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/module.go @@ -0,0 +1,82 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativeollama + +import ( + "context" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + ollama "github.com/weaviate/weaviate/modules/generative-ollama/clients" + "github.com/weaviate/weaviate/modules/generative-ollama/parameters" +) + +const Name = "generative-ollama" + +func New() *GenerativeOllamaModule { + return &GenerativeOllamaModule{} +} + +type GenerativeOllamaModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeOllamaModule) Name() string { + return Name +} + +func (m *GenerativeOllamaModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m 
*GenerativeOllamaModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", Name) + } + + return nil +} + +func (m *GenerativeOllamaModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + client := ollama.New(timeout, logger) + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(m.generative) + return nil +} + +func (m *GenerativeOllamaModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeOllamaModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-ollama/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..1a7a722937dd75014c7ae7eb1ff752f672f38013 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/parameters/graphql.go @@ -0,0 +1,50 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "apiEndpoint": &graphql.InputObjectFieldConfig{ + Description: "apiEndpoint", + Type: graphql.String, + }, + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + "images": &graphql.InputObjectFieldConfig{ + Description: "images", + Type: graphql.NewList(graphql.String), + }, + "imageProperties": &graphql.InputObjectFieldConfig{ + Description: "imageProperties", + Type: graphql.NewList(graphql.String), + }, + }, + }), + DefaultValue: nil, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-ollama/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/parameters/params.go new file mode 100644 index 0000000000000000000000000000000000000000..afbed9a0afdd7ef7e86611ed33d15a35e4b128dd --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/parameters/params.go @@ -0,0 +1,49 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + ApiEndpoint string + Model string + Temperature *float64 + Images []*string + ImageProperties []string +} + +func extract(field *ast.ObjectField) interface{} { + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "apiEndpoint": + out.ApiEndpoint = gqlparser.GetValueAsStringOrEmpty(f) + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "temperature": + out.Temperature = gqlparser.GetValueAsFloat64(f) + case "images": + out.Images = gqlparser.GetValueAsStringPtrArray(f) + case "imageProperties": + out.ImageProperties = gqlparser.GetValueAsStringArray(f) + default: + // do nothing + } + } + } + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-ollama/parameters/provider.go b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/parameters/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..746bb1b220c8dad00b855460d4c9d57ad4ca6708 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-ollama/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +const Name = "ollama" + +func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty { + return map[string]modulecapabilities.GenerativeProperty{ + Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: nil, ExtractRequestParamsFunction: extract}, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai.go b/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai.go new file mode 100644 index 0000000000000000000000000000000000000000..aae5ff9965f39c3689f6c498cb6d28e6875ba12d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai.go @@ -0,0 +1,554 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "strings" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/generative" + "github.com/weaviate/weaviate/usecases/monitoring" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-openai/config" + openaiparams "github.com/weaviate/weaviate/modules/generative-openai/parameters" +) + +func buildUrlFn(isLegacy, isAzure bool, resourceName, deploymentID, baseURL, apiVersion string) (string, error) { + if isAzure { + host := baseURL + if host == "" || host == "https://api.openai.com" { + // Fall back to old assumption + host = "https://" + resourceName + ".openai.azure.com" + } + path := "openai/deployments/" + deploymentID + "/chat/completions" + queryParam := fmt.Sprintf("api-version=%s", apiVersion) + return fmt.Sprintf("%s/%s?%s", host, path, queryParam), nil + } + path := "/v1/chat/completions" + if isLegacy { + path = "/v1/completions" + } + return url.JoinPath(baseURL, path) +} + +type openai struct { + openAIApiKey string + openAIOrganization string + azureApiKey string + buildUrl func(isLegacy, isAzure bool, resourceName, deploymentID, baseURL, apiVersion string) (string, error) + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(openAIApiKey, openAIOrganization, azureApiKey string, timeout time.Duration, logger logrus.FieldLogger) *openai { + return &openai{ + openAIApiKey: openAIApiKey, + openAIOrganization: openAIOrganization, + azureApiKey: azureApiKey, + httpClient: &http.Client{ + Timeout: timeout, + }, + buildUrl: buildUrlFn, + logger: logger, + } +} + +func (v *openai) GenerateSingleResult(ctx context.Context, properties 
*modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) {
	monitoring.GetMetrics().ModuleExternalRequestSingleCount.WithLabelValues("generate", "openai").Inc()
	forPrompt, err := generative.MakeSinglePrompt(generative.Text(properties), prompt)
	if err != nil {
		return nil, err
	}
	return v.generate(ctx, cfg, forPrompt, generative.Blobs([]*modulecapabilities.GenerateProperties{properties}), options, debug)
}

// GenerateAllResults builds a single task prompt over all properties and runs
// one generation request for the whole batch.
func (v *openai) GenerateAllResults(ctx context.Context, properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) {
	monitoring.GetMetrics().ModuleExternalRequestBatchCount.WithLabelValues("generate", "openai").Inc()
	forTask, err := generative.MakeTaskPrompt(generative.Texts(properties), task)
	if err != nil {
		return nil, err
	}
	return v.generate(ctx, cfg, forTask, generative.Blobs(properties), options, debug)
}

// generate performs one (Azure) OpenAI completion / chat-completion request
// and maps the response into a GenerateResponse.
//
// Fixes vs. previous revision:
//   - resBody.Choices[0] is no longer indexed before len(resBody.Choices) is
//     checked; a 200 response with an empty choices array panicked before.
//   - the response-status metric is incremented exactly once per response
//     instead of twice on the success path.
func (v *openai) generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, imageProperties []map[string]*string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) {
	monitoring.GetMetrics().ModuleExternalRequests.WithLabelValues("generate", "openai").Inc()
	startTime := time.Now()
	params := v.getParameters(cfg, options, imageProperties)
	isAzure := config.IsAzure(params.IsAzure, params.ResourceName, params.DeploymentID)
	debugInformation := v.getDebugInformation(debug, prompt)

	oaiUrl, err := v.buildOpenAIUrl(ctx, params)
	if err != nil {
		return nil, errors.Wrap(err, "url join path")
	}

	input, err := v.generateInput(prompt, params)
	if err != nil {
		return nil, errors.Wrap(err, "generate input")
	}

	defer func() {
		monitoring.GetMetrics().ModuleExternalRequestDuration.WithLabelValues("generate", oaiUrl).Observe(time.Since(startTime).Seconds())
	}()

	body, err := json.Marshal(input)
	if err != nil {
		return nil, errors.Wrap(err, "marshal body")
	}

	monitoring.GetMetrics().ModuleExternalRequestSize.WithLabelValues("generate", oaiUrl).Observe(float64(len(body)))

	req, err := http.NewRequestWithContext(ctx, "POST", oaiUrl,
		bytes.NewReader(body))
	if err != nil {
		return nil, errors.Wrap(err, "create POST request")
	}
	apiKey, err := v.getApiKey(ctx, isAzure)
	if err != nil {
		return nil, errors.Wrapf(err, "OpenAI API Key")
	}
	req.Header.Add(v.getApiKeyHeaderAndValue(apiKey, isAzure))
	if openAIOrganization := v.getOpenAIOrganization(ctx); openAIOrganization != "" {
		req.Header.Add("OpenAI-Organization", openAIOrganization)
	}
	req.Header.Add("Content-Type", "application/json")

	res, err := v.httpClient.Do(req)
	if res != nil {
		// single point of truth for the response-status metric (previously
		// this was incremented a second time further down, double-counting
		// every successful response)
		monitoring.GetMetrics().ModuleExternalResponseStatus.
			WithLabelValues("generate", oaiUrl, fmt.Sprintf("%v", res.StatusCode)).Inc()
	}
	if err != nil {
		code := -1
		if res != nil {
			code = res.StatusCode
		}
		monitoring.GetMetrics().ModuleExternalError.WithLabelValues("generate", "openai", "OpenAI API", fmt.Sprintf("%v", code)).Inc()
		return nil, errors.Wrap(err, "send POST request")
	}
	defer res.Body.Close()

	requestID := res.Header.Get("x-request-id")
	bodyBytes, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, errors.Wrap(err, "read response body")
	}

	monitoring.GetMetrics().ModuleExternalResponseSize.WithLabelValues("generate", oaiUrl).Observe(float64(len(bodyBytes)))

	var resBody generateResponse
	if err := json.Unmarshal(bodyBytes, &resBody); err != nil {
		return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes)))
	}

	if res.StatusCode != 200 || resBody.Error != nil {
		return nil, v.getError(res.StatusCode, requestID, resBody.Error, params.IsAzure)
	}

	responseParams := v.getResponseParams(resBody.Usage)
	if len(resBody.Choices) > 0 {
		// legacy completion endpoint: answer is in choices[0].text
		if textResponse := resBody.Choices[0].Text; textResponse != "" {
			trimmedResponse := strings.Trim(textResponse, "\n")
			return &modulecapabilities.GenerateResponse{
				Result: &trimmedResponse,
				Debug:  debugInformation,
				Params: responseParams,
			}, nil
		}
		// chat endpoint: answer is in choices[0].message.content
		if message := resBody.Choices[0].Message; message != nil {
			trimmedResponse := strings.Trim(message.Content, "\n")
			return &modulecapabilities.GenerateResponse{
				Result: &trimmedResponse,
				Debug:  debugInformation,
				Params: responseParams,
			}, nil
		}
	}

	return &modulecapabilities.GenerateResponse{
		Result: nil,
		Debug:  debugInformation,
	}, nil
}

// getParameters merges per-request options with class-level settings;
// request options win, class settings fill the gaps.
func (v *openai) getParameters(cfg moduletools.ClassConfig, options interface{}, imagePropertiesArray []map[string]*string) openaiparams.Params {
	settings := config.NewClassSettings(cfg)

	var params openaiparams.Params
	if p, ok := options.(openaiparams.Params); ok {
		params = p
	}

	if params.BaseURL == "" {
		params.BaseURL = settings.BaseURL()
	}
	if params.ApiVersion == "" {
		params.ApiVersion = settings.ApiVersion()
	}
	if params.ResourceName == "" {
		params.ResourceName = settings.ResourceName()
	}
	if params.DeploymentID == "" {
		params.DeploymentID = settings.DeploymentID()
	}
	if !params.IsAzure {
		params.IsAzure = settings.IsAzure()
	}
	if params.Model == "" {
		params.Model = settings.Model()
	}
	if params.Temperature == nil {
		temperature := settings.Temperature()
		if temperature != nil {
			params.Temperature = temperature
		}
	}
	if params.TopP == nil {
		topP := settings.TopP()
		params.TopP = &topP
	}
	if params.FrequencyPenalty == nil {
		frequencyPenalty := settings.FrequencyPenalty()
		params.FrequencyPenalty = &frequencyPenalty
	}
	if params.PresencePenalty == nil {
		presencePenalty := settings.PresencePenalty()
		params.PresencePenalty = &presencePenalty
	}
	if params.MaxTokens == nil {
		// -1 is the class-settings sentinel for "not set"
		if settings.MaxTokens() != nil && *settings.MaxTokens() != -1 {
			maxTokens := int(*settings.MaxTokens())
			params.MaxTokens = &maxTokens
		}
	}

	params.Images = generative.ParseImageProperties(params.Images, params.ImageProperties, imagePropertiesArray)

	return params
}

// getDebugInformation returns the prompt wrapped for debug output, or nil
// when debugging is off.
func (v *openai) getDebugInformation(debug bool, prompt string) *modulecapabilities.GenerateDebugInformation {
	if debug {
		return &modulecapabilities.GenerateDebugInformation{
			Prompt: prompt,
		}
	}
	return nil
}

// getResponseParams exposes token-usage information under this module's
// parameter namespace.
func (v *openai) getResponseParams(usage *usage) map[string]interface{} {
	if usage != nil {
		return map[string]interface{}{openaiparams.Name: map[string]interface{}{"usage": usage}}
	}
	return nil
}

// GetResponseParams extracts the usage block previously stored by
// getResponseParams. Note the type assertion expects the in-process *usage
// value, not a re-decoded map.
func GetResponseParams(result map[string]interface{}) *responseParams {
	if params, ok := result[openaiparams.Name].(map[string]interface{}); ok {
		if usage, ok := params["usage"].(*usage); ok {
			return &responseParams{Usage: usage}
		}
	}
	return nil
}

// buildOpenAIUrl resolves the endpoint, letting per-request headers override
// base URL, Azure deployment id and Azure resource name.
func (v *openai) buildOpenAIUrl(ctx context.Context, params openaiparams.Params) (string, error) {
	baseURL := params.BaseURL

	deploymentID := params.DeploymentID
	resourceName := params.ResourceName

	if headerBaseURL := modulecomponents.GetValueFromContext(ctx, "X-Openai-Baseurl"); headerBaseURL != "" {
		baseURL = headerBaseURL
	}

	if headerDeploymentID := modulecomponents.GetValueFromContext(ctx, "X-Azure-Deployment-Id"); headerDeploymentID != "" {
		deploymentID = headerDeploymentID
	}

	if headerResourceName := modulecomponents.GetValueFromContext(ctx, "X-Azure-Resource-Name"); headerResourceName != "" {
		resourceName = headerResourceName
	}

	isLegacy := config.IsLegacy(params.Model)
	isAzure := config.IsAzure(params.IsAzure, resourceName, deploymentID)

	return v.buildUrl(isLegacy, isAzure, resourceName, deploymentID, baseURL, params.ApiVersion)
}

// generateInput builds the request payload: a legacy completion body for
// legacy models, otherwise a chat-completion body (optionally multimodal).
func (v *openai) generateInput(prompt string, params openaiparams.Params) (generateInput, error) {
	if config.IsLegacy(params.Model) {
		return generateInput{
			Prompt:              prompt,
			Model:               params.Model,
			FrequencyPenalty:    params.FrequencyPenalty,
			MaxCompletionTokens: params.MaxTokens,
			N:                   params.N,
			PresencePenalty:     params.PresencePenalty,
			Stop:                params.Stop,
			Temperature:         params.Temperature,
			TopP:                params.TopP,
		}, nil
	}

	var input generateInput

	var content any
	if len(params.Images) > 0 {
		imageInput := contentImageInput{}
		imageInput = append(imageInput, contentText{
			Type: "text",
			Text: prompt,
		})
		for i := range params.Images {
			url := fmt.Sprintf("data:image/jpeg;base64,%s", *params.Images[i])
			imageInput = append(imageInput, contentImage{
				Type:     "image_url",
				ImageURL: contentImageURL{URL: &url},
			})
		}
		content = imageInput
	} else {
		content = prompt
	}

	messages := []message{{
		Role:    "user",
		Content: content,
	}}

	var tokens *int
	var err error
	maxTokensForModel := config.GetMaxTokensForModel(params.Model)
	if config.IsThirdPartyProvider(params.BaseURL, params.IsAzure, params.ResourceName, params.DeploymentID) && maxTokensForModel != nil {
		tokens, err = v.determineTokens(*maxTokensForModel, params.MaxTokens, params.Model, messages)
		if err != nil {
			return input, errors.Wrap(err, "determine tokens count")
		}
	} else {
		tokens = params.MaxTokens
	}

	input = generateInput{
		Messages:            messages,
		Stream:              false,
		MaxCompletionTokens: tokens,
		FrequencyPenalty:    params.FrequencyPenalty,
		N:                   params.N,
		PresencePenalty:     params.PresencePenalty,
		Stop:                params.Stop,
		Temperature:         params.Temperature,
		TopP:                params.TopP,
	}
	if !config.IsAzure(params.IsAzure, params.ResourceName, params.DeploymentID) {
		// model is mandatory for OpenAI calls, but obsolete for Azure calls
		input.Model = params.Model
		if strings.HasPrefix(input.Model, "gpt-4o") {
			// NOTE(review): gpt-4o is deliberately sent the legacy max_tokens
			// field instead of max_completion_tokens — confirm this is still
			// required by the target API version.
			input.MaxCompletionTokens = nil
			input.MaxTokens = tokens
		}
	}
	return input, nil
}

// getError builds a descriptive error for a failed request and records the
// external-error metric.
func (v *openai) getError(statusCode int, requestID string, resBodyError *openAIApiError, isAzure bool) error {
	endpoint := "OpenAI API"
	if isAzure {
		endpoint = "Azure OpenAI API"
	}
	errorMsg := fmt.Sprintf("connection to: %s failed with status: %d", endpoint, statusCode)
	if requestID != "" {
		errorMsg = fmt.Sprintf("%s request-id: %s", errorMsg, requestID)
	}
	if resBodyError != nil {
		errorMsg = fmt.Sprintf("%s error: %v", errorMsg, resBodyError.Message)
	}
	monitoring.GetMetrics().ModuleExternalError.WithLabelValues("generate", "openai", endpoint, fmt.Sprintf("%v", statusCode)).Inc()
	return errors.New(errorMsg)
}

// determineTokens clamps the completion-token budget so that prompt tokens
// plus requested completion tokens stay below the model limit.
func (v *openai) determineTokens(maxTokensSetting float64, classSetting *int, model string, messages []message) (*int, error) {
	monitoring.GetMetrics().ModuleExternalBatchLength.WithLabelValues("generate", "openai").Observe(float64(len(messages)))
	tokenMessagesCount, err := getTokensCount(model, messages)
	if err != nil {
		return nil, err
	}
	if classSetting == nil {
		// if class setting is not set, assume that max value was requested
		maxTokens := int(maxTokensSetting)
		classSetting = &maxTokens
	}
	messageTokens := tokenMessagesCount
	if *classSetting+messageTokens >= int(maxTokensSetting) {
		// max token limit must be in range: [1, maxTokensSetting) that's why -1 is added
		maxTokens := int(maxTokensSetting) - messageTokens - 1
		return &maxTokens, nil
	}
	return &messageTokens, nil
}

// getApiKeyHeaderAndValue returns the auth header name/value pair for the
// target provider (Azure uses "api-key", OpenAI uses a bearer token).
func (v *openai) getApiKeyHeaderAndValue(apiKey string, isAzure bool) (string, string) {
	if isAzure {
		return "api-key", apiKey
	}
	return "Authorization", fmt.Sprintf("Bearer %s", apiKey)
}

// getApiKey resolves the API key from the request header first, then from
// the configured environment value.
func (v *openai) getApiKey(ctx context.Context, isAzure bool) (string, error) {
	var apiKey, envVarValue, envVar string

	if isAzure {
		apiKey = "X-Azure-Api-Key"
		envVar = "AZURE_APIKEY"
		envVarValue = v.azureApiKey
	} else {
		apiKey = "X-Openai-Api-Key"
		envVar = "OPENAI_APIKEY"
		envVarValue = v.openAIApiKey
	}

	return v.getApiKeyFromContext(ctx, apiKey, envVarValue, envVar)
}

// getApiKeyFromContext prefers the per-request header value over the
// environment-provided one.
func (v *openai) getApiKeyFromContext(ctx context.Context, apiKey, envVarValue, envVar string) (string, error) {
	if apiKeyValue := modulecomponents.GetValueFromContext(ctx, apiKey); apiKeyValue != "" {
		return apiKeyValue, nil
	}
	if envVarValue != "" {
		return envVarValue, nil
	}
	return "", fmt.Errorf("no api key found neither in request header: %s nor in environment variable under %s", apiKey, envVar)
}

// getOpenAIOrganization prefers the per-request header over the configured
// organization.
func (v *openai) getOpenAIOrganization(ctx context.Context) string {
	if value := modulecomponents.GetValueFromContext(ctx, "X-Openai-Organization"); value != "" {
		return value
	}
	return v.openAIOrganization
}

// generateInput is the request payload for both the legacy completions and
// the chat-completions endpoint (unused fields are omitted).
type generateInput struct {
	Prompt              string    `json:"prompt,omitempty"`
	Messages            []message `json:"messages,omitempty"`
	Stream              bool      `json:"stream,omitempty"`
	Model               string    `json:"model,omitempty"`
	FrequencyPenalty    *float64  `json:"frequency_penalty,omitempty"`
	Logprobs            *bool     `json:"logprobs,omitempty"`
	TopLogprobs         *int      `json:"top_logprobs,omitempty"`
	MaxCompletionTokens *int      `json:"max_completion_tokens,omitempty"`
	MaxTokens           *int      `json:"max_tokens,omitempty"`
	N                   *int      `json:"n,omitempty"`
	PresencePenalty     *float64  `json:"presence_penalty,omitempty"`
	Stop                []string  `json:"stop,omitempty"`
	Temperature         *float64  `json:"temperature,omitempty"`
	TopP                *float64  `json:"top_p,omitempty"`
}

type responseMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
	Name    string `json:"name,omitempty"`
}

type message struct {
	Role    string      `json:"role"`
	Content interface{} `json:"content"` // string or array of contentText and contentImage
	Name    string      `json:"name,omitempty"`
}

// contentImageInput is a heterogeneous list of contentText / contentImage parts.
type contentImageInput []interface{}

type contentText struct {
	Type string `json:"type"`
	Text string `json:"text"`
}

type contentImage struct {
	Type     string          `json:"type"`
	ImageURL contentImageURL `json:"image_url,omitempty"`
}

type contentImageURL struct {
	URL *string `json:"url"`
}

type generateResponse struct {
	Choices []choice
	Usage   *usage          `json:"usage,omitempty"`
	Error   *openAIApiError `json:"error,omitempty"`
}

type choice struct {
	FinishReason string
	Index        float32
	Text         string           `json:"text,omitempty"`
	Message      *responseMessage `json:"message,omitempty"`
}

type openAIApiError struct {
	Message string     `json:"message"`
	Type    string     `json:"type"`
	Param   string     `json:"param"`
	Code    openAICode `json:"code"`
}

type usage struct {
	PromptTokens     *int `json:"prompt_tokens,omitempty"`
	CompletionTokens *int `json:"completion_tokens,omitempty"`
	TotalTokens      *int `json:"total_tokens,omitempty"`
}

// openAICode normalizes the API error code, which OpenAI returns either as a
// number or as a string.
type openAICode string

func (c *openAICode) String() string {
	if c == nil {
		return ""
	}
	return string(*c)
}

func (c *openAICode) UnmarshalJSON(data []byte) (err error) {
	if number, err := strconv.Atoi(string(data)); err == nil {
		str := strconv.Itoa(number)
		*c = openAICode(str)
		return nil
	}
	var str string
	err = json.Unmarshal(data, &str)
	if err != nil {
		return err
	}
	*c = openAICode(str)
	return nil
}

type responseParams struct {
	Usage *usage `json:"usage,omitempty"`
}
diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_meta.go b/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_meta.go
new file mode 100644
index 0000000000000000000000000000000000000000..ee453b323212da7b053d57cc0b98be9feca7dd30
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_meta.go
@@ -0,0 +1,19 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 
2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *openai) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Generative Search - OpenAI", + "documentationHref": "https://platform.openai.com/docs/api-reference/completions", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d27db3982fc2b4f8be00fc99363c59d3be586807 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_meta_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New("", "", "", 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["name"] + assert.True(t, metaModel != nil) + documentationHref := meta["documentationHref"] + assert.True(t, documentationHref != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "hostname": "http://127.0.0.1:8080", + "modules": { + "generative-openai": { + "documentationHref": "https://platform.openai.com/docs/api-reference/completions", + "name": "OpenAI Generative Module" + } + }, + "version": "1.16.0" +}` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_test.go new file mode 100644 index 0000000000000000000000000000000000000000..32afa5b3c7d6b4254762e01137f1cd227516dfa3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_test.go @@ -0,0 +1,277 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 
- 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/modules/generative-openai/config" + openaiparams "github.com/weaviate/weaviate/modules/generative-openai/parameters" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func fakeBuildUrl(serverURL string, isAzure, isLegacy bool, resourceName, deploymentID, baseURL, apiVersion string) (string, error) { + endpoint, err := buildUrlFn(isLegacy, isAzure, resourceName, deploymentID, baseURL, apiVersion) + if err != nil { + return "", err + } + endpoint = strings.Replace(endpoint, "https://api.openai.com", serverURL, 1) + return endpoint, nil +} + +func TestBuildUrlFn(t *testing.T) { + t.Run("buildUrlFn returns default OpenAI Client", func(t *testing.T) { + url, err := buildUrlFn(false, false, "", "", config.DefaultOpenAIBaseURL, config.DefaultApiVersion) + assert.Nil(t, err) + assert.Equal(t, "https://api.openai.com/v1/chat/completions", url) + }) + t.Run("buildUrlFn returns Azure Client when isAzure is true", func(t *testing.T) { + url, err := buildUrlFn(false, true, "resourceID", "deploymentID", "", config.DefaultApiVersion) + assert.Nil(t, err) + assert.Equal(t, "https://resourceID.openai.azure.com/openai/deployments/deploymentID/chat/completions?api-version=2024-06-01", url) + }) + t.Run("buildUrlFn loads from environment variable", func(t *testing.T) { + url, err := buildUrlFn(false, false, "", "", "https://foobar.some.proxy", config.DefaultApiVersion) + assert.Nil(t, err) + assert.Equal(t, "https://foobar.some.proxy/v1/chat/completions", url) + 
os.Unsetenv("OPENAI_BASE_URL") + }) + t.Run("buildUrlFn returns Azure Client with custom baseURL", func(t *testing.T) { + url, err := buildUrlFn(false, true, "resourceID", "deploymentID", "customBaseURL", config.DefaultApiVersion) + assert.Nil(t, err) + assert.Equal(t, "customBaseURL/openai/deployments/deploymentID/chat/completions?api-version=2024-06-01", url) + }) +} + +func TestGetAnswer(t *testing.T) { + props := []*modulecapabilities.GenerateProperties{{Text: map[string]string{"prop": "My name is john"}}} + t.Run("when the server has a successful answer ", func(t *testing.T) { + handler := &testAnswerHandler{ + t: t, + answer: generateResponse{ + Choices: []choice{{ + FinishReason: "test", + Index: 0, + Text: "John", + }}, + Error: nil, + }, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("openAIApiKey", "", "", 0, nullLogger()) + c.buildUrl = func(isLegacy, isAzure bool, resourceName, deploymentID, baseURL, apiVersion string) (string, error) { + return fakeBuildUrl(server.URL, isAzure, isLegacy, resourceName, deploymentID, baseURL, apiVersion) + } + + expected := modulecapabilities.GenerateResponse{ + Result: ptString("John"), + } + + res, err := c.GenerateAllResults(context.Background(), props, "What is my name?", nil, false, nil) + + assert.Nil(t, err) + assert.Equal(t, expected, *res) + }) + + t.Run("when the server has a an error", func(t *testing.T) { + server := httptest.NewServer(&testAnswerHandler{ + t: t, + answer: generateResponse{ + Error: &openAIApiError{ + Message: "some error from the server", + }, + }, + }) + defer server.Close() + + c := New("openAIApiKey", "", "", 0, nullLogger()) + c.buildUrl = func(isLegacy, isAzure bool, resourceName, deploymentID, baseURL, apiVersion string) (string, error) { + return fakeBuildUrl(server.URL, isAzure, isLegacy, resourceName, deploymentID, baseURL, apiVersion) + } + + _, err := c.GenerateAllResults(context.Background(), props, "What is my name?", nil, false, nil) + + 
require.NotNil(t, err) + assert.Error(t, err, "connection to OpenAI failed with status: 500 error: some error from the server") + }) + + t.Run("when the server has a an error and request id is present", func(t *testing.T) { + server := httptest.NewServer(&testAnswerHandler{ + t: t, + answer: generateResponse{ + Error: &openAIApiError{ + Message: "some error from the server", + }, + }, + headerRequestID: "some-request-id", + }) + defer server.Close() + + c := New("openAIApiKey", "", "", 0, nullLogger()) + c.buildUrl = func(isLegacy, isAzure bool, resourceName, deploymentID, baseURL, apiVersion string) (string, error) { + return fakeBuildUrl(server.URL, isAzure, isLegacy, resourceName, deploymentID, baseURL, apiVersion) + } + + _, err := c.GenerateAllResults(context.Background(), props, "What is my name?", nil, false, nil) + + require.NotNil(t, err) + assert.Error(t, err, "connection to OpenAI failed with status: 500 request-id: some-request-id error: some error from the server") + }) + + t.Run("when X-OpenAI-BaseURL header is passed", func(t *testing.T) { + params := openaiparams.Params{ + BaseURL: "http://default-url.com", + } + c := New("openAIApiKey", "", "", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Openai-Baseurl", []string{"http://base-url-passed-in-header.com"}) + + buildURL, err := c.buildOpenAIUrl(ctxWithValue, params) + require.NoError(t, err) + assert.Equal(t, "http://base-url-passed-in-header.com/v1/chat/completions", buildURL) + + buildURL, err = c.buildOpenAIUrl(context.TODO(), params) + require.NoError(t, err) + assert.Equal(t, "http://default-url.com/v1/chat/completions", buildURL) + }) + + t.Run("when X-Azure-DeploymentId is passed", func(t *testing.T) { + params := openaiparams.Params{ + IsAzure: true, + ResourceName: "classResourceName", + DeploymentID: "classDeploymentId", + ApiVersion: "2024-02-01", + } + c := New("", "", "", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + 
"X-Azure-Deployment-Id", []string{"headerDeploymentId"}) + ctxWithValue = context.WithValue(ctxWithValue, + "X-Azure-Resource-Name", []string{"headerResourceName"}) + + buildURL, err := c.buildOpenAIUrl(ctxWithValue, params) + require.NoError(t, err) + assert.Equal(t, "https://headerResourceName.openai.azure.com/openai/deployments/headerDeploymentId/chat/completions?api-version=2024-02-01", buildURL) + }) +} + +type testAnswerHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + answer generateResponse + headerRequestID string +} + +func (f *testAnswerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/v1/chat/completions", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.answer.Error != nil && f.answer.Error.Message != "" { + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + if f.headerRequestID != "" { + w.Header().Add("x-request-id", f.headerRequestID) + } + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func TestOpenAIApiErrorDecode(t *testing.T) { + t.Run("getModelStringQuery", func(t *testing.T) { + type args struct { + response []byte + } + tests := []struct { + name string + args args + want string + }{ + { + name: "Error code: missing property", + args: args{ + response: []byte(`{"message": "failed", "type": "error", "param": "arg..."}`), + }, + want: "", + }, + { + name: "Error code: as int", + args: args{ + response: []byte(`{"message": "failed", "type": "error", "param": "arg...", "code": 500}`), + }, + want: "500", + }, + { + name: "Error code as string number", + args: args{ + response: []byte(`{"message": "failed", "type": "error", 
"param": "arg...", "code": "500"}`), + }, + want: "500", + }, + { + name: "Error code as string text", + args: args{ + response: []byte(`{"message": "failed", "type": "error", "param": "arg...", "code": "invalid_api_key"}`), + }, + want: "invalid_api_key", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var got *openAIApiError + err := json.Unmarshal(tt.args.response, &got) + require.NoError(t, err) + + if got.Code.String() != tt.want { + t.Errorf("OpenAIerror.code = %v, want %v", got.Code, tt.want) + } + }) + } + }) +} + +func ptString(in string) *string { + return &in +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_tokens.go b/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_tokens.go new file mode 100644 index 0000000000000000000000000000000000000000..af171e28cf918abb06ece8ad0cfe793ac2cd5e0d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_tokens.go @@ -0,0 +1,73 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "fmt" + "strings" + + "github.com/weaviate/tiktoken-go" +) + +func getTokensCount(model string, messages []message) (int, error) { + tke, err := tiktoken.EncodingForModel(model) + if err != nil { + return 0, fmt.Errorf("encoding for model %s: %w", model, err) + } + + tokensPerMessage := 3 + if strings.HasPrefix(model, "gpt-3.5-turbo") { + tokensPerMessage = 4 + } + + tokensPerName := 1 + if strings.HasPrefix(model, "gpt-3.5-turbo") { + tokensPerName = -1 + } + + tokensCount := 3 + for _, message := range messages { + tokensCount += tokensPerMessage + tokensCount += len(tke.Encode(message.Role, nil, nil)) + tokensCount += len(tke.Encode(parseMessageContent(message.Content), nil, nil)) + if message.Name != "" { + tokensCount += tokensPerName + tokensCount += len(tke.Encode(message.Name, nil, nil)) + } + } + return tokensCount, nil +} + +func parseMessageContent(content interface{}) string { + switch c := content.(type) { + case string: + return c + case []interface{}: + var contents []string + for i := range c { + switch input := c[i].(type) { + case contentText: + contents = append(contents, input.Text) + case contentImage: + if input.ImageURL.URL == nil { + continue + } + contents = append(contents, *input.ImageURL.URL) + default: + // do nothing + } + } + return strings.Join(contents, " ") + default: + return "" + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_tokens_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_tokens_test.go new file mode 100644 index 0000000000000000000000000000000000000000..32bd8f22b3497eff4b9196bd92191379d0c454a3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/clients/openai_tokens_test.go @@ -0,0 +1,144 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// 
\_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_getTokensCount(t *testing.T) { + prompt := ` + Summarize the following in a tweet: + + As generative language models such as GPT-4 continue to push the boundaries of what AI can do, + the excitement surrounding its potential is spreading quickly. Many applications and projects are + built on top of GPT-4 to extend its capabilities and features. Additionally, many tools were created + in order to interact with large language models, like LangChain as an example. Auto-GPT is one of the fastest + rising open-source python projects harnessing the power of GPT-4! + ` + messages := []message{ + {Role: "user", Content: prompt}, + } + // Example messages from: https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb + // added for sanity check that getTokensCount method computes tokens accordingly to above examples provided by OpenAI + exampleMessages := []message{ + { + Role: "system", + Content: "You are a helpful, pattern-following assistant that translates corporate jargon into plain English.", + }, + { + Role: "system", + Name: "example_user", + Content: "New synergies will help drive top-line growth.", + }, + { + Role: "system", + Name: "example_assistant", + Content: "Things working well together will increase revenue.", + }, + { + Role: "system", + Name: "example_user", + Content: "Let's circle back when we have more bandwidth to touch base on opportunities for increased leverage.", + }, + { + Role: "system", + Name: "example_assistant", + Content: "Let's talk later when we're less busy about how to do better.", + }, + { + Role: "user", + Content: "This late pivot means we don't have time to boil the ocean for the client deliverable.", + }, + } + tests := []struct { + name string 
+ model string + messages []message + want int + wantErr string + }{ + { + name: "text-davinci-002", + model: "text-davinci-002", + messages: messages, + want: 128, + }, + { + name: "text-davinci-003", + model: "text-davinci-003", + messages: messages, + want: 128, + }, + { + name: "gpt-3.5-turbo", + model: "gpt-3.5-turbo", + messages: messages, + want: 122, + }, + { + name: "gpt-4", + model: "gpt-4", + messages: messages, + want: 121, + }, + { + name: "gpt-4-32k", + model: "gpt-4-32k", + messages: messages, + want: 121, + }, + { + name: "non-existent-model", + model: "non-existent-model", + messages: messages, + wantErr: "encoding for model non-existent-model: no encoding for model non-existent-model", + }, + { + name: "OpenAI cookbook example - gpt-3.5-turbo-0301", + model: "gpt-3.5-turbo-0301", + messages: exampleMessages, + want: 127, + }, + { + name: "OpenAI cookbook example - gpt-4", + model: "gpt-4", + messages: exampleMessages, + want: 129, + }, + { + name: "gpt-5", + model: "gpt-5", + messages: messages, + want: 117, + }, + { + name: "gpt-4o", + model: "gpt-4o", + messages: messages, + want: 117, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := getTokensCount(tt.model, tt.messages) + if err != nil { + assert.EqualError(t, err, tt.wantErr) + } else { + assert.Nil(t, err) + assert.Equal(t, tt.want, got) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-openai/config.go b/platform/dbops/binaries/weaviate-src/modules/generative-openai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..9ad9d61ea1351e1f6140ada41222ad6d259ad1a5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. 
All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativeopenai + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/generative-openai/config" +) + +func (m *GenerativeOpenAIModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeOpenAIModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeOpenAIModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := config.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-openai/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/generative-openai/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..09384f9738ff336c1755b3e3ebaea6558241c272 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/config/class_settings.go @@ -0,0 +1,268 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "fmt" + "slices" + "strings" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + modelProperty = "model" + temperatureProperty = "temperature" + maxTokensProperty = "maxTokens" + frequencyPenaltyProperty = "frequencyPenalty" + presencePenaltyProperty = "presencePenalty" + topPProperty = "topP" + baseURLProperty = "baseURL" + apiVersionProperty = "apiVersion" +) + +var availableOpenAILegacyModels = []string{ + "text-davinci-002", + "text-davinci-003", +} + +var availableOpenAIModels = []string{ + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-1106", + "gpt-4", + "gpt-4-32k", + "gpt-4-1106-preview", + "gpt-4o", + "gpt-4o-mini", + "gpt-5", + "gpt-5-mini", + "gpt-5-nano", +} + +var ( + DefaultOpenAIModel = "gpt-5-mini" + DefaultOpenAIMaxTokens = float64(8192) + DefaultOpenAIFrequencyPenalty = 0.0 + DefaultOpenAIPresencePenalty = 0.0 + DefaultOpenAITopP = 1.0 + DefaultOpenAIBaseURL = "https://api.openai.com" + DefaultApiVersion = "2024-06-01" +) + +// todo Need to parse the tokenLimits in a smarter way, as the prompt defines the max length +var defaultMaxTokens = map[string]float64{ + "text-davinci-002": 4097, + "text-davinci-003": 4097, + "gpt-3.5-turbo": 4096, + "gpt-3.5-turbo-16k": 16384, + "gpt-3.5-turbo-1106": 16385, + "gpt-4": 8192, + "gpt-4-32k": 32768, + "gpt-4-1106-preview": 128000, + "gpt-4o": 128000, + "gpt-4o-mini": 16384, + "gpt-5": 128000, + "gpt-5-mini": 128000, + "gpt-5-nano": 128000, +} + +var availableApiVersions = []string{ + "2022-12-01", + "2023-03-15-preview", + "2023-05-15", + "2023-06-01-preview", + "2023-07-01-preview", + "2023-08-01-preview", + "2023-09-01-preview", + "2023-12-01-preview", + "2024-02-15-preview", + "2024-03-01-preview", + "2024-02-01", + "2024-06-01", +} + +func 
GetMaxTokensForModel(model string) *float64 { + if maxTokens, ok := defaultMaxTokens[model]; ok { + return &maxTokens + } + return nil +} + +func IsLegacy(model string) bool { + return contains(availableOpenAILegacyModels, model) +} + +func IsThirdPartyProvider(baseURL string, isAzure bool, resourceName, deploymentID string) bool { + return !(strings.Contains(baseURL, "api.openai.com") || IsAzure(isAzure, resourceName, deploymentID)) +} + +func IsAzure(isAzure bool, resourceName, deploymentID string) bool { + return isAzure || (resourceName != "" && deploymentID != "") +} + +type ClassSettings interface { + Model() string + MaxTokens() *float64 + Temperature() *float64 + FrequencyPenalty() float64 + PresencePenalty() float64 + TopP() float64 + ResourceName() string + DeploymentID() string + IsAzure() bool + Validate(class *models.Class) error + BaseURL() string + ApiVersion() string +} + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) ClassSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("generative-openai")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + model := ic.getStringProperty(modelProperty, DefaultOpenAIModel) + if model == nil || !ic.validateModel(*model) { + return errors.Errorf("wrong OpenAI model name, available model names are: %v", availableOpenAIModels) + } + + temperature := ic.Temperature() + if temperature != nil && (*temperature < 0 || *temperature > 1) { + return errors.Errorf("Wrong temperature configuration, values are between 0.0 and 1.0") + } + + maxTokens := ic.MaxTokens() + maxTokensForModel := GetMaxTokensForModel(*model) + if maxTokens != nil && (*maxTokens < 0 || (maxTokensForModel != nil && 
*maxTokens > *maxTokensForModel)) { + return errors.Errorf("Wrong maxTokens configuration, values are should have a minimal value of 1 and max is dependant on the model used") + } + + frequencyPenalty := ic.getFloatProperty(frequencyPenaltyProperty, &DefaultOpenAIFrequencyPenalty) + if frequencyPenalty == nil || (*frequencyPenalty < 0 || *frequencyPenalty > 1) { + return errors.Errorf("Wrong frequencyPenalty configuration, values are between 0.0 and 1.0") + } + + presencePenalty := ic.getFloatProperty(presencePenaltyProperty, &DefaultOpenAIPresencePenalty) + if presencePenalty == nil || (*presencePenalty < 0 || *presencePenalty > 1) { + return errors.Errorf("Wrong presencePenalty configuration, values are between 0.0 and 1.0") + } + + topP := ic.getFloatProperty(topPProperty, &DefaultOpenAITopP) + if topP == nil || (*topP < 0 || *topP > 5) { + return errors.Errorf("Wrong topP configuration, values are should have a minimal value of 1 and max of 5") + } + + apiVersion := ic.ApiVersion() + if !ic.validateApiVersion(apiVersion) { + return errors.Errorf("wrong Azure OpenAI apiVersion, available api versions are: %v", availableApiVersions) + } + + if ic.IsAzure() { + err := ic.validateAzureConfig(ic.ResourceName(), ic.DeploymentID()) + if err != nil { + return err + } + } + + return nil +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) *string { + asString := ic.propertyValuesHelper.GetPropertyAsStringWithNotExists(ic.cfg, name, "", defaultValue) + return &asString +} + +func (ic *classSettings) getBoolProperty(name string, defaultValue bool) *bool { + asBool := ic.propertyValuesHelper.GetPropertyAsBool(ic.cfg, name, false) + return &asBool +} + +func (ic *classSettings) getFloatProperty(name string, defaultValue *float64) *float64 { + wrongVal := float64(-1.0) + return ic.propertyValuesHelper.GetPropertyAsFloat64WithNotExists(ic.cfg, name, &wrongVal, defaultValue) +} + +func (ic *classSettings) validateModel(model string) bool { + return 
contains(availableOpenAIModels, model) || contains(availableOpenAILegacyModels, model) +} + +func (ic *classSettings) validateApiVersion(apiVersion string) bool { + return contains(availableApiVersions, apiVersion) +} + +func (ic *classSettings) Model() string { + return *ic.getStringProperty(modelProperty, DefaultOpenAIModel) +} + +func (ic *classSettings) MaxTokens() *float64 { + return ic.getFloatProperty(maxTokensProperty, nil) +} + +func (ic *classSettings) BaseURL() string { + return *ic.getStringProperty(baseURLProperty, DefaultOpenAIBaseURL) +} + +func (ic *classSettings) ApiVersion() string { + return *ic.getStringProperty(apiVersionProperty, DefaultApiVersion) +} + +func (ic *classSettings) Temperature() *float64 { + return ic.getFloatProperty(temperatureProperty, nil) +} + +func (ic *classSettings) FrequencyPenalty() float64 { + return *ic.getFloatProperty(frequencyPenaltyProperty, &DefaultOpenAIFrequencyPenalty) +} + +func (ic *classSettings) PresencePenalty() float64 { + return *ic.getFloatProperty(presencePenaltyProperty, &DefaultOpenAIPresencePenalty) +} + +func (ic *classSettings) TopP() float64 { + return *ic.getFloatProperty(topPProperty, &DefaultOpenAITopP) +} + +func (ic *classSettings) ResourceName() string { + return *ic.getStringProperty("resourceName", "") +} + +func (ic *classSettings) DeploymentID() string { + return *ic.getStringProperty("deploymentId", "") +} + +func (ic *classSettings) IsAzure() bool { + return IsAzure(*ic.getBoolProperty("isAzure", false), ic.ResourceName(), ic.DeploymentID()) +} + +func (ic *classSettings) validateAzureConfig(resourceName string, deploymentId string) error { + if (resourceName == "" && deploymentId != "") || (resourceName != "" && deploymentId == "") { + return fmt.Errorf("both resourceName and deploymentId must be provided") + } + return nil +} + +func contains[T comparable](s []T, e T) bool { + return slices.Contains(s, e) +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/generative-openai/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-openai/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9c6a8046f1d756ec6dd9ed0c29c345ff5125581b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/config/class_settings_test.go @@ -0,0 +1,344 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantModel string + wantMaxTokens *float64 + wantTemperature *float64 + wantTopP float64 + wantFrequencyPenalty float64 + wantPresencePenalty float64 + wantResourceName string + wantDeploymentID string + wantIsAzure bool + wantErr error + wantBaseURL string + wantApiVersion string + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantModel: "gpt-5-mini", + wantMaxTokens: nil, + wantTemperature: nil, + wantTopP: 1, + wantFrequencyPenalty: 0.0, + wantPresencePenalty: 0.0, + wantErr: nil, + wantBaseURL: "https://api.openai.com", + wantApiVersion: "2024-06-01", + }, + { + name: "Everything non default configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "gpt-3.5-turbo", + "maxTokens": 4096, + "temperature": 0.5, + "topP": 3, + "frequencyPenalty": 0.1, + "presencePenalty": 0.9, + }, + }, + wantModel: "gpt-3.5-turbo", 
+ wantMaxTokens: ptrFloat64(4096), + wantTemperature: ptrFloat64(0.5), + wantTopP: 3, + wantFrequencyPenalty: 0.1, + wantPresencePenalty: 0.9, + wantErr: nil, + wantBaseURL: "https://api.openai.com", + wantApiVersion: "2024-06-01", + }, + { + name: "OpenAI Proxy", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "gpt-3.5-turbo", + "maxTokens": 4096, + "temperature": 0.5, + "topP": 3, + "frequencyPenalty": 0.1, + "presencePenalty": 0.9, + "baseURL": "https://proxy.weaviate.dev/", + }, + }, + wantBaseURL: "https://proxy.weaviate.dev/", + wantApiVersion: "2024-06-01", + wantModel: "gpt-3.5-turbo", + wantMaxTokens: ptrFloat64(4096), + wantTemperature: ptrFloat64(0.5), + wantTopP: 3, + wantFrequencyPenalty: 0.1, + wantPresencePenalty: 0.9, + wantErr: nil, + }, + { + name: "Legacy config", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "text-davinci-003", + "maxTokens": 1200, + "temperature": 0.5, + "topP": 3, + "frequencyPenalty": 0.1, + "presencePenalty": 0.9, + }, + }, + wantModel: "text-davinci-003", + wantMaxTokens: ptrFloat64(1200), + wantTemperature: ptrFloat64(0.5), + wantTopP: 3, + wantFrequencyPenalty: 0.1, + wantPresencePenalty: 0.9, + wantErr: nil, + wantBaseURL: "https://api.openai.com", + wantApiVersion: "2024-06-01", + }, + { + name: "Azure OpenAI config", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "isAzure": true, + "resourceName": "weaviate", + "deploymentId": "gpt-3.5-turbo", + "maxTokens": 4096, + "temperature": 0.5, + "topP": 3, + "frequencyPenalty": 0.1, + "presencePenalty": 0.9, + }, + }, + wantResourceName: "weaviate", + wantDeploymentID: "gpt-3.5-turbo", + wantIsAzure: true, + wantModel: "gpt-5-mini", + wantMaxTokens: ptrFloat64(4096), + wantTemperature: ptrFloat64(0.5), + wantTopP: 3, + wantFrequencyPenalty: 0.1, + wantPresencePenalty: 0.9, + wantErr: nil, + wantBaseURL: "https://api.openai.com", + wantApiVersion: "2024-06-01", + }, + { + name: "Azure OpenAI config with 
baseURL", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "isAzure": true, + "baseURL": "some-base-url", + "resourceName": "weaviate", + "deploymentId": "gpt-3.5-turbo", + "maxTokens": 4096, + "temperature": 0.5, + "topP": 3, + "frequencyPenalty": 0.1, + "presencePenalty": 0.9, + }, + }, + wantResourceName: "weaviate", + wantDeploymentID: "gpt-3.5-turbo", + wantIsAzure: true, + wantModel: "gpt-5-mini", + wantMaxTokens: ptrFloat64(4096), + wantTemperature: ptrFloat64(0.5), + wantTopP: 3, + wantFrequencyPenalty: 0.1, + wantPresencePenalty: 0.9, + wantErr: nil, + wantBaseURL: "some-base-url", + wantApiVersion: "2024-06-01", + }, + { + name: "With gpt-3.5-turbo-16k model", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "gpt-3.5-turbo-16k", + "maxTokens": 4096, + "temperature": 0.5, + "topP": 3, + "frequencyPenalty": 0.1, + "presencePenalty": 0.9, + }, + }, + wantModel: "gpt-3.5-turbo-16k", + wantMaxTokens: ptrFloat64(4096), + wantTemperature: ptrFloat64(0.5), + wantTopP: 3, + wantFrequencyPenalty: 0.1, + wantPresencePenalty: 0.9, + wantErr: nil, + wantBaseURL: "https://api.openai.com", + wantApiVersion: "2024-06-01", + }, + { + name: "Wrong maxTokens configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "maxTokens": true, + }, + }, + wantErr: fmt.Errorf("Wrong maxTokens configuration, values are should have a minimal value of 1 and max is dependant on the model used"), + }, + { + name: "Wrong temperature configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "temperature": true, + }, + }, + wantErr: fmt.Errorf("Wrong temperature configuration, values are between 0.0 and 1.0"), + }, + { + name: "Third party provider, use max tokens", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "model-that-openai-does-not-have", + "baseURL": "https://something-else.com", + "maxTokens": 4096, + }, + }, + wantErr: fmt.Errorf("wrong OpenAI model name, available model 
names are: [gpt-3.5-turbo gpt-3.5-turbo-16k gpt-3.5-turbo-1106 gpt-4 gpt-4-32k gpt-4-1106-preview gpt-4o gpt-4o-mini gpt-5 gpt-5-mini gpt-5-nano]"), + }, + { + name: "Wrong frequencyPenalty configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "frequencyPenalty": true, + }, + }, + wantErr: fmt.Errorf("Wrong frequencyPenalty configuration, values are between 0.0 and 1.0"), + }, + { + name: "Wrong presencePenalty configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "presencePenalty": true, + }, + }, + wantErr: fmt.Errorf("Wrong presencePenalty configuration, values are between 0.0 and 1.0"), + }, + { + name: "Wrong topP configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "topP": true, + }, + }, + wantErr: fmt.Errorf("Wrong topP configuration, values are should have a minimal value of 1 and max of 5"), + }, + { + name: "Wrong Azure config - empty deploymentId", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "resourceName": "resource-name", + "isAzure": true, + }, + }, + wantErr: fmt.Errorf("both resourceName and deploymentId must be provided"), + }, + { + name: "Wrong Azure config - empty resourceName", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "deploymentId": "deployment-name", + "isAzure": true, + }, + }, + wantErr: fmt.Errorf("both resourceName and deploymentId must be provided"), + }, + { + name: "Wrong Azure config - wrong api version", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "apiVersion": "wrong-api-version", + }, + }, + wantErr: fmt.Errorf("wrong Azure OpenAI apiVersion, available api versions are: " + + "[2022-12-01 2023-03-15-preview 2023-05-15 2023-06-01-preview 2023-07-01-preview 2023-08-01-preview 2023-09-01-preview 2023-12-01-preview 2024-02-15-preview 2024-03-01-preview 2024-02-01 2024-06-01]"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if 
tt.wantErr != nil { + assert.EqualError(t, tt.wantErr, ic.Validate(nil).Error()) + } else { + assert.NoError(t, ic.Validate(nil)) + assert.Equal(t, tt.wantModel, ic.Model()) + assert.Equal(t, tt.wantMaxTokens, ic.MaxTokens()) + assert.Equal(t, tt.wantTemperature, ic.Temperature()) + assert.Equal(t, tt.wantTopP, ic.TopP()) + assert.Equal(t, tt.wantFrequencyPenalty, ic.FrequencyPenalty()) + assert.Equal(t, tt.wantPresencePenalty, ic.PresencePenalty()) + assert.Equal(t, tt.wantResourceName, ic.ResourceName()) + assert.Equal(t, tt.wantDeploymentID, ic.DeploymentID()) + assert.Equal(t, tt.wantIsAzure, ic.IsAzure()) + assert.Equal(t, tt.wantBaseURL, ic.BaseURL()) + assert.Equal(t, tt.wantApiVersion, ic.ApiVersion()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +func ptrFloat64(in float64) *float64 { + return &in +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-openai/module.go b/platform/dbops/binaries/weaviate-src/modules/generative-openai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..d3df0ad82711838f1b2f0d9a2b9b3b124332dc0d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/module.go @@ -0,0 +1,87 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| 
\_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modgenerativeopenai + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-openai/clients" + "github.com/weaviate/weaviate/modules/generative-openai/parameters" +) + +const Name = "generative-openai" + +func New() *GenerativeOpenAIModule { + return &GenerativeOpenAIModule{} +} + +type GenerativeOpenAIModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeOpenAIModule) Name() string { + return Name +} + +func (m *GenerativeOpenAIModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeOpenAIModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", Name) + } + return nil +} + +func (m *GenerativeOpenAIModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + openAIApiKey := os.Getenv("OPENAI_APIKEY") + openAIOrganization := os.Getenv("OPENAI_ORGANIZATION") + azureApiKey := os.Getenv("AZURE_APIKEY") + + client := clients.New(openAIApiKey, openAIOrganization, azureApiKey, timeout, logger) + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(m.generative) + + return nil +} + +func (m *GenerativeOpenAIModule) MetaInfo() (map[string]interface{}, error) { + return 
m.generative.MetaInfo() +} + +func (m *GenerativeOpenAIModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-openai/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-openai/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..1d7c2a206b49a7df46404f8f8600d531d58e1036 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/parameters/graphql.go @@ -0,0 +1,106 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "baseURL": &graphql.InputObjectFieldConfig{ + Description: "baseURL", + Type: graphql.String, + }, + "apiVersion": &graphql.InputObjectFieldConfig{ + Description: "apiVersion", + Type: graphql.String, + }, + "resourceName": &graphql.InputObjectFieldConfig{ + Description: "resourceName", + Type: graphql.String, + }, + "deploymentId": &graphql.InputObjectFieldConfig{ + Description: "deploymentId", + Type: graphql.String, + }, + "isAzure": &graphql.InputObjectFieldConfig{ + Description: "isAzure", + Type: graphql.Boolean, + }, + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "frequencyPenalty": &graphql.InputObjectFieldConfig{ + Description: "frequencyPenalty", + Type: graphql.Float, + }, + "maxTokens": &graphql.InputObjectFieldConfig{ + Description: "maxTokens", + Type: graphql.Int, + }, + "n": &graphql.InputObjectFieldConfig{ + Description: "n", + Type: graphql.Int, + }, + "presencePenalty": &graphql.InputObjectFieldConfig{ + Description: "presencePenalty", + Type: graphql.Float, + }, + "stop": &graphql.InputObjectFieldConfig{ + Description: "stop", + Type: graphql.NewList(graphql.String), + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + "topP": &graphql.InputObjectFieldConfig{ + Description: "topP", + Type: graphql.Float, + }, + "images": &graphql.InputObjectFieldConfig{ + Description: "images", + Type: graphql.NewList(graphql.String), + }, + "imageProperties": &graphql.InputObjectFieldConfig{ + Description: "imageProperties", 
+ Type: graphql.NewList(graphql.String), + }, + }, + }), + DefaultValue: nil, + } +} + +func output(prefix string) *graphql.Field { + return &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sFields", prefix, Name), + Fields: graphql.Fields{ + "usage": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sUsageMetadataFields", prefix, Name), + Fields: graphql.Fields{ + "prompt_tokens": &graphql.Field{Type: graphql.Int}, + "completion_tokens": &graphql.Field{Type: graphql.Int}, + "total_tokens": &graphql.Field{Type: graphql.Int}, + }, + })}, + }, + })} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-openai/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-openai/parameters/params.go new file mode 100644 index 0000000000000000000000000000000000000000..d66f9dcb1ae4af45927835d267762bacee029d16 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/parameters/params.go @@ -0,0 +1,79 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + BaseURL string + ApiVersion string + ResourceName string + DeploymentID string + IsAzure bool + Model string + FrequencyPenalty *float64 + MaxTokens *int + N *int + PresencePenalty *float64 + Stop []string + Temperature *float64 + TopP *float64 + Images []*string + ImageProperties []string +} + +func extract(field *ast.ObjectField) interface{} { + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "baseURL": + out.BaseURL = gqlparser.GetValueAsStringOrEmpty(f) + case "apiVersion": + out.ApiVersion = gqlparser.GetValueAsStringOrEmpty(f) + case "resourceName": + out.ResourceName = gqlparser.GetValueAsStringOrEmpty(f) + case "deploymentId": + out.DeploymentID = gqlparser.GetValueAsStringOrEmpty(f) + case "isAzure": + out.IsAzure = gqlparser.GetValueAsBoolOrFalse(f) + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "frequencyPenalty": + out.FrequencyPenalty = gqlparser.GetValueAsFloat64(f) + case "maxTokens": + out.MaxTokens = gqlparser.GetValueAsInt(f) + case "n": + out.N = gqlparser.GetValueAsInt(f) + case "presencePenalty": + out.PresencePenalty = gqlparser.GetValueAsFloat64(f) + case "stop": + out.Stop = gqlparser.GetValueAsStringArray(f) + case "temperature": + out.Temperature = gqlparser.GetValueAsFloat64(f) + case "topP": + out.TopP = gqlparser.GetValueAsFloat64(f) + case "images": + out.Images = gqlparser.GetValueAsStringPtrArray(f) + case "imageProperties": + out.ImageProperties = gqlparser.GetValueAsStringArray(f) + default: + // do nothing + } + } + } + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-openai/parameters/provider.go 
b/platform/dbops/binaries/weaviate-src/modules/generative-openai/parameters/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..7e394fc76262b04c2ffdca966ad15d7cb26229e7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-openai/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +const Name = "openai" + +func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty { + return map[string]modulecapabilities.GenerativeProperty{ + Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: output, ExtractRequestParamsFunction: extract}, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-xai/clients/xai.go b/platform/dbops/binaries/weaviate-src/modules/generative-xai/clients/xai.go new file mode 100644 index 0000000000000000000000000000000000000000..b8ee2ba64441eecb9f53fe980905e09ed82ea776 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-xai/clients/xai.go @@ -0,0 +1,328 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "strconv" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/generative" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-xai/config" + xaiparams "github.com/weaviate/weaviate/modules/generative-xai/parameters" +) + +type xai struct { + apiKey string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *xai { + return &xai{ + apiKey: apiKey, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (v *xai) GenerateSingleResult(ctx context.Context, properties *modulecapabilities.GenerateProperties, prompt string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forPrompt, err := generative.MakeSinglePrompt(generative.Text(properties), prompt) + if err != nil { + return nil, err + } + return v.generate(ctx, cfg, forPrompt, options, debug) +} + +func (v *xai) GenerateAllResults(ctx context.Context, properties []*modulecapabilities.GenerateProperties, task string, options interface{}, debug bool, cfg moduletools.ClassConfig) (*modulecapabilities.GenerateResponse, error) { + forTask, err := generative.MakeTaskPrompt(generative.Texts(properties), task) + if err != nil { + return nil, err + } + return v.generate(ctx, cfg, forTask, options, debug) +} + +func (v *xai) generate(ctx context.Context, cfg moduletools.ClassConfig, prompt string, options interface{}, debug bool) (*modulecapabilities.GenerateResponse, error) { + params := v.getParameters(cfg, options) + debugInformation := v.getDebugInformation(debug, prompt) 
+ + xaiUrl := v.getXaiUrl(ctx, params.BaseURL) + input := v.getRequest(prompt, params) + + body, err := json.Marshal(input) + if err != nil { + return nil, errors.Wrap(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", xaiUrl, + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + apiKey, err := v.getApiKey(ctx) + if err != nil { + return nil, errors.Wrapf(err, "xAI API Key") + } + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey)) + req.Header.Add("Content-Type", "application/json") + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody generateResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + + if res.StatusCode != 200 || resBody.Error != nil { + if resBody.Error != nil { + return nil, errors.Errorf("connection to xAI API failed with status: %d error: %s", res.StatusCode, resBody.Error.Message) + } + return nil, errors.Errorf("connection to xAI API failed with status: %d", res.StatusCode) + } + + textResponse := resBody.Choices[0].Message.Content + + return &modulecapabilities.GenerateResponse{ + Result: &textResponse, + Debug: debugInformation, + Params: v.getResponseParams(resBody.Usage), + }, nil +} + +func (v *xai) getRequest(prompt string, params xaiparams.Params) generateInput { + var input generateInput + + var content interface{} + if len(params.Images) > 0 { + imageInput := contentImageInput{} + imageInput = append(imageInput, contentText{ + Type: "text", + Text: prompt, + }) + for i := range params.Images { + url := fmt.Sprintf("data:image/jpeg;base64,%s", *params.Images[i]) + imageInput = append(imageInput, contentImage{ + 
Type: "image_url", + ImageURL: contentImageURL{URL: &url}, + }) + } + content = imageInput + } else { + content = prompt + } + + messages := []message{{ + Role: "user", + Content: content, + }} + + input = generateInput{ + Messages: messages, + Model: params.Model, + Stream: false, + MaxTokens: params.MaxTokens, + Temperature: params.Temperature, + TopP: params.TopP, + } + + return input +} + +func (v *xai) getParameters(cfg moduletools.ClassConfig, options interface{}) xaiparams.Params { + settings := config.NewClassSettings(cfg) + + var params xaiparams.Params + if p, ok := options.(xaiparams.Params); ok { + params = p + } + if params.BaseURL == "" { + params.BaseURL = settings.BaseURL() + } + if params.Model == "" { + params.Model = settings.Model() + } + if params.Temperature == nil { + params.Temperature = settings.Temperature() + } + if params.MaxTokens == nil { + params.MaxTokens = settings.MaxTokens() + } + if params.TopP == nil { + params.TopP = settings.TopP() + } + return params +} + +func (v *xai) getDebugInformation(debug bool, prompt string) *modulecapabilities.GenerateDebugInformation { + if debug { + return &modulecapabilities.GenerateDebugInformation{ + Prompt: prompt, + } + } + return nil +} + +func (v *xai) getResponseParams(usage *usage) map[string]interface{} { + if usage != nil { + return map[string]interface{}{xaiparams.Name: map[string]interface{}{"usage": usage}} + } + return nil +} + +func GetResponseParams(result map[string]interface{}) *responseParams { + if params, ok := result[xaiparams.Name].(map[string]interface{}); ok { + if usage, ok := params["usage"].(*usage); ok { + return &responseParams{Usage: usage} + } + } + return nil +} + +func (v *xai) getXaiUrl(ctx context.Context, baseURL string) string { + passedBaseURL := baseURL + if headerBaseURL := modulecomponents.GetValueFromContext(ctx, "X-Xai-Baseurl"); headerBaseURL != "" { + passedBaseURL = headerBaseURL + } + return fmt.Sprintf("%s/v1/chat/completions", passedBaseURL) +} + 
+func (v *xai) getApiKey(ctx context.Context) (string, error) { + if apiKey := modulecomponents.GetValueFromContext(ctx, "X-Xai-Api-Key"); apiKey != "" { + return apiKey, nil + } + if v.apiKey != "" { + return v.apiKey, nil + } + return "", errors.New("no api key found " + + "neither in request header: X-xAI-Api-Key " + + "nor in environment variable under XAI_APIKEY") +} + +type generateInput struct { + Prompt string `json:"prompt,omitempty"` + Messages []message `json:"messages,omitempty"` + Stream bool `json:"stream,omitempty"` + Model string `json:"model,omitempty"` + FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"` + Logprobs *bool `json:"logprobs,omitempty"` + TopLogprobs *int `json:"top_logprobs,omitempty"` + MaxTokens *int `json:"max_tokens,omitempty"` + N *int `json:"n,omitempty"` + PresencePenalty *float64 `json:"presence_penalty,omitempty"` + Stop []string `json:"stop,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"top_p,omitempty"` +} + +type responseMessage struct { + Role string `json:"role"` + Content string `json:"content"` + Name string `json:"name,omitempty"` +} + +type message struct { + Role string `json:"role"` + Content interface{} `json:"content"` // string or array of contentText and contentImage + Name string `json:"name,omitempty"` +} + +type contentImageInput []interface{} + +type contentText struct { + Type string `json:"type"` + Text string `json:"text"` +} + +type contentImage struct { + Type string `json:"type"` + ImageURL contentImageURL `json:"image_url,omitempty"` +} + +type contentImageURL struct { + URL *string `json:"url"` +} + +type generateResponse struct { + Choices []choice + Usage *usage `json:"usage,omitempty"` + Error *openAIApiError `json:"error,omitempty"` +} + +type choice struct { + FinishReason string + Index float32 + Text string `json:"text,omitempty"` + Message *responseMessage `json:"message,omitempty"` +} + +type openAIApiError struct { + Message string 
// openAICode models the "code" field of an xAI/OpenAI-style error payload,
// which may arrive either as a JSON number or as a JSON string; both forms
// are normalized to their string representation.
type openAICode string

// String renders the code as plain text; a nil receiver yields "".
func (c *openAICode) String() string {
	if c == nil {
		return ""
	}
	return string(*c)
}

// UnmarshalJSON accepts both numeric and string JSON values for the code.
func (c *openAICode) UnmarshalJSON(data []byte) error {
	// Numeric form: parse and re-render so the stored value is the
	// canonical decimal representation of the number.
	if n, convErr := strconv.Atoi(string(data)); convErr == nil {
		*c = openAICode(strconv.Itoa(n))
		return nil
	}
	// String form: delegate to the standard decoder so quoting and escape
	// rules are honored.
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	*c = openAICode(s)
	return nil
}
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *xai) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Generative Search - xAI", + "documentationHref": "https://docs.x.ai/docs/overview", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-xai/clients/xai_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-xai/clients/xai_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c750752eb9248c4e59881856d039cd724b8eca77 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-xai/clients/xai_test.go @@ -0,0 +1,174 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func TestGetAnswer(t *testing.T) { + tests := []struct { + name string + answer generateResponse + errorResponse *generateResponse + timeout time.Duration + expectedResult string + }{ + { + name: "when the server has a successful answer", + answer: generateResponse{ + Choices: []choice{ + { + Message: &responseMessage{ + Role: "user", + Content: "John", + }, + }, + }, + }, + expectedResult: "John", + }, + { + name: "when the server has an error", + errorResponse: &generateResponse{ + Error: &openAIApiError{ + Message: "Account 'x': Cloud credits expired - Please contact xAI representatives", + }, + 
}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + handler := &testAnswerHandler{ + t: t, + answer: tt.answer, + errorResponse: tt.errorResponse, + timeout: tt.timeout, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", tt.timeout, nullLogger()) + + settings := &fakeClassConfig{baseURL: server.URL} + props := []*modulecapabilities.GenerateProperties{{Text: map[string]string{"prop": "My name is john"}}} + res, err := c.GenerateAllResults(context.Background(), props, "What is my name?", nil, false, settings) + + if tt.errorResponse != nil { + assert.Contains(t, err.Error(), tt.errorResponse.Error.Message) + } else { + assert.Equal(t, tt.expectedResult, *res.Result) + } + }) + } + t.Run("when X-Xai-BaseURL header is passed", func(t *testing.T) { + c := New("apiKey", 5*time.Second, nullLogger()) + baseUrl := "https://integrate.api.xai.com" + + ctxWithValue := context.WithValue(context.Background(), + "X-Xai-BaseURL", []string{"https://integrate.api.xai.com"}) + buildURL := c.getXaiUrl(ctxWithValue, baseUrl) + assert.Equal(t, "https://integrate.api.xai.com/v1/chat/completions", buildURL) + + buildURL = c.getXaiUrl(context.Background(), baseUrl) + assert.Equal(t, "https://integrate.api.xai.com/v1/chat/completions", buildURL) + }) +} + +type testAnswerHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + answer generateResponse + errorResponse *generateResponse + timeout time.Duration +} + +func (f *testAnswerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + time.Sleep(f.timeout) + + if f.errorResponse != nil { + outBytes, err := json.Marshal(f.errorResponse) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusPaymentRequired) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + 
require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +type fakeClassConfig struct { + baseURL string +} + +func (cfg *fakeClassConfig) Tenant() string { + return "" +} + +func (cfg *fakeClassConfig) Class() map[string]interface{} { + return nil +} + +func (cfg *fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + settings := map[string]interface{}{ + "baseURL": cfg.baseURL, + } + return settings +} + +func (cfg *fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-xai/config.go b/platform/dbops/binaries/weaviate-src/modules/generative-xai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..74d516e4e464237d693c32e7d80fa53a370f7a00 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-xai/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modgenerativexai + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/generative-xai/config" +) + +func (m *GenerativeXaiModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeXaiModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GenerativeXaiModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := config.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-xai/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/generative-xai/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..7bac817a6a3fde506fda259b6402a99ce3fb0fb9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-xai/config/class_settings.go @@ -0,0 +1,69 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + baseURLProperty = "baseURL" + modelProperty = "model" + temperatureProperty = "temperature" + topPProperty = "topP" + maxTokensProperty = "maxTokens" +) + +var ( + DefaultBaseURL = "https://api.x.ai" + DefaultXaiModel = "grok-2-latest" +) + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("generative-xai")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + return nil +} + +func (ic *classSettings) BaseURL() string { + return ic.propertyValuesHelper.GetPropertyAsString(ic.cfg, baseURLProperty, DefaultBaseURL) +} + +func (ic *classSettings) Model() string { + return ic.propertyValuesHelper.GetPropertyAsString(ic.cfg, modelProperty, DefaultXaiModel) +} + +func (ic *classSettings) Temperature() *float64 { + return ic.propertyValuesHelper.GetPropertyAsFloat64(ic.cfg, temperatureProperty, nil) +} + +func (ic *classSettings) TopP() *float64 { + return ic.propertyValuesHelper.GetPropertyAsFloat64(ic.cfg, topPProperty, nil) +} + +func (ic *classSettings) MaxTokens() *int { + return ic.propertyValuesHelper.GetPropertyAsInt(ic.cfg, maxTokensProperty, nil) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-xai/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/generative-xai/config/class_settings_test.go new file mode 100644 index 
0000000000000000000000000000000000000000..a1b85bba13ee0ee16faddcfc5a96192cd261783b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-xai/config/class_settings_test.go @@ -0,0 +1,116 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantBaseURL string + wantModel string + wantTemperature *float64 + wantTopP *float64 + wantMaxTokens *int + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantBaseURL: "https://api.x.ai", + wantModel: "grok-2-latest", + wantErr: nil, + }, + { + name: "everything non default configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "baseURL": "https://api.x.ai", + "model": "grok-2-latest", + "temperature": 0.5, + "topP": 1, + "maxTokens": 1024, + }, + }, + wantBaseURL: "https://api.x.ai", + wantModel: "grok-2-latest", + wantTemperature: ptFloat64(0.5), + wantTopP: ptFloat64(1), + wantMaxTokens: ptInt(1024), + wantErr: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.Equal(t, tt.wantErr.Error(), ic.Validate(nil).Error()) + } else { + assert.NoError(t, ic.Validate(nil)) + assert.Equal(t, tt.wantModel, ic.Model()) + assert.Equal(t, tt.wantMaxTokens, ic.MaxTokens()) + assert.Equal(t, tt.wantTemperature, ic.Temperature()) + assert.Equal(t, 
// ptInt returns a pointer to a copy of in, for building *int expectations.
func ptInt(in int) *int {
	v := in
	return &v
}

// ptFloat64 returns a pointer to a copy of in, for building *float64
// expectations.
func ptFloat64(in float64) *float64 {
	v := in
	return &v
}
+// +// CONTACT: hello@weaviate.io +// + +package modgenerativexai + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/generative-xai/clients" + "github.com/weaviate/weaviate/modules/generative-xai/parameters" +) + +const Name = "generative-xai" + +func New() *GenerativeXaiModule { + return &GenerativeXaiModule{} +} + +type GenerativeXaiModule struct { + generative generativeClient + additionalPropertiesProvider map[string]modulecapabilities.GenerativeProperty +} + +type generativeClient interface { + modulecapabilities.GenerativeClient + MetaInfo() (map[string]interface{}, error) +} + +func (m *GenerativeXaiModule) Name() string { + return Name +} + +func (m *GenerativeXaiModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextGenerative +} + +func (m *GenerativeXaiModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrapf(err, "init %s", Name) + } + + return nil +} + +func (m *GenerativeXaiModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("XAI_APIKEY") + + client := clients.New(apiKey, timeout, logger) + m.generative = client + m.additionalPropertiesProvider = parameters.AdditionalGenerativeParameters(m.generative) + + return nil +} + +func (m *GenerativeXaiModule) MetaInfo() (map[string]interface{}, error) { + return m.generative.MetaInfo() +} + +func (m *GenerativeXaiModule) AdditionalGenerativeProperties() map[string]modulecapabilities.GenerativeProperty { + return m.additionalPropertiesProvider +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ 
= modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.AdditionalGenerativeProperties(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-xai/parameters/graphql.go b/platform/dbops/binaries/weaviate-src/modules/generative-xai/parameters/graphql.go new file mode 100644 index 0000000000000000000000000000000000000000..3a0ceef5f7ad16fd1683a5f59e6b9f337316755e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-xai/parameters/graphql.go @@ -0,0 +1,74 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func input(prefix string) *graphql.InputObjectFieldConfig { + return &graphql.InputObjectFieldConfig{ + Description: fmt.Sprintf("%s settings", Name), + Type: graphql.NewInputObject(graphql.InputObjectConfig{ + Name: fmt.Sprintf("%s%sInputObject", prefix, Name), + Fields: graphql.InputObjectConfigFieldMap{ + "baseURL": &graphql.InputObjectFieldConfig{ + Description: "baseURL", + Type: graphql.String, + }, + "model": &graphql.InputObjectFieldConfig{ + Description: "model", + Type: graphql.String, + }, + "temperature": &graphql.InputObjectFieldConfig{ + Description: "temperature", + Type: graphql.Float, + }, + "topP": &graphql.InputObjectFieldConfig{ + Description: "topP", + Type: graphql.Float, + }, + "maxTokens": &graphql.InputObjectFieldConfig{ + Description: "maxTokens", + Type: graphql.Int, + }, + "images": &graphql.InputObjectFieldConfig{ + Description: "images", + Type: graphql.NewList(graphql.String), + }, + "imageProperties": &graphql.InputObjectFieldConfig{ + Description: "imageProperties", + Type: graphql.NewList(graphql.String), + }, + }, + }), + DefaultValue: nil, + } +} + +func 
output(prefix string) *graphql.Field { + return &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sFields", prefix, Name), + Fields: graphql.Fields{ + "usage": &graphql.Field{Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%s%sUsageMetadataFields", prefix, Name), + Fields: graphql.Fields{ + "prompt_tokens": &graphql.Field{Type: graphql.Int}, + "completion_tokens": &graphql.Field{Type: graphql.Int}, + "total_tokens": &graphql.Field{Type: graphql.Int}, + }, + })}, + }, + })} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-xai/parameters/params.go b/platform/dbops/binaries/weaviate-src/modules/generative-xai/parameters/params.go new file mode 100644 index 0000000000000000000000000000000000000000..c50e06ac42e30d25fa2cd7fbf655bc97d95ecd85 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-xai/parameters/params.go @@ -0,0 +1,55 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import ( + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/usecases/modulecomponents/gqlparser" +) + +type Params struct { + BaseURL string + Model string + Temperature *float64 + TopP *float64 + MaxTokens *int + Images []*string + ImageProperties []string +} + +func extract(field *ast.ObjectField) interface{} { + out := Params{} + fields, ok := field.Value.GetValue().([]*ast.ObjectField) + if ok { + for _, f := range fields { + switch f.Name.Value { + case "baseURL": + out.BaseURL = gqlparser.GetValueAsStringOrEmpty(f) + case "model": + out.Model = gqlparser.GetValueAsStringOrEmpty(f) + case "temperature": + out.Temperature = gqlparser.GetValueAsFloat64(f) + case "topP": + out.TopP = gqlparser.GetValueAsFloat64(f) + case "maxTokens": + out.MaxTokens = gqlparser.GetValueAsInt(f) + case "images": + out.Images = gqlparser.GetValueAsStringPtrArray(f) + case "imageProperties": + out.ImageProperties = gqlparser.GetValueAsStringArray(f) + default: + // do nothing + } + } + } + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/generative-xai/parameters/provider.go b/platform/dbops/binaries/weaviate-src/modules/generative-xai/parameters/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..ec2644b775efae78a644f8e4a712df76b63232ba --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/generative-xai/parameters/provider.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package parameters + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +const Name = "xai" + +func AdditionalGenerativeParameters(client modulecapabilities.GenerativeClient) map[string]modulecapabilities.GenerativeProperty { + return map[string]modulecapabilities.GenerativeProperty{ + Name: {Client: client, RequestParamsFunction: input, ResponseParamsFunction: output, ExtractRequestParamsFunction: extract}, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/clients/startup.go b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/clients/startup.go new file mode 100644 index 0000000000000000000000000000000000000000..c515b7d94e05123cc945f6ff37727926196cf4a2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/clients/startup.go @@ -0,0 +1,68 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "time" + + "github.com/pkg/errors" +) + +func (v *vectorizer) WaitForStartup(initCtx context.Context, + interval time.Duration, +) error { + t := time.NewTicker(interval) + defer t.Stop() + expired := initCtx.Done() + var lastErr error + for { + select { + case <-t.C: + lastErr = v.checkReady(initCtx) + if lastErr == nil { + return nil + } + v.logger. + WithField("action", "img2vec_remote_wait_for_startup"). 
+ WithError(lastErr).Warnf("img2vec-neural inference service not ready") + case <-expired: + return errors.Wrapf(lastErr, "init context expired before remote was ready") + } + } +} + +func (v *vectorizer) checkReady(initCtx context.Context) error { + // spawn a new context (derived on the overall context) which is used to + // consider an individual request timed out + requestCtx, cancel := context.WithTimeout(initCtx, 500*time.Millisecond) + defer cancel() + + req, err := http.NewRequestWithContext(requestCtx, http.MethodGet, + v.url("/.well-known/ready"), nil) + if err != nil { + return errors.Wrap(err, "create check ready request") + } + + res, err := v.httpClient.Do(req) + if err != nil { + return errors.Wrap(err, "send check ready request") + } + + defer res.Body.Close() + if res.StatusCode > 299 { + return errors.Errorf("not ready: status %d", res.StatusCode) + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/clients/startup_test.go b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/clients/startup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ff65558b1e87f7f143c6c43afdaf422dd3829c34 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/clients/startup_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWaitForStartup(t *testing.T) { + t.Run("when the server is immediately ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + err := c.WaitForStartup(context.Background(), 50*time.Millisecond) + + assert.Nil(t, err) + }) + + t.Run("when the server is down", func(t *testing.T) { + c := New("http://nothing-running-at-this-url", 0, nullLogger()) + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 150*time.Millisecond) + + require.NotNil(t, err, nullLogger()) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is alive, but not ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(1 * time.Minute), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is initially not ready, but then becomes ready", + func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(100 * time.Millisecond), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.Nil(t, err) + }) +} + +type testReadyHandler 
struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testReadyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/.well-known/ready", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.WriteHeader(http.StatusNoContent) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/clients/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/clients/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..c6a2f05595104a9815dc8de5a15982c46aea30d4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/clients/vectorizer.go @@ -0,0 +1,103 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/img2vec-neural/ent" +) + +type vectorizer struct { + origin string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(origin string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + origin: origin, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, + id, image string, +) (*ent.VectorizationResult, error) { + body, err := json.Marshal(vecRequest{ + ID: id, + Image: image, + }) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", v.url("/vectors"), + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody vecResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if res.StatusCode > 399 { + return nil, errors.Errorf("fail with status %d", res.StatusCode) + } + + return &ent.VectorizationResult{ + ID: resBody.ID, + Image: image, + Dimensions: resBody.Dim, + Vector: resBody.Vector, + }, nil +} + +func (v *vectorizer) url(path string) string { + return fmt.Sprintf("%s%s", v.origin, path) +} + +type vecRequest struct { + ID string `json:"id"` + Image string `json:"image"` +} + +type vecResponse struct { + ID string `json:"id"` + Vector []float32 `json:"vector"` + Dim int `json:"dim"` + Error string `json:"error"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/config.go b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/config.go new file mode 100644 index 0000000000000000000000000000000000000000..13655d56b0208000115810c0e0df39778e31b859 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/config.go @@ -0,0 +1,44 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modimage + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/img2vec-neural/vectorizer" +) + +func (m *ImageModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *ImageModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *ImageModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + icheck := vectorizer.NewClassSettings(cfg) + return icheck.Validate() +} + +var ( + _ = modulecapabilities.ClassConfigurator(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/ent/vectorization_result.go b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/ent/vectorization_result.go new file mode 100644 index 0000000000000000000000000000000000000000..1fc23e163f517949550741d8a9120328aeb599db --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/ent/vectorization_result.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationResult struct { + ID string + Image string + Dimensions int + Vector []float32 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/module.go b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/module.go new file mode 100644 index 0000000000000000000000000000000000000000..20b502beb04b642ba79a97e7787fd0a9d67e02e1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/module.go @@ -0,0 +1,115 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modimage + +import ( + "context" + "os" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/img2vec-neural/clients" + "github.com/weaviate/weaviate/modules/img2vec-neural/vectorizer" +) + +const Name = "img2vec-neural" + +func New() *ImageModule { + return &ImageModule{} +} + +type ImageModule struct { + vectorizer imageVectorizer + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + logger logrus.FieldLogger +} + +type imageVectorizer interface { + Object(ctx context.Context, obj *models.Object, cfg moduletools.ClassConfig) ([]float32, models.AdditionalProperties, error) + VectorizeImage(ctx context.Context, + id, image string, cfg moduletools.ClassConfig) ([]float32, error) +} + +func (m *ImageModule) Name() string { + return Name +} + +func (m *ImageModule) Type() modulecapabilities.ModuleType { + return 
modulecapabilities.Img2Vec +} + +func (m *ImageModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initNearImage(); err != nil { + return errors.Wrap(err, "init near text") + } + + return nil +} + +func (m *ImageModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + // TODO: proper config management + uri := os.Getenv("IMAGE_INFERENCE_API") + if uri == "" { + return errors.Errorf("required variable IMAGE_INFERENCE_API is not set") + } + + client := clients.New(uri, timeout, logger) + if err := client.WaitForStartup(ctx, 1*time.Second); err != nil { + return errors.Wrap(err, "init remote vectorizer") + } + + m.vectorizer = vectorizer.New(client) + + return nil +} + +func (m *ImageModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg) +} + +func (m *ImageModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + ichek := vectorizer.NewClassSettings(cfg) + mediaProps, err := ichek.Properties() + return false, mediaProps, err +} + +func (m *ImageModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + return batch.VectorizeBatch(ctx, objs, skipObject, cfg, m.logger, m.vectorizer.Object) +} + +func (m *ImageModule) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{}, nil +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) +) diff --git 
a/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/nearImage.go b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/nearImage.go new file mode 100644 index 0000000000000000000000000000000000000000..408b860e9c16db93a4f6fd809a504fec8d2572cd --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/nearImage.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modimage + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearImage" +) + +func (m *ImageModule) initNearImage() error { + m.searcher = nearImage.NewSearcher(m.vectorizer) + m.graphqlProvider = nearImage.New() + return nil +} + +func (m *ImageModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *ImageModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..b1fb404d0202f8b68e3609fa6549921222d8f800 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/class_settings.go @@ -0,0 +1,96 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 
- 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "errors" + + "github.com/weaviate/weaviate/entities/moduletools" +) + +type ClsSettings struct { + Cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *ClsSettings { + return &ClsSettings{Cfg: cfg} +} + +func (ic *ClsSettings) Properties() ([]string, error) { + if ic.Cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return nil, errors.New("empty config") + } + + imageFields, ok := ic.Cfg.Class()["imageFields"] + if !ok { + return nil, errors.New("imageFields not present") + } + + imageFieldsArray, ok := imageFields.([]interface{}) + if !ok { + return nil, errors.New("imageFields must be an array") + } + + fieldNames := make([]string, len(imageFieldsArray)) + for i, value := range imageFieldsArray { + fieldNames[i] = value.(string) + } + return fieldNames, nil +} + +func (ic *ClsSettings) ImageField(property string) bool { + fieldNames, err := ic.Properties() + if err != nil { + return false + } + for i := range fieldNames { + if fieldNames[i] == property { + return true + } + } + + return false +} + +func (ic *ClsSettings) Validate() error { + if ic.Cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + imageFields, ok := ic.Cfg.Class()["imageFields"] + if !ok { + return errors.New("imageFields not present") + } + + imageFieldsArray, ok := imageFields.([]interface{}) + if !ok { + return errors.New("imageFields must be an array") + } + + if len(imageFieldsArray) == 0 { + return errors.New("must contain at least one image field name in imageFields") + } + + for _, value := range imageFieldsArray { + v, ok := value.(string) + if !ok { + return errors.New("imageField must be a string") + } + if len(v) == 0 { + return errors.New("imageField values cannot be empty") + } + } + + return nil +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..24b8956cca41fc118ab4270526e2ab00e6082d36 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/class_settings_test.go @@ -0,0 +1,94 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer_test + +import ( + "testing" + + "github.com/weaviate/weaviate/modules/img2vec-neural/vectorizer" + + "github.com/weaviate/weaviate/entities/moduletools" +) + +func Test_classSettings_Validate(t *testing.T) { + type fields struct { + cfg moduletools.ClassConfig + } + tests := []struct { + name string + fields fields + wantErr bool + }{ + { + name: "should not pass with empty config", + wantErr: true, + }, + { + name: "should not pass with nil config", + fields: fields{ + cfg: nil, + }, + wantErr: true, + }, + { + name: "should not pass with nil imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", nil).build(), + }, + wantErr: true, + }, + { + name: "should not pass with fault imageFields value", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []string{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty string in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{""}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with int 
value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{1.0}).build(), + }, + wantErr: true, + }, + { + name: "should pass with proper value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{"field"}).build(), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := &vectorizer.ClsSettings{ + Cfg: tt.fields.cfg, + } + if err := ic.Validate(); (err != nil) != tt.wantErr { + t.Errorf("ClsSettings.Validate() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..f18d25acb710d8cf0e02ff89dd522175f8a755fa --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/fakes_for_test.go @@ -0,0 +1,84 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer_test + +import ( + "context" + + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/img2vec-neural/ent" + "github.com/weaviate/weaviate/usecases/config" +) + +type builder struct { + fakeClassConfig *fakeClassConfig +} + +func newConfigBuilder() *builder { + return &builder{ + fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}}, + } +} + +func (b *builder) addSetting(name string, value interface{}) *builder { + b.fakeClassConfig.config[name] = value + return b +} + +func (b *builder) build() *fakeClassConfig { + return b.fakeClassConfig +} + +type fakeClassConfig struct { + config map[string]interface{} +} + +func (c fakeClassConfig) Class() map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Property(propName string) map[string]interface{} { + return c.config +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +type fakeClient struct{} + +func (c *fakeClient) Vectorize(ctx context.Context, + id, image string, +) (*ent.VectorizationResult, error) { + result := &ent.VectorizationResult{ + ID: id, + Image: image, + Vector: []float32{1.0, 2.0, 3.0, 4.0, 5.0}, + } + return result, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..fa33dde73c24bbddfff0d0653322cc40df69738b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/vectorizer.go @@ 
-0,0 +1,94 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/img2vec-neural/ent" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +type Vectorizer struct { + client Client +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + } +} + +type Client interface { + Vectorize(ctx context.Context, + id, image string) (*ent.VectorizationResult, error) +} + +type ClassSettings interface { + ImageField(property string) bool + Properties() ([]string, error) +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) VectorizeImage(ctx context.Context, id, image string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.Vectorize(ctx, id, image) + if err != nil { + return nil, err + } + + return res.Vector, nil +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, error) { + ichek := NewClassSettings(cfg) + + // vectorize image + images := []string{} + + if object.Properties != nil { + schemamap := object.Properties.(map[string]interface{}) + for _, propName := range moduletools.SortStringKeys(schemamap) { + if !ichek.ImageField(propName) { + continue + } + + switch val := schemamap[propName].(type) { + case string: + images = append(images, val) + + default: + } + + } + } + + vectors := [][]float32{} + for i, image := 
range images { + imgID := fmt.Sprintf("%s_%v", object.ID, i) + vector, err := v.VectorizeImage(ctx, imgID, image, cfg) + if err != nil { + return nil, err + } + vectors = append(vectors, vector) + } + + return libvectorizer.CombineVectors(vectors), nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..145f3a3e9cf45f92a2dba5f8ef829a9403efebc3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/img2vec-neural/vectorizer/vectorizer_test.go @@ -0,0 +1,71 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer_test + +import ( + "context" + "testing" + + "github.com/weaviate/weaviate/modules/img2vec-neural/vectorizer" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +const image = 
"iVBORw0KGgoAAAANSUhEUgAAAGAAAAA/CAYAAAAfQM0aAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpCRjQ5NEM3RDI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpCRjQ5NEM3RTI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOkJGNDk0QzdCMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOkJGNDk0QzdDMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+WeGRxAAAB2hJREFUeNrUXFtslUUQ3hJCoQVEKy0k1qQgrRg0vaAJaq1tvJSgaLy8mKDF2IvxBY2Bgm8+iIoxvhB72tTUmKgPigbFKCEtxeKD9hZjAi3GJrYJtqRai7TQB+pMz/zwU/5zzsxe2u4kXwiwZ+bb/Xb/s7v/zEmrra1VTFsFeBRQCtgEuBWwkv5vHPAn4DdAB+B7wBjXcUNDQ8o2dXV1SmDzyhUtLS3tBPyxC9CdrN1ihi/swKuA7YD0BG1uJhQDngdcAnwDeJ86Ole2kLii+J2AFsA+wF9RjRalmEUHaZY8m6RDUYZtn6HPHiRfLm2hck0D7AScAdRH8UokwD2AnwA7UoiUyhaRD/S12dHg+8B1OWA/4BTgqVQCPEJL8haLBNDXEfJt03ziipYH+BJwHFAYJcAWwCeAZQ6CLyPfWyz584nrbCuj74eHwgKsddih2R1ba+jHJ65R1k6PuWNhAd4DZM/BTiWbdhwm5hPXsA0AngY8COgNP4JwSTyu4zE/P18VFhZKP7aNYuouXxFX5Ic8Nc2Ea2D/AfYCNgIORZ0DdusOfnFxcXDwUD09PZKP76alKDUR16KiIlVQUHDl7/39/Uozpg7Xac45YB0dGrQHHw07KVwJpRRbYiKuyCc8+MhXcyXocP2RnvMvJhr8QIBK08EPbGJiQuqq0mX7KD4GIohi4xVPTU0N6/BRamPwu7u7dZb3/RozkW3IB3lZEkGHayeI8FFVVdWaZAIUcD2Wl5fbHHy024XtC6QBkomA/XHIFb8X0Xamp6efASHqt27dGnkVkcNxVlFRoXJycmwOvuL
GNmifVATsD/bLZezgKgKE2J+bm3sKHk3XXUWs4Mz87Oxs24OvOLEN26cUAfvFXAkrlKGBCDNXEbAajldXV1+5ijjP+KCrg855x+3nk2uy8SwDdIIIM1cRI6k+0NraqkZGRmzuKAIbFrYf0Q2UaPOA/Wpra3PBNfHhYHq6HbC5qanpGB7ETgPWc0TApTr7eyDolOaj6LRG+/W2Bn94eJg7+DpcowZ+AGb+642NjYfC3wEdXAdI1uK2Du2ksH2HrcHHfggGX4frNVcRMPh7BwcHN8ZiseuuIr4DvKXib29YX2bhmW+wEqYptsREXC2eWXS44oyfuYqYmpra19LSEnkaRgEG6Nj8gGRHESVCRkaG9Kg+IOyTiGtmZqatnZsOV/zMLnjcsF7KH5AIECVCX1+f6u3tlbg4oLmc2VyDy8HgPshg2yzmCo8aFsdAALzpw9dw23REwJkvHPwjSu92UcwVRcAnAd4LaQ6+CVe2AGivAe5WwhcdGp0aoVgmJuIqnBy2uSa18Buxs4AXAJMO401SjLOGfnziyhYg2GrtcNSxSfJ90pI/n7iyBUA7quKv/IYsxhmiZ/ZRy/x94soWAO1nwL0qnhVw2cD/ZfKBvjod9cEnrmwB0DBh9RUVfxHxhYrnUHLtEn2mlHyMOe6HT1wT7oISGSas4ntNzJmsVFczjnMBN1CbfwGD1BYPID8A/lFzbz5xZQsQnmWfExa6ecNVIsBKWuIlgA0qnjG2PLhsou0aZgF3qfil2fg89ssbrhwBNtB+GN/dLUnQ5kbCHYAnAFMAvGpsoY7OlS0krmOhxx7WLHwAeBLwVahN2uIUswgrPB5T8rRv7DxWqDwM+JaCjzue8b5wZe2C7gJ8quKVJqY599vJ1yZHffCJK0uA+wAfAtZYjIO+Gsi3TfOJK0sAfFP/jpKV+HBtKfkutOTPJ64sAVYD3qXgrmwpxVht6McnrmwBMAP4pjlYdRij3tCHT1xZAuDdermOA836gDKKqWNirob1ASZc2eeAl3QH36A+AGP+ohFWxNVSfYAuV9YKyKUTo/bgo2nUB5RQbImJuFqsD9DhyhbAuDgjMI36gFKX7S3XB5S6egSV2Bh8zYyDYjr4SGYi2yzmMIm5YnFGkFOLSQGNjY3X/BtaLBabWQF5XKcO6gOkZT950gAW6wPWuXoEZXEaOqoPyHLcPqkIwvqALFcCZHJmvqP6gEzH7VOKIKgPyHQlwIVUjRzWB1xw3H4+ubIFGE3VyGF9wKjj9ik3D4L6gFFXArCSTlEEzKe3LMIfwvYDNgcf+4P9csSVLUAXt7GD+oBuYfsuW4OvUR/Q7UoA/G2zaRvbOqEI0xRbYiKulusDTrgSYEg6sxKJIKwP6FLyjDYRV4v1ATpc2QKgNZtu6zTqA5o1ObM/h5eDyMvCtrlZObLgNhRv+jAHvkwqQjDzhYPfrvRvF0VcLdQHaHGNxWKrZv0d//hahcqr8Ccww1kRbwPuVMIXHRqd+ptimZiIq0F9gA2urEcQ2jkVf/tz0WG8ixTjnKEfn7iyBQi2WnuULLlV0qE9FrdzPnFlC4CGRQkvqyQ/MqRh6KtO2S948IkrWwC0XwHPAQ4r85z7w+TL1U8Y+8Q14S4oyjA9703AZ4AqFX8RvoTpN8i3/Bi/p+egHz5xZQsQGCasvqGuZhzj76DdpuIZx8FPuOAviWDG8e8qXl0yXxnHPnGdsf8FGAByGwC02iMZswAAAABJRU5ErkJggg==" + +func TestVectorizer(t *testing.T) { + t.Run("should vectorize image", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := vectorizer.New(client) + config := newConfigBuilder().addSetting("imageFields", 
[]interface{}{"image"}).build() + + props := map[string]interface{}{ + "image": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) + + t.Run("should vectorize 2 image fields", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := vectorizer.New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image1", "image2"}).build() + + props := map[string]interface{}{ + "image1": image, + "image2": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/clients/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/clients/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..66b3424d989ced1b66c11b705780a757ec395d48 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/clients/fakes_for_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/clients/jinaai.go b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/clients/jinaai.go new file mode 100644 index 0000000000000000000000000000000000000000..fa34b78d51a967a677d2670a9f85040308c4b2bb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/clients/jinaai.go @@ -0,0 +1,78 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/multi2multivec-jinaai/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/jinaai" +) + +const ( + defaultRPM = 500 // from https://jina.ai/embeddings/ + defaultTPM = 1_000_000 +) + +type vectorizer struct { + client *jinaai.Client[[][]float32] + logger logrus.FieldLogger +} + +func New(jinaAIApiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + client: jinaai.New[[][]float32](jinaAIApiKey, timeout, defaultRPM, defaultTPM, jinaai.EmbeddingsBuildUrlFn, logger), + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationCLIPResult[[][]float32], error) { + settings := ent.NewClassSettings(cfg) + res, err := v.client.VectorizeMultiModal(ctx, texts, images, jinaai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Task: jinaai.RetrievalPassage, + Normalized: true, + ReturnMultivector: true, + }) + return res, err +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, texts []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[][]float32], error) { + settings := ent.NewClassSettings(cfg) + res, _, _, err := v.client.Vectorize(ctx, texts, jinaai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Task: jinaai.RetrievalQuery, + Normalized: true, + ReturnMultivector: true, + }) + return res, err +} + +func (v *vectorizer) GetApiKeyHash(ctx context.Context, config moduletools.ClassConfig) [32]byte { + return v.client.GetApiKeyHash(ctx, config) +} + +func (v *vectorizer) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) 
*modulecomponents.RateLimits { + return v.client.GetVectorizerRateLimit(ctx, cfg) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/clients/jinaai_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/clients/jinaai_test.go new file mode 100644 index 0000000000000000000000000000000000000000..6df95245ef7009c673db4dd32fc8175d02b93646 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/clients/jinaai_test.go @@ -0,0 +1,199 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestClient(t *testing.T) { + defaultSettings := func(url string) fakeClassConfig { + return fakeClassConfig{classConfig: map[string]interface{}{"Model": "jina-embeddings-v4", "baseURL": url}} + } + t.Run("when all is fine and we send text only", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + + expected := &modulecomponents.VectorizationCLIPResult[[][]float32]{ + TextVectors: [][][]float32{{{0.1, 0.2, 0.3}, {0.11, 0.22, 0.33}}}, + } + res, err := c.Vectorize(context.Background(), []string{"This is my text"}, nil, defaultSettings(server.URL)) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when all is fine and we send image only", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: 
t}) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + + expected := &modulecomponents.VectorizationCLIPResult[[][]float32]{ + ImageVectors: [][][]float32{{{0.1, 0.2, 0.3}, {0.11, 0.22, 0.33}}}, + } + res, err := c.Vectorize(context.Background(), nil, []string{"base64"}, defaultSettings(server.URL)) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("apiKey", 0, nullLogger()) + + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, err := c.Vectorize(ctx, []string{"This is my text"}, nil, fakeClassConfig{classConfig: map[string]interface{}{}}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + c := New("apiKey", 0, nullLogger()) + + _, err := c.Vectorize(context.Background(), []string{"This is my text"}, nil, fakeClassConfig{classConfig: map[string]interface{}{"baseURL": server.URL}}) + + require.NotNil(t, err) + assert.EqualError(t, err, "connection to: JinaAI API failed with status: 500 error: nope, not gonna happen") + }) + + t.Run("when JinaAI key is passed using X-Jinaai-Api-Key header", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Jinaai-Api-Key", []string{"some-key"}) + + expected := &modulecomponents.VectorizationCLIPResult[[][]float32]{ + TextVectors: [][][]float32{{{0.1, 0.2, 0.3}, {0.11, 0.22, 0.33}}}, + } + res, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, nil, defaultSettings(server.URL)) + + require.Nil(t, err) + assert.Equal(t, 
expected, res) + }) + + t.Run("when JinaAI key is empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, err := c.Vectorize(ctx, []string{"This is my text"}, nil, fakeClassConfig{classConfig: map[string]interface{}{}}) + + require.NotNil(t, err) + assert.EqualError(t, err, "API Key: no api key found "+ + "neither in request header: X-Jinaai-Api-Key "+ + "nor in environment variable under JINAAI_APIKEY") + }) + + t.Run("when X-Jinaai-Api-Key header is passed but empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Jinaai-Api-Key", []string{""}) + + _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, nil, fakeClassConfig{classConfig: map[string]interface{}{"Model": "jina-embedding-v2"}}) + + require.NotNil(t, err) + assert.EqualError(t, err, "API Key: no api key found "+ + "neither in request header: X-Jinaai-Api-Key "+ + "nor in environment variable under JINAAI_APIKEY") + }) +} + +type fakeHandler struct { + t *testing.T + serverError error +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.serverError != nil { + embedding := map[string]interface{}{ + "detail": f.serverError.Error(), + } + outBytes, err := json.Marshal(embedding) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + textInputArray := b["input"].([]interface{}) + textInput := textInputArray[0].(map[string]interface{}) + assert.Greater(f.t, len(textInput), 0) + 
obj := textInput["text"] + if textInput["image"] != nil { + obj = textInput["image"] + } + + embeddingTextData := map[string]interface{}{ + "object": obj, + "index": 0, + "embeddings": [][]float32{{0.1, 0.2, 0.3}, {0.11, 0.22, 0.33}}, + } + + embedding := map[string]interface{}{ + "object": "list", + "data": []interface{}{embeddingTextData}, + } + + outBytes, err := json.Marshal(embedding) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..0ff72c39b86980666d3831ecc4f9ed5e4947034a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "JinaAI CLIP Multivec Module", + "documentationHref": "https://jina.ai/embeddings/", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/config.go b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..6ea5c5663a1b3e9cb6d5c6541711006c6000b0e2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/config.go @@ -0,0 +1,45 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modm2mvjinaai + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/multi2multivec-jinaai/ent" +) + +func (m *Module) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + ent.BaseURLProperty: ent.DefaultBaseURL, + ent.ModelProperty: ent.DefaultJinaAIModel, + "vectorizeClassName": ent.DefaultVectorizeClassName, + } +} + +func (m *Module) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *Module) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + icheck := ent.NewClassSettings(cfg) + return icheck.Validate() +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git 
a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..b3d42aa23b01ef375edf356afa6fb90d342a99da --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/ent/class_settings.go @@ -0,0 +1,213 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + BaseURLProperty = "baseURL" + ModelProperty = "model" +) + +const ( + DefaultBaseURL = "https://api.jina.ai" + DefaultJinaAIModel = "jina-embeddings-v4" + DefaultVectorizeClassName = false +) + +type classSettings struct { + base *basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{ + cfg: cfg, + base: basesettings.NewBaseClassSettingsWithAltNames(cfg, false, "multi2multivec-jinaai", nil, nil), + } +} + +// JinaAI settings +func (cs *classSettings) Model() string { + return cs.base.GetPropertyAsString(ModelProperty, DefaultJinaAIModel) +} + +func (cs *classSettings) BaseURL() string { + return cs.base.GetPropertyAsString(BaseURLProperty, DefaultBaseURL) +} + +// CLIP module specific settings +func (ic *classSettings) ImageField(property string) bool { + return ic.field("imageFields", property) +} + +func (ic *classSettings) TextField(property string) bool { + return ic.field("textFields", property) +} + +func (ic *classSettings) Properties() 
([]string, error) { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return nil, errors.New("empty config") + } + props := make([]string, 0) + + fields := []string{"textFields", "imageFields"} + + for _, field := range fields { + fields, ok := ic.base.GetSettings()[field] + if !ok { + continue + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return nil, errors.Errorf("%s must be an array", field) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return nil, errors.Errorf("%s must be a string", field) + } + props = append(props, v) + } + } + return props, nil +} + +func (ic *classSettings) field(name, property string) bool { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return false + } + + fields, ok := ic.base.GetSettings()[name] + if !ok { + return false + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return false + } + + for _, field := range fieldsArray { + if property == field.(string) { + return true + } + } + + return false +} + +func (ic *classSettings) Validate() error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + var errorMessages []string + + imageFields, imageFieldsOk := ic.cfg.Class()["imageFields"] + textFields, textFieldsOk := ic.cfg.Class()["textFields"] + if !imageFieldsOk && !textFieldsOk { + errorMessages = append(errorMessages, "textFields or imageFields setting needs to be present") + } + + if imageFieldsOk && textFieldsOk { + errorMessages = append(errorMessages, "only one textFields or imageFields setting needs to be present, not both") + } + + if imageFieldsOk { + if errorMsgs := ic.validateField("image", imageFields); len(errorMsgs) > 0 { + errorMessages = append(errorMessages, errorMsgs...) 
+ } + } + + if textFieldsOk { + if errorMsgs := ic.validateField("text", textFields); len(errorMsgs) > 0 { + errorMessages = append(errorMessages, errorMsgs...) + } + } + + if len(errorMessages) > 0 { + return fmt.Errorf("%s", strings.Join(errorMessages, ", ")) + } + + return nil +} + +func (ic *classSettings) validateField(name string, fields interface{}) []string { + var errorMessages []string + fieldsCount, err := ic.validateFields(name, fields) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + if fieldsCount > 1 { + errorMessages = append(errorMessages, fmt.Sprintf("only one %s property is allowed to define", name)) + } + _, ok := ic.getWeights(name) + if ok { + errorMessages = append(errorMessages, fmt.Sprintf("%s weights settings are not allowed to define", name)) + } + return errorMessages +} + +func (ic *classSettings) validateFields(name string, fields interface{}) (int, error) { + fieldsArray, ok := fields.([]interface{}) + if !ok { + return 0, errors.Errorf("%sFields must be an array", name) + } + + if len(fieldsArray) == 0 { + return 0, errors.Errorf("must contain at least one %s field name in %sFields", name, name) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return 0, errors.Errorf("%sField must be a string", name) + } + if len(v) == 0 { + return 0, errors.Errorf("%sField values cannot be empty", name) + } + } + + return len(fieldsArray), nil +} + +func (ic *classSettings) getWeights(name string) ([]interface{}, bool) { + weights, ok := ic.base.GetSettings()["weights"] + if ok { + weightsObject, ok := weights.(map[string]interface{}) + if ok { + fieldWeights, ok := weightsObject[fmt.Sprintf("%sFields", name)] + if ok { + fieldWeightsArray, ok := fieldWeights.([]interface{}) + if ok { + return fieldWeightsArray, ok + } + } + } + } + + return nil, false +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/ent/class_settings_test.go 
b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/ent/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..70476b4afdec3c40d7e9f81d979d9741ca5c9913 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/ent/class_settings_test.go @@ -0,0 +1,263 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "encoding/json" + "testing" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + type fields struct { + cfg moduletools.ClassConfig + } + tests := []struct { + name string + fields fields + wantErr bool + }{ + { + name: "should not pass with empty config", + wantErr: true, + }, + { + name: "should not pass with nil config", + fields: fields{ + cfg: nil, + }, + wantErr: true, + }, + { + name: "should not pass with nil imageFields", + fields: fields{ + cfg: newConfigBuilder(). 
+ addSetting("model", "model").addSetting("imageFields", nil).build(), + }, + wantErr: true, + }, + { + name: "should not pass with fault imageFields value", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []string{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty string in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{""}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with int value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{1.0}).build(), + }, + wantErr: true, + }, + { + name: "should pass with proper value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{"field"}).build(), + }, + }, + { + name: "should not pass with both values in imageFields and textFields defined", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("imageFields", []interface{}{"imageField"}). + addSetting("textFields", []interface{}{"textField"}). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with 2 imageFields and 2 textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with values in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + addWeights([]interface{}{1, 2}, []interface{}{1, 2}). 
+ build(), + }, + wantErr: true, + }, + { + name: "should not pass with values in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with values in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, "aaaa"}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{json.Number("1"), json.Number("2")}, []interface{}{json.Number("3")}). + build(), + }, + wantErr: true, + }, + { + name: "should pass with 1 imageFields defined", + fields: fields{ + cfg: newConfigBuilder(). 
+ addSetting("imageFields", []interface{}{"image1"}). + build(), + }, + wantErr: false, + }, + { + name: "should pass with 1 textFields defined", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"text1"}). + build(), + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.fields.cfg) + if err := ic.Validate(); (err != nil) != tt.wantErr { + t.Errorf("classSettings.Validate() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +type builder struct { + fakeClassConfig *fakeClassConfig +} + +func newConfigBuilder() *builder { + return &builder{ + fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}}, + } +} + +func (b *builder) addSetting(name string, value interface{}) *builder { + b.fakeClassConfig.config[name] = value + return b +} + +func (b *builder) addWeights(textWeights, imageWeights []interface{}) *builder { + if textWeights != nil || imageWeights != nil { + weightSettings := map[string]interface{}{} + if textWeights != nil { + weightSettings["textFields"] = textWeights + } + if imageWeights != nil { + weightSettings["imageFields"] = imageWeights + } + b.fakeClassConfig.config["weights"] = weightSettings + } + return b +} + +func (b *builder) build() *fakeClassConfig { + return b.fakeClassConfig +} + +type fakeClassConfig struct { + config map[string]interface{} +} + +func (c fakeClassConfig) Class() map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Property(propName string) map[string]interface{} { + return c.config +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + 
return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/module.go b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..ce6bcfe80c652239b23bb619737f29a83c3146e2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/module.go @@ -0,0 +1,137 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modm2mvjinaai + +import ( + "context" + "os" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2multivec-jinaai/clients" + "github.com/weaviate/weaviate/modules/multi2multivec-jinaai/ent" + "github.com/weaviate/weaviate/modules/multi2multivec-jinaai/vectorizer" +) + +const Name = "multi2multivec-jinaai" + +func New() *Module { + return &Module{} +} + +type Module struct { + vectorizer *vectorizer.Vectorizer + nearImageGraphqlProvider modulecapabilities.GraphQLArguments + nearImageSearcher modulecapabilities.Searcher[[][]float32] + nearTextGraphqlProvider modulecapabilities.GraphQLArguments + nearTextSearcher modulecapabilities.Searcher[[][]float32] + nearTextTransformer modulecapabilities.TextTransform + metaClient metaClient + logger logrus.FieldLogger +} + +type metaClient interface { + MetaInfo() (map[string]interface{}, error) +} + +func (m *Module) Name() string { + return Name +} + +func (m *Module) Type() modulecapabilities.ModuleType { + return modulecapabilities.Multi2Multivec 
+} + +func (m *Module) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initNearImage(); err != nil { + return errors.Wrap(err, "init near image") + } + + return nil +} + +func (m *Module) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init near text") + } + + return nil +} + +func (m *Module) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("JINAAI_APIKEY") + client := clients.New(apiKey, timeout, logger) + + m.vectorizer = vectorizer.New(client) + m.metaClient = client + + return nil +} + +func (m *Module) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([][]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg) +} + +func (m *Module) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][][]float32, []models.AdditionalProperties, map[int]error) { + return batch.VectorizeBatch(ctx, objs, skipObject, cfg, m.logger, m.vectorizer.Object) +} + +func (m *Module) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + ichek := ent.NewClassSettings(cfg) + mediaProps, err := ichek.Properties() + return false, mediaProps, err +} + +func (m *Module) MetaInfo() (map[string]interface{}, error) { + return m.metaClient.MetaInfo() +} + +func (m *Module) 
VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([][]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[][]float32](New()) + _ = modulecapabilities.InputVectorizer[[][]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/nearArguments.go b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/nearArguments.go new file mode 100644 index 0000000000000000000000000000000000000000..69d3a34638be7f45b949a7ade1272979f8b84068 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/nearArguments.go @@ -0,0 +1,57 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modm2mvjinaai + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearImage" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *Module) initNearImage() error { + m.nearImageSearcher = nearImage.NewSearcher(m.vectorizer) + m.nearImageGraphqlProvider = nearImage.New() + return nil +} + +func (m *Module) initNearText() error { + m.nearTextSearcher = nearText.NewSearcher(m.vectorizer) + m.nearTextGraphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *Module) Arguments() map[string]modulecapabilities.GraphQLArgument { + arguments := map[string]modulecapabilities.GraphQLArgument{} + for name, arg := range m.nearImageGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearTextGraphqlProvider.Arguments() { + arguments[name] = arg + } + return arguments +} + +func (m *Module) VectorSearches() map[string]modulecapabilities.VectorForParams[[][]float32] { + vectorSearches := map[string]modulecapabilities.VectorForParams[[][]float32]{} + for name, arg := range m.nearImageSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearTextSearcher.VectorSearches() { + vectorSearches[name] = arg + } + return vectorSearches +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[][]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..336fe287b80ff345a85f50c49aa89727a2937015 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/vectorizer/fakes_for_test.go @@ -0,0 +1,92 @@ +// _ _ +// __ _____ __ ___ ___ __ 
_| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" +) + +type builder struct { + fakeClassConfig *fakeClassConfig +} + +func newConfigBuilder() *builder { + return &builder{ + fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}}, + } +} + +func (b *builder) addSetting(name string, value interface{}) *builder { + b.fakeClassConfig.config[name] = value + return b +} + +func (b *builder) build() *fakeClassConfig { + return b.fakeClassConfig +} + +type fakeClassConfig struct { + config map[string]interface{} +} + +func (c fakeClassConfig) Class() map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Property(propName string) map[string]interface{} { + return c.config +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +type fakeClient struct{} + +func (c *fakeClient) Vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationCLIPResult[[][]float32], error) { + result := &modulecomponents.VectorizationCLIPResult[[][]float32]{ + ImageVectors: [][][]float32{{{10.0, 20.0, 30.0, 40.0, 50.0}}}, + } + return result, nil +} + +func (c *fakeClient) VectorizeQuery(ctx 
context.Context, texts []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[][]float32], error) { + result := &modulecomponents.VectorizationResult[[][]float32]{ + Vector: [][][]float32{{{1.0, 2.0, 3.0, 4.0, 5.0}}}, + } + return result, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/vectorizer/texts.go b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/vectorizer/texts.go new file mode 100644 index 0000000000000000000000000000000000000000..dc8ff0d66810a081fbb458c4171684569bce0908 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/vectorizer/texts.go @@ -0,0 +1,35 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" +) + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([][]float32, error) { + if len(inputs) != 1 { + return nil, errors.Errorf("only 1 query can be vectorized, passed %v queries", len(inputs)) + } + res, err := v.client.VectorizeQuery(ctx, inputs, cfg) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + if len(inputs) != len(res.Vector) { + return nil, errors.New("inputs are not equal to vectors returned") + } + return res.Vector[0], nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/vectorizer/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/vectorizer/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..0384a83a0cc72c472d6c53bdfe4dcb6daa0616ce --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/vectorizer/vectorizer.go @@ -0,0 +1,115 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2multivec-jinaai/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" +) + +type Vectorizer struct { + client Client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +type Client interface { + Vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig, + ) (*modulecomponents.VectorizationCLIPResult[[][]float32], error) + VectorizeQuery(ctx context.Context, texts []string, + cfg moduletools.ClassConfig) (*modulecomponents.VectorizationResult[[][]float32], error) +} + +type ClassSettings interface { + ImageField(property string) bool + ImageFieldsWeights() ([]float32, error) + TextField(property string) bool + TextFieldsWeights() ([]float32, error) +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, + cfg moduletools.ClassConfig, +) ([][]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) VectorizeImage(ctx context.Context, id, image string, cfg moduletools.ClassConfig) ([][]float32, error) { + res, err := v.client.Vectorize(ctx, nil, []string{image}, cfg) + if err != nil { + return 
nil, err + } + if len(res.ImageVectors) != 1 { + return nil, errors.New("empty vector") + } + + return res.ImageVectors[0], nil +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, + cfg moduletools.ClassConfig, +) ([][]float32, error) { + ichek := ent.NewClassSettings(cfg) + + // vectorize image and text + texts := []string{} + images := []string{} + + if object.Properties != nil { + schemamap := object.Properties.(map[string]interface{}) + for _, propName := range moduletools.SortStringKeys(schemamap) { + switch val := schemamap[propName].(type) { + case string: + if ichek.ImageField(propName) { + images = append(images, val) + } + if ichek.TextField(propName) { + texts = append(texts, val) + } + default: + // properties that are not part of the object + } + } + } + + vectors := [][][]float32{} + if len(texts) > 0 || len(images) > 0 { + res, err := v.client.Vectorize(ctx, texts, images, cfg) + if err != nil { + return nil, err + } + vectors = append(vectors, res.TextVectors...) + vectors = append(vectors, res.ImageVectors...) + + if len(vectors) > 1 { + return nil, errors.Errorf("got more than 1 embedding back: %v", len(vectors)) + } + + return vectors[0], nil + } + + return nil, errors.New("configured properties don't exist or are not of text or blob type") +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/vectorizer/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/vectorizer/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..2ba19ca5af651b7e1d8a6fe7c20d68ecbd9fa582 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2multivec-jinaai/vectorizer/vectorizer_test.go @@ -0,0 +1,99 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. 
All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +const image = "iVBORw0KGgoAAAANSUhEUgAAAGAAAAA/CAYAAAAfQM0aAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpCRjQ5NEM3RDI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpCRjQ5NEM3RTI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOkJGNDk0QzdCMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOkJGNDk0QzdDMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+WeGRxAAAB2hJREFUeNrUXFtslUUQ3hJCoQVEKy0k1qQgrRg0vaAJaq1tvJSgaLy8mKDF2IvxBY2Bgm8+iIoxvhB72tTUmKgPigbFKCEtxeKD9hZjAi3GJrYJtqRai7TQB+pMz/zwU/5zzsxe2u4kXwiwZ+bb/Xb/s7v/zEmrra1VTFsFeBRQCtgEuBWwkv5vHPAn4DdAB+B7wBjXcUNDQ8o2dXV1SmDzyhUtLS3tBPyxC9CdrN1ihi/swKuA7YD0BG1uJhQDngdcAnwDeJ86Ole2kLii+J2AFsA+wF9RjRalmEUHaZY8m6RDUYZtn6HPHiRfLm2hck0D7AScAdRH8UokwD2AnwA7UoiUyhaRD/S12dHg+8B1OWA/4BTgqVQCPEJL8haLBNDXEfJt03ziipYH+BJwHFAYJcAWwCeAZQ6CLyPfWyz584nrbCuj74eHwgKsddih2R1ba+jHJ65R1k6PuWNhAd4DZM/BTiWbdhwm5hPXsA0AngY8COgNP4JwSTyu4zE/P18VFhZKP7aNYuouXxFX5Ic8Nc2Ea2D/AfY
CNgIORZ0DdusOfnFxcXDwUD09PZKP76alKDUR16KiIlVQUHDl7/39/Uozpg7Xac45YB0dGrQHHw07KVwJpRRbYiKuyCc8+MhXcyXocP2RnvMvJhr8QIBK08EPbGJiQuqq0mX7KD4GIohi4xVPTU0N6/BRamPwu7u7dZb3/RozkW3IB3lZEkGHayeI8FFVVdWaZAIUcD2Wl5fbHHy024XtC6QBkomA/XHIFb8X0Xamp6efASHqt27dGnkVkcNxVlFRoXJycmwOvuLGNmifVATsD/bLZezgKgKE2J+bm3sKHk3XXUWs4Mz87Oxs24OvOLEN26cUAfvFXAkrlKGBCDNXEbAajldXV1+5ijjP+KCrg855x+3nk2uy8SwDdIIIM1cRI6k+0NraqkZGRmzuKAIbFrYf0Q2UaPOA/Wpra3PBNfHhYHq6HbC5qanpGB7ETgPWc0TApTr7eyDolOaj6LRG+/W2Bn94eJg7+DpcowZ+AGb+642NjYfC3wEdXAdI1uK2Du2ksH2HrcHHfggGX4frNVcRMPh7BwcHN8ZiseuuIr4DvKXib29YX2bhmW+wEqYptsREXC2eWXS44oyfuYqYmpra19LSEnkaRgEG6Nj8gGRHESVCRkaG9Kg+IOyTiGtmZqatnZsOV/zMLnjcsF7KH5AIECVCX1+f6u3tlbg4oLmc2VyDy8HgPshg2yzmCo8aFsdAALzpw9dw23REwJkvHPwjSu92UcwVRcAnAd4LaQ6+CVe2AGivAe5WwhcdGp0aoVgmJuIqnBy2uSa18Buxs4AXAJMO401SjLOGfnziyhYg2GrtcNSxSfJ90pI/n7iyBUA7quKv/IYsxhmiZ/ZRy/x94soWAO1nwL0qnhVw2cD/ZfKBvjod9cEnrmwB0DBh9RUVfxHxhYrnUHLtEn2mlHyMOe6HT1wT7oISGSas4ntNzJmsVFczjnMBN1CbfwGD1BYPID8A/lFzbz5xZQsQnmWfExa6ecNVIsBKWuIlgA0qnjG2PLhsou0aZgF3qfil2fg89ssbrhwBNtB+GN/dLUnQ5kbCHYAnAFMAvGpsoY7OlS0krmOhxx7WLHwAeBLwVahN2uIUswgrPB5T8rRv7DxWqDwM+JaCjzue8b5wZe2C7gJ8quKVJqY599vJ1yZHffCJK0uA+wAfAtZYjIO+Gsi3TfOJK0sAfFP/jpKV+HBtKfkutOTPJ64sAVYD3qXgrmwpxVht6McnrmwBMAP4pjlYdRij3tCHT1xZAuDdermOA836gDKKqWNirob1ASZc2eeAl3QH36A+AGP+ohFWxNVSfYAuV9YKyKUTo/bgo2nUB5RQbImJuFqsD9DhyhbAuDgjMI36gFKX7S3XB5S6egSV2Bh8zYyDYjr4SGYi2yzmMIm5YnFGkFOLSQGNjY3X/BtaLBabWQF5XKcO6gOkZT950gAW6wPWuXoEZXEaOqoPyHLcPqkIwvqALFcCZHJmvqP6gEzH7VOKIKgPyHQlwIVUjRzWB1xw3H4+ubIFGE3VyGF9wKjj9ik3D4L6gFFXArCSTlEEzKe3LMIfwvYDNgcf+4P9csSVLUAXt7GD+oBuYfsuW4OvUR/Q7UoA/G2zaRvbOqEI0xRbYiKulusDTrgSYEg6sxKJIKwP6FLyjDYRV4v1ATpc2QKgNZtu6zTqA5o1ObM/h5eDyMvCtrlZObLgNhRv+jAHvkwqQjDzhYPfrvRvF0VcLdQHaHGNxWKrZv0d//hahcqr8Ccww1kRbwPuVMIXHRqd+ptimZiIq0F9gA2urEcQ2jkVf/tz0WG8ixTjnKEfn7iyBQi2WnuULLlV0qE9FrdzPnFlC4CGRQkvqyQ/MqRh6KtO2S948IkrWwC0XwHPAQ4r85z7w+TL1U8Y+8Q14S4oyjA9703AZ4AqFX8RvoTpN8i3/Bi/p+egHz5xZQsQGCasvqGuZhzj76DdpuIZx8FPuOAviWDG8e8qXl0yXxnHPnGdsf8FGAByGwC02iMZswAAAAB
JRU5ErkJggg==" + +func TestVectorizer(t *testing.T) { + t.Run("should vectorize image", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image"}).build() + + props := map[string]interface{}{ + "image": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) + + t.Run("should vectorize with diff", func(t *testing.T) { + type testCase struct { + name string + input *models.Object + } + + props := map[string]interface{}{ + "image": image, + "text": "text", + "description": "non-vectorizable", + } + + tests := []testCase{ + { + name: "noop comp", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + { + name: "one vectorizable prop changed (1)", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + { + name: "one vectorizable prop changed (2)", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder(). + addSetting("imageFields", []interface{}{"image"}). 
+ build() + + vector, _, err := vectorizer.Object(context.Background(), test.input, config) + + require.Nil(t, err) + assert.Equal(t, [][]float32{{10.0, 20.0, 30.0, 40.0, 50.0}}, vector) + }) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..b63877b763e3806992fe8b5f3919bf6b72da6056 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/meta.go @@ -0,0 +1,45 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + + "github.com/pkg/errors" +) + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + req, err := http.NewRequestWithContext(context.Background(), "GET", v.url("/meta"), nil) + if err != nil { + return nil, errors.Wrap(err, "create GET meta request") + } + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send GET meta request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read meta response body") + } + + var resBody map[string]interface{} + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, "unmarshal meta response body") + } + return resBody, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/meta_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..0a6b06889eb79a432e715f84d5e68a7f4ebe381d --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/meta_test.go @@ -0,0 +1,59 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + assert.NotNil(t, meta["model"] != nil) + assert.NotNil(t, meta["version"] != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "model": "ImageBindModel", + "version": 1 +}` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/startup.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/startup.go new file mode 100644 index 0000000000000000000000000000000000000000..f2a6ccc2140b1d5c0f18042a236dca6067c21519 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/startup.go @@ -0,0 +1,68 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// 
\_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "time" + + "github.com/pkg/errors" +) + +func (v *vectorizer) WaitForStartup(initCtx context.Context, + interval time.Duration, +) error { + t := time.NewTicker(interval) + defer t.Stop() + expired := initCtx.Done() + var lastErr error + for { + select { + case <-t.C: + lastErr = v.checkReady(initCtx) + if lastErr == nil { + return nil + } + v.logger. + WithField("action", "multi2vec_remote_wait_for_startup"). + WithError(lastErr).Warnf("multi2vec-bind inference service not ready") + case <-expired: + return errors.Wrapf(lastErr, "init context expired before remote was ready") + } + } +} + +func (v *vectorizer) checkReady(initCtx context.Context) error { + // spawn a new context (derived on the overall context) which is used to + // consider an individual request timed out + requestCtx, cancel := context.WithTimeout(initCtx, 500*time.Millisecond) + defer cancel() + + req, err := http.NewRequestWithContext(requestCtx, http.MethodGet, + v.url("/.well-known/ready"), nil) + if err != nil { + return errors.Wrap(err, "create check ready request") + } + + res, err := v.httpClient.Do(req) + if err != nil { + return errors.Wrap(err, "send check ready request") + } + + defer res.Body.Close() + if res.StatusCode > 299 { + return errors.Errorf("not ready: status %d", res.StatusCode) + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/startup_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/startup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ff65558b1e87f7f143c6c43afdaf422dd3829c34 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/startup_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ 
_` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWaitForStartup(t *testing.T) { + t.Run("when the server is immediately ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + err := c.WaitForStartup(context.Background(), 50*time.Millisecond) + + assert.Nil(t, err) + }) + + t.Run("when the server is down", func(t *testing.T) { + c := New("http://nothing-running-at-this-url", 0, nullLogger()) + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 150*time.Millisecond) + + require.NotNil(t, err, nullLogger()) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is alive, but not ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(1 * time.Minute), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is initially not ready, but then becomes ready", + func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(100 * time.Millisecond), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := 
context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.Nil(t, err) + }) +} + +type testReadyHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testReadyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/.well-known/ready", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.WriteHeader(http.StatusNoContent) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..4579473860aa4bcf8f18011740c53b738079437b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/vectorizer.go @@ -0,0 +1,124 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/multi2vec-bind/ent" +) + +type vectorizer struct { + origin string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(origin string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + origin: origin, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, + texts, images, audio, video, imu, thermal, depth []string, +) (*ent.VectorizationResult, error) { + body, err := json.Marshal(vecRequest{ + Texts: texts, + Images: images, + Audio: audio, + Video: video, + IMU: imu, + Thermal: thermal, + Depth: depth, + }) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", v.url("/vectorize"), + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody vecResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if res.StatusCode != 200 { + if resBody.Error != "" { + return nil, errors.Errorf("fail with status %d: %s", res.StatusCode, + resBody.Error) + } + return nil, errors.Errorf("fail with status %d", res.StatusCode) + } + + return &ent.VectorizationResult{ + TextVectors: resBody.TextVectors, + ImageVectors: resBody.ImageVectors, + AudioVectors: resBody.AudioVectors, + VideoVectors: resBody.VideoVectors, + IMUVectors: resBody.IMUVectors, + ThermalVectors: resBody.ThermalVectors, + DepthVectors: resBody.DepthVectors, + }, nil +} + +func (v *vectorizer) url(path string) string { + return fmt.Sprintf("%s%s", v.origin, path) +} + +type vecRequest struct { + Texts []string `json:"texts,omitempty"` + Images []string `json:"images,omitempty"` + Audio []string `json:"audio,omitempty"` + Video []string `json:"video,omitempty"` + IMU []string `json:"imu,omitempty"` + Thermal []string `json:"thermal,omitempty"` + Depth []string `json:"depth,omitempty"` +} + +type vecResponse struct { + TextVectors [][]float32 `json:"textVectors,omitempty"` + ImageVectors [][]float32 `json:"imageVectors,omitempty"` + AudioVectors [][]float32 `json:"audioVectors,omitempty"` + VideoVectors [][]float32 `json:"videoVectors,omitempty"` + IMUVectors [][]float32 `json:"imuVectors,omitempty"` + ThermalVectors [][]float32 `json:"thermalVectors,omitempty"` + DepthVectors [][]float32 `json:"depthVectors,omitempty"` + Error string `json:"error,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..bd7fe4365788ad406a6180f2cfcd1e369f707e54 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/clients/vectorizer_test.go @@ -0,0 +1,95 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V 
V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/modules/multi2vec-clip/ent" +) + +func TestVectorize(t *testing.T) { + t.Run("when the response is successful", func(t *testing.T) { + server := httptest.NewServer(&testVectorizeHandler{ + t: t, + res: vecResponse{ + TextVectors: [][]float32{ + {0, 1, 2}, + }, + ImageVectors: [][]float32{ + {1, 2, 3}, + }, + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + res, err := c.Vectorize(context.Background(), []string{"hello"}, + []string{"image-encoding"}, nil, nil, nil, nil, nil) + + assert.Nil(t, err) + require.NotNil(t, res) + resp := &ent.VectorizationResult{ + TextVectors: [][]float32{ + {0, 1, 2}, + }, + ImageVectors: [][]float32{ + {1, 2, 3}, + }, + } + assert.Equal(t, res.TextVectors, resp.TextVectors) + assert.Equal(t, res.ImageVectors, resp.ImageVectors) + assert.Equal(t, res.VideoVectors, resp.VideoVectors) + assert.Equal(t, res.AudioVectors, resp.AudioVectors) + assert.Equal(t, res.ThermalVectors, resp.ThermalVectors) + assert.Equal(t, res.DepthVectors, resp.DepthVectors) + assert.Equal(t, res.IMUVectors, resp.IMUVectors) + }) + + t.Run("when the server has a an error", func(t *testing.T) { + server := httptest.NewServer(&testVectorizeHandler{ + t: t, + res: vecResponse{ + Error: "some error from the server", + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + _, err := c.Vectorize(context.Background(), []string{"hello"}, + []string{"image-encoding"}, []string{}, []string{}, []string{}, []string{}, []string{}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "some error from the server") + }) +} + +type 
testVectorizeHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + res vecResponse +} + +func (f *testVectorizeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/vectorize", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.res.Error != "" { + w.WriteHeader(500) + } + jsonBytes, _ := json.Marshal(f.res) + w.Write(jsonBytes) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/config.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/config.go new file mode 100644 index 0000000000000000000000000000000000000000..31c433c85a72390ae31d3e2870e51386a86a4016 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modbind + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/multi2vec-clip/vectorizer" +) + +func (m *BindModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *BindModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *BindModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + icheck := vectorizer.NewClassSettings(cfg) + return icheck.Validate() +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/ent/vectorization_result.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/ent/vectorization_result.go new file mode 100644 index 0000000000000000000000000000000000000000..bb05ebd17c90b4f0b3a2f607027bc54ec373f71c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/ent/vectorization_result.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationResult struct { + TextVectors [][]float32 + ImageVectors [][]float32 + AudioVectors [][]float32 + VideoVectors [][]float32 + IMUVectors [][]float32 + ThermalVectors [][]float32 + DepthVectors [][]float32 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/module.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/module.go new file mode 100644 index 0000000000000000000000000000000000000000..df997f974895397368d433c7eb8b96cb315ca63b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/module.go @@ -0,0 +1,191 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modbind + +import ( + "context" + "os" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-bind/clients" + "github.com/weaviate/weaviate/modules/multi2vec-bind/vectorizer" +) + +const Name = "multi2vec-bind" + +func New() *BindModule { + return &BindModule{} +} + +type BindModule struct { + bindVectorizer bindVectorizer + nearImageGraphqlProvider modulecapabilities.GraphQLArguments + nearImageSearcher modulecapabilities.Searcher[[]float32] + nearAudioGraphqlProvider modulecapabilities.GraphQLArguments + nearAudioSearcher modulecapabilities.Searcher[[]float32] + nearVideoGraphqlProvider modulecapabilities.GraphQLArguments + nearVideoSearcher modulecapabilities.Searcher[[]float32] + nearIMUGraphqlProvider modulecapabilities.GraphQLArguments + 
nearIMUSearcher modulecapabilities.Searcher[[]float32] + nearThermalGraphqlProvider modulecapabilities.GraphQLArguments + nearThermalSearcher modulecapabilities.Searcher[[]float32] + nearDepthGraphqlProvider modulecapabilities.GraphQLArguments + nearDepthSearcher modulecapabilities.Searcher[[]float32] + textVectorizer textVectorizer + nearTextGraphqlProvider modulecapabilities.GraphQLArguments + nearTextSearcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + metaClient metaClient + logger logrus.FieldLogger +} + +type metaClient interface { + MetaInfo() (map[string]interface{}, error) +} + +type bindVectorizer interface { + Object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig) ([]float32, models.AdditionalProperties, error) + VectorizeImage(ctx context.Context, id, image string, cfg moduletools.ClassConfig) ([]float32, error) + VectorizeAudio(ctx context.Context, audio string, cfg moduletools.ClassConfig) ([]float32, error) + VectorizeVideo(ctx context.Context, video string, cfg moduletools.ClassConfig) ([]float32, error) + VectorizeIMU(ctx context.Context, imu string, cfg moduletools.ClassConfig) ([]float32, error) + VectorizeThermal(ctx context.Context, thermal string, cfg moduletools.ClassConfig) ([]float32, error) + VectorizeDepth(ctx context.Context, depth string, cfg moduletools.ClassConfig) ([]float32, error) +} + +type textVectorizer interface { + Texts(ctx context.Context, input []string, + cfg moduletools.ClassConfig) ([]float32, error) +} + +func (m *BindModule) Name() string { + return Name +} + +func (m *BindModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Multi2Vec +} + +func (m *BindModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init vectorizer") 
+ } + + if err := m.initNearImage(); err != nil { + return errors.Wrap(err, "init near image") + } + + if err := m.initNearAudio(); err != nil { + return errors.Wrap(err, "init near audio") + } + + if err := m.initNearVideo(); err != nil { + return errors.Wrap(err, "init near video") + } + + if err := m.initNearIMU(); err != nil { + return errors.Wrap(err, "init near imu") + } + + if err := m.initNearThermal(); err != nil { + return errors.Wrap(err, "init near thermal") + } + + if err := m.initNearDepth(); err != nil { + return errors.Wrap(err, "init near depth") + } + + return nil +} + +func (m *BindModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init near text") + } + + return nil +} + +func (m *BindModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + // TODO: proper config management + uri := os.Getenv("BIND_INFERENCE_API") + if uri == "" { + return errors.Errorf("required variable BIND_INFERENCE_API is not set") + } + + client := clients.New(uri, timeout, logger) + if err := client.WaitForStartup(ctx, 1*time.Second); err != nil { + return errors.Wrap(err, "init remote vectorizer") + } + + m.bindVectorizer = vectorizer.New(client) + m.textVectorizer = vectorizer.New(client) + m.metaClient = client + + return nil +} + +func (m *BindModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.bindVectorizer.Object(ctx, obj, cfg) +} + +func (m *BindModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + ichek := 
vectorizer.NewClassSettings(cfg) + mediaProps, err := ichek.Properties() + return true, mediaProps, err +} + +func (m *BindModule) MetaInfo() (map[string]interface{}, error) { + return m.metaClient.MetaInfo() +} + +func (m *BindModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + return batch.VectorizeBatch(ctx, objs, skipObject, cfg, m.logger, m.bindVectorizer.Object) +} + +func (m *BindModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.textVectorizer.Texts(ctx, []string{input}, cfg) +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.InputVectorizer[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/nearArguments.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/nearArguments.go new file mode 100644 index 0000000000000000000000000000000000000000..73810851163c6ad8dd0659c48531bff0f17771bc --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/nearArguments.go @@ -0,0 +1,122 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modbind + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearAudio" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearDepth" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearImage" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearImu" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearThermal" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearVideo" +) + +func (m *BindModule) initNearText() error { + m.nearTextSearcher = nearText.NewSearcher(m.textVectorizer) + m.nearTextGraphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *BindModule) initNearImage() error { + m.nearImageSearcher = nearImage.NewSearcher(m.bindVectorizer) + m.nearImageGraphqlProvider = nearImage.New() + return nil +} + +func (m *BindModule) initNearAudio() error { + m.nearAudioSearcher = nearAudio.NewSearcher(m.bindVectorizer) + m.nearAudioGraphqlProvider = nearAudio.New() + return nil +} + +func (m *BindModule) initNearVideo() error { + m.nearVideoSearcher = nearVideo.NewSearcher(m.bindVectorizer) + m.nearVideoGraphqlProvider = nearVideo.New() + return nil +} + +func (m *BindModule) initNearIMU() error { + m.nearIMUSearcher = nearImu.NewSearcher(m.bindVectorizer) + m.nearIMUGraphqlProvider = nearImu.New() + return nil +} + +func (m *BindModule) initNearThermal() error { + m.nearThermalSearcher = nearThermal.NewSearcher(m.bindVectorizer) + m.nearThermalGraphqlProvider = nearThermal.New() + return nil +} + +func (m *BindModule) initNearDepth() error { + m.nearDepthSearcher = nearDepth.NewSearcher(m.bindVectorizer) + m.nearDepthGraphqlProvider = nearDepth.New() + return nil +} + +func (m *BindModule) Arguments() 
map[string]modulecapabilities.GraphQLArgument { + arguments := map[string]modulecapabilities.GraphQLArgument{} + for name, arg := range m.nearTextGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearImageGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearAudioGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearVideoGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearIMUGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearThermalGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearDepthGraphqlProvider.Arguments() { + arguments[name] = arg + } + return arguments +} + +func (m *BindModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearches := map[string]modulecapabilities.VectorForParams[[]float32]{} + for name, arg := range m.nearTextSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearImageSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearAudioSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearVideoSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearIMUSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearThermalSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearDepthSearcher.VectorSearches() { + vectorSearches[name] = arg + } + return vectorSearches +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/class_settings.go new file mode 
100644 index 0000000000000000000000000000000000000000..be75aa2ab096bbc702a80085d0a256ecbf9c9e88 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/class_settings.go @@ -0,0 +1,298 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "fmt" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +type classSettings struct { + cfg moduletools.ClassConfig + base *basesettings.BaseClassSettings +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, base: basesettings.NewBaseClassSettings(cfg, false)} +} + +func (ic *classSettings) ImageField(property string) bool { + return ic.field("imageFields", property) +} + +func (ic *classSettings) ImageFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("image") +} + +func (ic *classSettings) TextField(property string) bool { + return ic.field("textFields", property) +} + +func (ic *classSettings) TextFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("text") +} + +func (ic *classSettings) AudioField(property string) bool { + return ic.field("audioFields", property) +} + +func (ic *classSettings) AudioFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("audio") +} + +func (ic *classSettings) VideoField(property string) bool { + return ic.field("videoFields", property) +} + +func (ic *classSettings) VideoFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("video") +} + +func (ic *classSettings) IMUField(property string) bool { + return ic.field("imuFields", property) +} + +func (ic *classSettings) 
IMUFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("imu") +} + +func (ic *classSettings) ThermalField(property string) bool { + return ic.field("thermalFields", property) +} + +func (ic *classSettings) ThermalFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("thermal") +} + +func (ic *classSettings) DepthField(property string) bool { + return ic.field("depthFields", property) +} + +func (ic *classSettings) DepthFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("depth") +} + +func (ic *classSettings) Properties() ([]string, error) { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return nil, errors.New("empty config") + } + props := make([]string, 0) + + fields := []string{"imageFields", "textFields", "audioFields", "videoFields", "imuFields", "thermalFields", "depthFields"} + + for _, field := range fields { + fields, ok := ic.cfg.Class()[field] + if !ok { + continue + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return nil, errors.Errorf("%s must be an array", field) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return nil, errors.Errorf("%s must be a string", field) + } + props = append(props, v) + } + } + return props, nil +} + +func (ic *classSettings) field(name, property string) bool { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return false + } + + fields, ok := ic.cfg.Class()[name] + if !ok { + return false + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return false + } + + fieldNames := make([]string, len(fieldsArray)) + for i, value := range fieldsArray { + fieldNames[i] = value.(string) + } + + for i := range fieldNames { + if fieldNames[i] == property { + return true + } + } + + return false +} + +func (ic *classSettings) Validate() error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as 
Explore{} + return errors.New("empty config") + } + + imageFields, imageFieldsOk := ic.cfg.Class()["imageFields"] + textFields, textFieldsOk := ic.cfg.Class()["textFields"] + audioFields, audioFieldsOk := ic.cfg.Class()["audioFields"] + videoFields, videoFieldsOk := ic.cfg.Class()["videoFields"] + imuFields, imuFieldsOk := ic.cfg.Class()["imuFields"] + thermalFields, thermalFieldsOk := ic.cfg.Class()["thermalFields"] + depthFields, depthFieldsOk := ic.cfg.Class()["depthFields"] + + if !imageFieldsOk && !textFieldsOk && !audioFieldsOk && !videoFieldsOk && + !imuFieldsOk && !thermalFieldsOk && !depthFieldsOk { + return errors.New("textFields or imageFields or audioFields or videoFields " + + "or imuFields or thermalFields or depthFields setting needs to be present") + } + + if imageFieldsOk { + if err := ic.validateWeightFieldCount("image", imageFields); err != nil { + return err + } + } + if textFieldsOk { + if err := ic.validateWeightFieldCount("text", textFields); err != nil { + return err + } + } + if audioFieldsOk { + if err := ic.validateWeightFieldCount("audio", audioFields); err != nil { + return err + } + } + if videoFieldsOk { + if err := ic.validateWeightFieldCount("video", videoFields); err != nil { + return err + } + } + if imuFieldsOk { + if err := ic.validateWeightFieldCount("imu", imuFields); err != nil { + return err + } + } + if thermalFieldsOk { + if err := ic.validateWeightFieldCount("thermal", thermalFields); err != nil { + return err + } + } + if depthFieldsOk { + if err := ic.validateWeightFieldCount("depth", depthFields); err != nil { + return err + } + } + + return nil +} + +func (ic *classSettings) validateWeightFieldCount(name string, fields interface{}) error { + imageFieldsCount, err := ic.validateFields(name, fields) + if err != nil { + return err + } + err = ic.validateWeights(name, imageFieldsCount) + if err != nil { + return err + } + return nil +} + +func (ic *classSettings) validateFields(name string, fields interface{}) (int, 
error) { + fieldsArray, ok := fields.([]interface{}) + if !ok { + return 0, errors.Errorf("%sFields must be an array", name) + } + + if len(fieldsArray) == 0 { + return 0, errors.Errorf("must contain at least one %s field name in %sFields", name, name) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return 0, errors.Errorf("%sField must be a string", name) + } + if len(v) == 0 { + return 0, errors.Errorf("%sField values cannot be empty", name) + } + } + + return len(fieldsArray), nil +} + +func (ic *classSettings) validateWeights(name string, count int) error { + weights, ok := ic.getWeights(name) + if ok { + if len(weights) != count { + return errors.Errorf("weights.%sFields does not equal number of %sFields", name, name) + } + _, err := ic.getWeightsArray(weights) + if err != nil { + return err + } + } + + return nil +} + +func (ic *classSettings) getWeights(name string) ([]interface{}, bool) { + weights, ok := ic.cfg.Class()["weights"] + if ok { + weightsObject, ok := weights.(map[string]interface{}) + if ok { + fieldWeights, ok := weightsObject[fmt.Sprintf("%sFields", name)] + if ok { + fieldWeightsArray, ok := fieldWeights.([]interface{}) + if ok { + return fieldWeightsArray, ok + } + } + } + } + + return nil, false +} + +func (ic *classSettings) getWeightsArray(weights []interface{}) ([]float32, error) { + weightsArray := make([]float32, len(weights)) + for i := range weights { + weight, err := ic.getNumber(weights[i]) + if err != nil { + return nil, err + } + weightsArray[i] = weight + } + return weightsArray, nil +} + +func (ic *classSettings) getFieldsWeights(name string) ([]float32, error) { + weights, ok := ic.getWeights(name) + if ok { + return ic.getWeightsArray(weights) + } + return nil, nil +} + +func (ic *classSettings) getNumber(in interface{}) (float32, error) { + return ic.base.GetNumber(in) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/class_settings_test.go 
b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..59545efdbc395ba9040b6be2e6fbc892ba825b5a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/class_settings_test.go @@ -0,0 +1,226 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer_test + +import ( + "encoding/json" + "testing" + + "github.com/weaviate/weaviate/modules/multi2vec-bind/vectorizer" + + "github.com/weaviate/weaviate/entities/moduletools" +) + +func Test_classSettings_Validate(t *testing.T) { + type fields struct { + cfg moduletools.ClassConfig + } + tests := []struct { + name string + fields fields + wantErr bool + }{ + { + name: "should not pass with empty config", + wantErr: true, + }, + { + name: "should not pass with nil config", + fields: fields{ + cfg: nil, + }, + wantErr: true, + }, + { + name: "should not pass with nil imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", nil).build(), + }, + wantErr: true, + }, + { + name: "should not pass with fault imageFields value", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []string{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty string in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{""}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with int value in imageFields", + fields: fields{ + cfg: 
newConfigBuilder().addSetting("imageFields", []interface{}{1.0}).build(), + }, + wantErr: true, + }, + { + name: "should pass with proper value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{"field"}).build(), + }, + }, + { + name: "should pass with proper value in imageFields and textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("imageFields", []interface{}{"imageField"}). + addSetting("textFields", []interface{}{"textField"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + addWeights([]interface{}{1, 2}, []interface{}{1, 2}, nil, nil, nil, nil, nil). + build(), + }, + }, + { + name: "should pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}, nil, nil, nil, nil, nil). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}, nil, nil, nil, nil, nil). 
+ build(), + }, + }, + { + name: "should not pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1}, []interface{}{1}, nil, nil, nil, nil, nil). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, "aaaa"}, []interface{}{1}, nil, nil, nil, nil, nil). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{json.Number("1"), json.Number("2")}, []interface{}{json.Number("3")}, nil, nil, nil, nil, nil). + build(), + }, + }, + { + name: "should pass with proper values in all fields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addSetting("audioFields", []interface{}{"audioField1"}). + addSetting("videoFields", []interface{}{"videoField1"}). + addSetting("imuFields", []interface{}{"imuField1"}). + addSetting("thermalFields", []interface{}{"thermalField1"}). + addSetting("depthFields", []interface{}{"depthField1", "depthField2"}). + build(), + }, + }, + { + name: "should pass with proper values in all fields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). 
+ addSetting("imageFields", []interface{}{"imageField1"}). + addSetting("audioFields", []interface{}{"audioField1"}). + addSetting("videoFields", []interface{}{"videoField1"}). + addSetting("imuFields", []interface{}{"imuField1"}). + addSetting("thermalFields", []interface{}{"thermalField1"}). + addSetting("depthFields", []interface{}{"depthField1", "depthField2"}). + addWeights([]interface{}{1, 2}, []interface{}{1}, []interface{}{1}, []interface{}{1}, []interface{}{1}, []interface{}{1}, []interface{}{1, 2}). + build(), + }, + }, + { + name: "should pass with proper values audio, video, imu, thermal and depth fields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("audioFields", []interface{}{"audioField1", "audioField2"}). + addSetting("videoFields", []interface{}{"videoField1"}). + addSetting("imuFields", []interface{}{"imuField1"}). + addSetting("thermalFields", []interface{}{"thermalField1"}). + addSetting("depthFields", []interface{}{"depthField1", "depthField2"}). + addWeights(nil, nil, []interface{}{1, 2}, []interface{}{1}, []interface{}{1}, []interface{}{1}, []interface{}{1, 2}). + build(), + }, + }, + { + name: "should not pass with thermal and depth fields and not proper weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("thermalFields", []interface{}{"thermalField1"}). + addSetting("depthFields", []interface{}{"depthField1", "depthField2"}). + addWeights(nil, nil, nil, nil, nil, []interface{}{1, 100}, []interface{}{1, 2}). 
+ build(), + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := vectorizer.NewClassSettings(tt.fields.cfg) + if err := ic.Validate(); (err != nil) != tt.wantErr { + t.Errorf("classSettings.Validate() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..261e64ca997a0cb94974f5f305b89a65aeecf96a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/fakes_for_test.go @@ -0,0 +1,114 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer_test + +import ( + "context" + + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/multi2vec-bind/ent" + "github.com/weaviate/weaviate/usecases/config" +) + +type builder struct { + fakeClassConfig *fakeClassConfig +} + +func newConfigBuilder() *builder { + return &builder{ + fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}}, + } +} + +func (b *builder) addSetting(name string, value interface{}) *builder { + b.fakeClassConfig.config[name] = value + return b +} + +func (b *builder) addWeights(textWeights, imageWeights, audioWeights, + videoWeights, imuWeights, thermalWeights, depthWeights []interface{}, +) *builder { + weightSettings := map[string]interface{}{} + if textWeights != nil { + weightSettings["textFields"] = textWeights + } + if imageWeights != nil { + weightSettings["imageFields"] = imageWeights + } + if audioWeights != nil { + weightSettings["audioFields"] = audioWeights 
+ } + if videoWeights != nil { + weightSettings["videoFields"] = videoWeights + } + if imuWeights != nil { + weightSettings["imuFields"] = imuWeights + } + if thermalWeights != nil { + weightSettings["thermalFields"] = thermalWeights + } + if depthWeights != nil { + weightSettings["depthFields"] = depthWeights + } + if len(weightSettings) > 0 { + b.fakeClassConfig.config["weights"] = weightSettings + } + return b +} + +func (b *builder) build() *fakeClassConfig { + return b.fakeClassConfig +} + +type fakeClassConfig struct { + config map[string]interface{} +} + +func (c fakeClassConfig) Class() map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Property(propName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +type fakeClient struct{} + +func (c *fakeClient) Vectorize(ctx context.Context, + texts, images, audio, video, imu, thermal, depth []string, +) (*ent.VectorizationResult, error) { + result := &ent.VectorizationResult{ + TextVectors: [][]float32{{1.0, 2.0, 3.0, 4.0, 5.0}}, + ImageVectors: [][]float32{{10.0, 20.0, 30.0, 40.0, 50.0}}, + } + return result, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/texts.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/texts.go new file mode 100644 index 0000000000000000000000000000000000000000..dfe49f5030433b22f543b36fae3d1dc2e74b30e6 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/texts.go @@ -0,0 +1,33 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ 
_` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + res, err := v.client.Vectorize(ctx, inputs, []string{}, []string{}, []string{}, []string{}, []string{}, []string{}) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + if len(inputs) != len(res.TextVectors) { + return nil, errors.New("inputs are not equal to vectors returned") + } + return libvectorizer.CombineVectors(res.TextVectors), nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..f8af70b53ae7801c81cfd217b8722e02ce38a042 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/vectorizer.go @@ -0,0 +1,229 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-bind/ent" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +type Vectorizer struct { + client Client +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + } +} + +type Client interface { + Vectorize(ctx context.Context, + texts, images, audio, video, imu, thermal, depth []string, + ) (*ent.VectorizationResult, error) +} + +type ClassSettings interface { + ImageField(property string) bool + ImageFieldsWeights() ([]float32, error) + TextField(property string) bool + TextFieldsWeights() ([]float32, error) + AudioField(property string) bool + AudioFieldsWeights() ([]float32, error) + VideoField(property string) bool + VideoFieldsWeights() ([]float32, error) + IMUField(property string) bool + IMUFieldsWeights() ([]float32, error) + ThermalField(property string) bool + ThermalFieldsWeights() ([]float32, error) + DepthField(property string) bool + DepthFieldsWeights() ([]float32, error) + Properties() ([]string, error) +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) VectorizeImage(ctx context.Context, id, image string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.Vectorize(ctx, nil, []string{image}, nil, nil, nil, nil, nil) + if err != nil { + return nil, err + } + return v.getVector(res.ImageVectors) +} + +func (v *Vectorizer) VectorizeAudio(ctx context.Context, audio string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.Vectorize(ctx, nil, nil, []string{audio}, nil, nil, nil, nil) + if err != nil { + 
return nil, err + } + return v.getVector(res.AudioVectors) +} + +func (v *Vectorizer) VectorizeVideo(ctx context.Context, video string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.Vectorize(ctx, nil, nil, nil, []string{video}, nil, nil, nil) + if err != nil { + return nil, err + } + return v.getVector(res.VideoVectors) +} + +func (v *Vectorizer) VectorizeIMU(ctx context.Context, imu string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.Vectorize(ctx, nil, nil, nil, nil, []string{imu}, nil, nil) + if err != nil { + return nil, err + } + return v.getVector(res.IMUVectors) +} + +func (v *Vectorizer) VectorizeThermal(ctx context.Context, thermal string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.Vectorize(ctx, nil, nil, nil, nil, nil, []string{thermal}, nil) + if err != nil { + return nil, err + } + return v.getVector(res.ThermalVectors) +} + +func (v *Vectorizer) VectorizeDepth(ctx context.Context, depth string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.Vectorize(ctx, nil, nil, nil, nil, nil, nil, []string{depth}) + if err != nil { + return nil, err + } + return v.getVector(res.DepthVectors) +} + +func (v *Vectorizer) getVector(vectors [][]float32) ([]float32, error) { + if len(vectors) != 1 { + return nil, errors.New("empty vector") + } + return vectors[0], nil +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, error) { + icheck := NewClassSettings(cfg) + + // vectorize image and text + var texts, images, audio, video, imu, thermal, depth []string + + if object.Properties != nil { + schemamap := object.Properties.(map[string]interface{}) + for _, propName := range moduletools.SortStringKeys(schemamap) { + switch typed := schemamap[propName].(type) { + case string: + if icheck.ImageField(propName) { + images = append(images, typed) + } + if icheck.TextField(propName) { + texts = 
append(texts, typed) + } + if icheck.AudioField(propName) { + audio = append(audio, typed) + } + if icheck.VideoField(propName) { + video = append(video, typed) + } + if icheck.IMUField(propName) { + imu = append(imu, typed) + } + if icheck.ThermalField(propName) { + thermal = append(thermal, typed) + } + if icheck.DepthField(propName) { + depth = append(depth, typed) + } + + case []string: + if icheck.TextField(propName) { + texts = append(texts, typed...) + } + + default: + } + } + } + + vectors := [][]float32{} + if len(texts) > 0 || len(images) > 0 || len(audio) > 0 || len(video) > 0 || + len(imu) > 0 || len(thermal) > 0 || len(depth) > 0 { + res, err := v.client.Vectorize(ctx, texts, images, audio, video, imu, thermal, depth) + if err != nil { + return nil, err + } + vectors = append(vectors, res.TextVectors...) + vectors = append(vectors, res.ImageVectors...) + vectors = append(vectors, res.AudioVectors...) + vectors = append(vectors, res.VideoVectors...) + vectors = append(vectors, res.IMUVectors...) + vectors = append(vectors, res.ThermalVectors...) + vectors = append(vectors, res.DepthVectors...) 
+ } + weights, err := v.getWeights(icheck) + if err != nil { + return nil, err + } + + return libvectorizer.CombineVectorsWithWeights(vectors, weights), nil +} + +func (v *Vectorizer) getWeights(ichek ClassSettings) ([]float32, error) { + weights := []float32{} + textFieldsWeights, err := ichek.TextFieldsWeights() + if err != nil { + return nil, err + } + imageFieldsWeights, err := ichek.ImageFieldsWeights() + if err != nil { + return nil, err + } + audioFieldsWeights, err := ichek.AudioFieldsWeights() + if err != nil { + return nil, err + } + videoFieldsWeights, err := ichek.VideoFieldsWeights() + if err != nil { + return nil, err + } + imuFieldsWeights, err := ichek.IMUFieldsWeights() + if err != nil { + return nil, err + } + thermalFieldsWeights, err := ichek.ThermalFieldsWeights() + if err != nil { + return nil, err + } + depthFieldsWeights, err := ichek.DepthFieldsWeights() + if err != nil { + return nil, err + } + + weights = append(weights, textFieldsWeights...) + weights = append(weights, imageFieldsWeights...) + weights = append(weights, audioFieldsWeights...) + weights = append(weights, videoFieldsWeights...) + weights = append(weights, imuFieldsWeights...) + weights = append(weights, thermalFieldsWeights...) + weights = append(weights, depthFieldsWeights...) 
+ + normalizedWeights := moduletools.NormalizeWeights(weights) + + return normalizedWeights, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..116513c8f503102e25b1ff1b4d1757e560c088f8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-bind/vectorizer/vectorizer_test.go @@ -0,0 +1,116 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer_test + +import ( + "context" + "testing" + + "github.com/weaviate/weaviate/modules/multi2vec-bind/vectorizer" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +const image = 
"iVBORw0KGgoAAAANSUhEUgAAAGAAAAA/CAYAAAAfQM0aAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpCRjQ5NEM3RDI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpCRjQ5NEM3RTI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOkJGNDk0QzdCMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOkJGNDk0QzdDMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+WeGRxAAAB2hJREFUeNrUXFtslUUQ3hJCoQVEKy0k1qQgrRg0vaAJaq1tvJSgaLy8mKDF2IvxBY2Bgm8+iIoxvhB72tTUmKgPigbFKCEtxeKD9hZjAi3GJrYJtqRai7TQB+pMz/zwU/5zzsxe2u4kXwiwZ+bb/Xb/s7v/zEmrra1VTFsFeBRQCtgEuBWwkv5vHPAn4DdAB+B7wBjXcUNDQ8o2dXV1SmDzyhUtLS3tBPyxC9CdrN1ihi/swKuA7YD0BG1uJhQDngdcAnwDeJ86Ole2kLii+J2AFsA+wF9RjRalmEUHaZY8m6RDUYZtn6HPHiRfLm2hck0D7AScAdRH8UokwD2AnwA7UoiUyhaRD/S12dHg+8B1OWA/4BTgqVQCPEJL8haLBNDXEfJt03ziipYH+BJwHFAYJcAWwCeAZQ6CLyPfWyz584nrbCuj74eHwgKsddih2R1ba+jHJ65R1k6PuWNhAd4DZM/BTiWbdhwm5hPXsA0AngY8COgNP4JwSTyu4zE/P18VFhZKP7aNYuouXxFX5Ic8Nc2Ea2D/AfYCNgIORZ0DdusOfnFxcXDwUD09PZKP76alKDUR16KiIlVQUHDl7/39/Uozpg7Xac45YB0dGrQHHw07KVwJpRRbYiKuyCc8+MhXcyXocP2RnvMvJhr8QIBK08EPbGJiQuqq0mX7KD4GIohi4xVPTU0N6/BRamPwu7u7dZb3/RozkW3IB3lZEkGHayeI8FFVVdWaZAIUcD2Wl5fbHHy024XtC6QBkomA/XHIFb8X0Xamp6efASHqt27dGnkVkcNxVlFRoXJycmwOvuL
GNmifVATsD/bLZezgKgKE2J+bm3sKHk3XXUWs4Mz87Oxs24OvOLEN26cUAfvFXAkrlKGBCDNXEbAajldXV1+5ijjP+KCrg855x+3nk2uy8SwDdIIIM1cRI6k+0NraqkZGRmzuKAIbFrYf0Q2UaPOA/Wpra3PBNfHhYHq6HbC5qanpGB7ETgPWc0TApTr7eyDolOaj6LRG+/W2Bn94eJg7+DpcowZ+AGb+642NjYfC3wEdXAdI1uK2Du2ksH2HrcHHfggGX4frNVcRMPh7BwcHN8ZiseuuIr4DvKXib29YX2bhmW+wEqYptsREXC2eWXS44oyfuYqYmpra19LSEnkaRgEG6Nj8gGRHESVCRkaG9Kg+IOyTiGtmZqatnZsOV/zMLnjcsF7KH5AIECVCX1+f6u3tlbg4oLmc2VyDy8HgPshg2yzmCo8aFsdAALzpw9dw23REwJkvHPwjSu92UcwVRcAnAd4LaQ6+CVe2AGivAe5WwhcdGp0aoVgmJuIqnBy2uSa18Buxs4AXAJMO401SjLOGfnziyhYg2GrtcNSxSfJ90pI/n7iyBUA7quKv/IYsxhmiZ/ZRy/x94soWAO1nwL0qnhVw2cD/ZfKBvjod9cEnrmwB0DBh9RUVfxHxhYrnUHLtEn2mlHyMOe6HT1wT7oISGSas4ntNzJmsVFczjnMBN1CbfwGD1BYPID8A/lFzbz5xZQsQnmWfExa6ecNVIsBKWuIlgA0qnjG2PLhsou0aZgF3qfil2fg89ssbrhwBNtB+GN/dLUnQ5kbCHYAnAFMAvGpsoY7OlS0krmOhxx7WLHwAeBLwVahN2uIUswgrPB5T8rRv7DxWqDwM+JaCjzue8b5wZe2C7gJ8quKVJqY599vJ1yZHffCJK0uA+wAfAtZYjIO+Gsi3TfOJK0sAfFP/jpKV+HBtKfkutOTPJ64sAVYD3qXgrmwpxVht6McnrmwBMAP4pjlYdRij3tCHT1xZAuDdermOA836gDKKqWNirob1ASZc2eeAl3QH36A+AGP+ohFWxNVSfYAuV9YKyKUTo/bgo2nUB5RQbImJuFqsD9DhyhbAuDgjMI36gFKX7S3XB5S6egSV2Bh8zYyDYjr4SGYi2yzmMIm5YnFGkFOLSQGNjY3X/BtaLBabWQF5XKcO6gOkZT950gAW6wPWuXoEZXEaOqoPyHLcPqkIwvqALFcCZHJmvqP6gEzH7VOKIKgPyHQlwIVUjRzWB1xw3H4+ubIFGE3VyGF9wKjj9ik3D4L6gFFXArCSTlEEzKe3LMIfwvYDNgcf+4P9csSVLUAXt7GD+oBuYfsuW4OvUR/Q7UoA/G2zaRvbOqEI0xRbYiKulusDTrgSYEg6sxKJIKwP6FLyjDYRV4v1ATpc2QKgNZtu6zTqA5o1ObM/h5eDyMvCtrlZObLgNhRv+jAHvkwqQjDzhYPfrvRvF0VcLdQHaHGNxWKrZv0d//hahcqr8Ccww1kRbwPuVMIXHRqd+ptimZiIq0F9gA2urEcQ2jkVf/tz0WG8ixTjnKEfn7iyBQi2WnuULLlV0qE9FrdzPnFlC4CGRQkvqyQ/MqRh6KtO2S948IkrWwC0XwHPAQ4r85z7w+TL1U8Y+8Q14S4oyjA9703AZ4AqFX8RvoTpN8i3/Bi/p+egHz5xZQsQGCasvqGuZhzj76DdpuIZx8FPuOAviWDG8e8qXl0yXxnHPnGdsf8FGAByGwC02iMZswAAAABJRU5ErkJggg==" + +func TestVectorizer(t *testing.T) { + t.Run("should vectorize image", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := vectorizer.New(client) + config := newConfigBuilder().addSetting("imageFields", 
[]interface{}{"image"}).build() + + props := map[string]interface{}{ + "image": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) + + t.Run("should vectorize 2 image fields", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := vectorizer.New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image1", "image2"}).build() + + props := map[string]interface{}{ + "image1": image, + "image2": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) +} + +func TestVectorizer_normalizeWeights(t *testing.T) { + tests := []struct { + name string + weights []float32 + }{ + { + name: "normalize example 1", + weights: []float32{200, 100, 0.1}, + }, + { + name: "normalize example 2", + weights: []float32{300.22, 0.7, 17, 54}, + }, + { + name: "normalize example 3", + weights: []float32{300, 0.02, 17}, + }, + { + name: "normalize example 4", + weights: []float32{500, 0.02, 17.4, 180}, + }, + { + name: "normalize example 5", + weights: []float32{500, 0.02, 17.4, 2, 4, 5, .88}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := moduletools.NormalizeWeights(tt.weights); !checkNormalization(got) { + t.Errorf("Vectorizer.normalizeWeights() = %v, want %v", got, 1.0) + } + }) + } +} + +func checkNormalization(weights []float32) bool { + var result float32 + for i := range weights { + result += weights[i] + } + return result == 1.0 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/meta.go new file mode 100644 index 
0000000000000000000000000000000000000000..27787999fc33a788e4f6d5725bd882e3cfde7d37 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/meta.go @@ -0,0 +1,45 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + + "github.com/pkg/errors" +) + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + req, err := http.NewRequestWithContext(context.Background(), "GET", v.url("/meta", ""), nil) + if err != nil { + return nil, errors.Wrap(err, "create GET meta request") + } + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send GET meta request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read meta response body") + } + + var resBody map[string]interface{} + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, "unmarshal meta response body") + } + return resBody, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/meta_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..4efc11302b1f11c21b8ad16cc9b3a4d165d6a174 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/meta_test.go @@ -0,0 +1,135 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["model"] + assert.True(t, metaModel != nil) + model, modelOK := metaModel.(map[string]interface{}) + assert.True(t, modelOK) + assert.True(t, model["_name_or_path"] != nil) + assert.True(t, model["architectures"] != nil) + modelID2label, modelID2labelOK := model["id2label"].(map[string]interface{}) + assert.True(t, modelID2labelOK) + assert.True(t, modelID2label["0"] != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "model": { + "_name_or_path": "clip", + "add_cross_attention": false, + "architectures": [ + "BertForQuestionAnswering" + ], + "attention_probs_dropout_prob": 0.1, + "bad_words_ids": null, + "bos_token_id": null, + "chunk_size_feed_forward": 0, + "decoder_start_token_id": null, + "diversity_penalty": 0.0, + "do_sample": false, + "early_stopping": false, + "encoder_no_repeat_ngram_size": 0, + "eos_token_id": null, + "finetuning_task": null, + "gradient_checkpointing": false, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 1024, + "id2label": { + "0": "LABEL_0", + "1": "LABEL_1" + }, + 
"initializer_range": 0.02, + "intermediate_size": 4096, + "is_decoder": false, + "is_encoder_decoder": false, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1 + }, + "layer_norm_eps": 1e-12, + "length_penalty": 1.0, + "max_length": 20, + "max_position_embeddings": 512, + "min_length": 0, + "model_type": "bert", + "no_repeat_ngram_size": 0, + "num_attention_heads": 16, + "num_beam_groups": 1, + "num_beams": 1, + "num_hidden_layers": 24, + "num_return_sequences": 1, + "output_attentions": false, + "output_hidden_states": false, + "output_scores": false, + "pad_token_id": 0, + "position_embedding_type": "absolute", + "prefix": null, + "pruned_heads": {}, + "repetition_penalty": 1.0, + "return_dict": true, + "return_dict_in_generate": false, + "sep_token_id": null, + "task_specific_params": null, + "temperature": 1.0, + "tie_encoder_decoder": false, + "tie_word_embeddings": true, + "tokenizer_class": null, + "top_k": 50, + "top_p": 1.0, + "torchscript": false, + "transformers_version": "4.3.2", + "type_vocab_size": 2, + "use_bfloat16": false, + "use_cache": true, + "vocab_size": 30522, + "xla_device": null + } +}` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/startup.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/startup.go new file mode 100644 index 0000000000000000000000000000000000000000..166af5826f306d1ba26f7f2aaccdfd2659b53353 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/startup.go @@ -0,0 +1,68 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "time" + + "github.com/pkg/errors" +) + +func (v *vectorizer) WaitForStartup(initCtx context.Context, + interval time.Duration, +) error { + t := time.NewTicker(interval) + defer t.Stop() + expired := initCtx.Done() + var lastErr error + for { + select { + case <-t.C: + lastErr = v.checkReady(initCtx) + if lastErr == nil { + return nil + } + v.logger. + WithField("action", "multi2vec_remote_wait_for_startup"). + WithError(lastErr).Warnf("multi2vec-clip inference service not ready") + case <-expired: + return errors.Wrapf(lastErr, "init context expired before remote was ready") + } + } +} + +func (v *vectorizer) checkReady(initCtx context.Context) error { + // spawn a new context (derived on the overall context) which is used to + // consider an individual request timed out + requestCtx, cancel := context.WithTimeout(initCtx, 500*time.Millisecond) + defer cancel() + + req, err := http.NewRequestWithContext(requestCtx, http.MethodGet, + v.url("/.well-known/ready", ""), nil) + if err != nil { + return errors.Wrap(err, "create check ready request") + } + + res, err := v.httpClient.Do(req) + if err != nil { + return errors.Wrap(err, "send check ready request") + } + + defer res.Body.Close() + if res.StatusCode > 299 { + return errors.Errorf("not ready: status %d", res.StatusCode) + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/startup_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/startup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ff65558b1e87f7f143c6c43afdaf422dd3829c34 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/startup_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ 
|_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWaitForStartup(t *testing.T) { + t.Run("when the server is immediately ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + err := c.WaitForStartup(context.Background(), 50*time.Millisecond) + + assert.Nil(t, err) + }) + + t.Run("when the server is down", func(t *testing.T) { + c := New("http://nothing-running-at-this-url", 0, nullLogger()) + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 150*time.Millisecond) + + require.NotNil(t, err, nullLogger()) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is alive, but not ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(1 * time.Minute), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is initially not ready, but then becomes ready", + func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(100 * time.Millisecond), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := 
c.WaitForStartup(ctx, 50*time.Millisecond) + + require.Nil(t, err) + }) +} + +type testReadyHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testReadyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/.well-known/ready", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.WriteHeader(http.StatusNoContent) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..f4d5703922897ec8cb0b88b9376d23effd142dcf --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/vectorizer.go @@ -0,0 +1,106 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/multi2vec-clip/ent" +) + +type vectorizer struct { + origin string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(origin string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + origin: origin, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, + texts, images []string, config ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + body, err := json.Marshal(vecRequest{ + Texts: texts, + Images: images, + }) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", v.url("/vectorize", config.InferenceURL), + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + + req.Header.Set("Content-Type", "application/json") + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody vecResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if res.StatusCode > 399 { + return nil, errors.Errorf("fail with status %d: %s", res.StatusCode, + resBody.Error) + } + + return &ent.VectorizationResult{ + TextVectors: resBody.TextVectors, + ImageVectors: resBody.ImageVectors, + }, nil +} + +func (v *vectorizer) url(path string, inferenceURL string) string { + if inferenceURL != "" { + return fmt.Sprintf("%s%s", inferenceURL, path) + } + return fmt.Sprintf("%s%s", v.origin, path) +} + +type vecRequest struct { + Texts []string `json:"texts,omitempty"` + Images []string `json:"images,omitempty"` +} + +type vecResponse struct { + TextVectors [][]float32 `json:"textVectors"` + ImageVectors [][]float32 `json:"imageVectors"` + Error string `json:"error"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..933acd46bc6d30f424b411d6adea8c97485da069 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/clients/vectorizer_test.go @@ -0,0 +1,87 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/modules/multi2vec-clip/ent" +) + +func TestVectorize(t *testing.T) { + t.Run("when the response is successful", func(t *testing.T) { + server := httptest.NewServer(&testVectorizeHandler{ + t: t, + res: vecResponse{ + TextVectors: [][]float32{ + {0, 1, 2}, + }, + ImageVectors: [][]float32{ + {1, 2, 3}, + }, + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + res, err := c.Vectorize(context.Background(), []string{"hello"}, + []string{"image-encoding"}, ent.VectorizationConfig{}) + + assert.Nil(t, err) + assert.Equal(t, &ent.VectorizationResult{ + TextVectors: [][]float32{ + {0, 1, 2}, + }, + ImageVectors: [][]float32{ + {1, 2, 3}, + }, + }, res) + }) + + t.Run("when the server has a an error", func(t *testing.T) { + server := httptest.NewServer(&testVectorizeHandler{ + t: t, + res: vecResponse{ + Error: "some error from the server", + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + _, err := c.Vectorize(context.Background(), []string{"hello"}, + []string{"image-encoding"}, ent.VectorizationConfig{}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "some error from the server") + }) +} + +type testVectorizeHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + res vecResponse +} + +func (f *testVectorizeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/vectorize", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.res.Error != "" { + w.WriteHeader(500) + } + jsonBytes, _ := json.Marshal(f.res) + w.Write(jsonBytes) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/config.go 
b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/config.go new file mode 100644 index 0000000000000000000000000000000000000000..5b32854409b95095765d0885cbe8fba1edeca419 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/multi2vec-clip/vectorizer" +) + +func (m *ClipModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *ClipModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *ClipModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + icheck := vectorizer.NewClassSettings(cfg) + return icheck.Validate() +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/ent/vectorization_config.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/ent/vectorization_config.go new file mode 100644 index 0000000000000000000000000000000000000000..eac68232c5fed64054700f259e075bc12cdb0de9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/ent/vectorization_config.go @@ -0,0 +1,16 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ 
|_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationConfig struct { + InferenceURL string +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/ent/vectorization_result.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/ent/vectorization_result.go new file mode 100644 index 0000000000000000000000000000000000000000..bb05ebd17c90b4f0b3a2f607027bc54ec373f71c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/ent/vectorization_result.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationResult struct { + TextVectors [][]float32 + ImageVectors [][]float32 + AudioVectors [][]float32 + VideoVectors [][]float32 + IMUVectors [][]float32 + ThermalVectors [][]float32 + DepthVectors [][]float32 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/module.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/module.go new file mode 100644 index 0000000000000000000000000000000000000000..986e73aee3cd3a83b7fdea65ea0dab90b02df40f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/module.go @@ -0,0 +1,163 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "context" + "os" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + entcfg "github.com/weaviate/weaviate/entities/config" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-clip/clients" + "github.com/weaviate/weaviate/modules/multi2vec-clip/vectorizer" +) + +const Name = "multi2vec-clip" + +func New() *ClipModule { + return &ClipModule{} +} + +type ClipModule struct { + imageVectorizer imageVectorizer + nearImageGraphqlProvider modulecapabilities.GraphQLArguments + nearImageSearcher modulecapabilities.Searcher[[]float32] + textVectorizer textVectorizer + nearTextGraphqlProvider modulecapabilities.GraphQLArguments + nearTextSearcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + metaClient metaClient + logger logrus.FieldLogger +} + +type metaClient interface { + MetaInfo() (map[string]interface{}, error) +} + +type imageVectorizer interface { + Object(ctx context.Context, obj *models.Object, cfg moduletools.ClassConfig) ([]float32, models.AdditionalProperties, error) + VectorizeImage(ctx context.Context, id, image string, cfg moduletools.ClassConfig) ([]float32, error) +} + +type textVectorizer interface { + Texts(ctx context.Context, input []string, + cfg moduletools.ClassConfig) ([]float32, error) +} + +func (m *ClipModule) Name() string { + return Name +} + +func (m *ClipModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Multi2Vec +} + +func (m *ClipModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + 
return errors.Wrap(err, "init vectorizer") + } + + if err := m.initNearImage(); err != nil { + return errors.Wrap(err, "init near text") + } + + return nil +} + +func (m *ClipModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init near text") + } + + return nil +} + +func (m *ClipModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + uri := os.Getenv("CLIP_INFERENCE_API") + if uri == "" { + return errors.Errorf("required variable CLIP_INFERENCE_API is not set") + } + + waitForStartup := true + if envWaitForStartup := os.Getenv("CLIP_WAIT_FOR_STARTUP"); envWaitForStartup != "" { + waitForStartup = entcfg.Enabled(envWaitForStartup) + } + + client := clients.New(uri, timeout, logger) + if waitForStartup { + if err := client.WaitForStartup(ctx, 1*time.Second); err != nil { + return errors.Wrap(err, "init remote vectorizer") + } + } + + m.imageVectorizer = vectorizer.New(client) + m.textVectorizer = vectorizer.New(client) + m.metaClient = client + + return nil +} + +func (m *ClipModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.imageVectorizer.Object(ctx, obj, cfg) +} + +func (m *ClipModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + return batch.VectorizeBatch(ctx, objs, skipObject, cfg, m.logger, m.imageVectorizer.Object) +} + +func (m *ClipModule) MetaInfo() (map[string]interface{}, error) { + return m.metaClient.MetaInfo() 
+} + +func (m *ClipModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.textVectorizer.Texts(ctx, []string{input}, cfg) +} + +func (m *ClipModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + ichek := vectorizer.NewClassSettings(cfg) + mediaProps, err := ichek.Properties() + return false, mediaProps, err +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.InputVectorizer[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/nearArguments.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/nearArguments.go new file mode 100644 index 0000000000000000000000000000000000000000..f7514ffc3e92efbcd44cb9bdcb4b98ec39d10f0a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/nearArguments.go @@ -0,0 +1,57 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearImage" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *ClipModule) initNearImage() error { + m.nearImageSearcher = nearImage.NewSearcher(m.imageVectorizer) + m.nearImageGraphqlProvider = nearImage.New() + return nil +} + +func (m *ClipModule) initNearText() error { + m.nearTextSearcher = nearText.NewSearcher(m.textVectorizer) + m.nearTextGraphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *ClipModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + arguments := map[string]modulecapabilities.GraphQLArgument{} + for name, arg := range m.nearImageGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearTextGraphqlProvider.Arguments() { + arguments[name] = arg + } + return arguments +} + +func (m *ClipModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearches := map[string]modulecapabilities.VectorForParams[[]float32]{} + for name, arg := range m.nearImageSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearTextSearcher.VectorSearches() { + vectorSearches[name] = arg + } + return vectorSearches +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..52751ec17b138123c82f80370d42aefc586de38c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/class_settings.go @@ -0,0 +1,228 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ 
___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "fmt" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +type classSettings struct { + cfg moduletools.ClassConfig + base *basesettings.BaseClassSettings +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, base: basesettings.NewBaseClassSettings(cfg, false)} +} + +func (ic *classSettings) ImageField(property string) bool { + return ic.field("imageFields", property) +} + +func (ic *classSettings) ImageFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("image") +} + +func (ic *classSettings) TextField(property string) bool { + return ic.field("textFields", property) +} + +func (ic *classSettings) TextFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("text") +} + +func (ic *classSettings) Properties() ([]string, error) { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return nil, errors.New("empty config") + } + props := make([]string, 0) + + fields := []string{"imageFields", "textFields"} + + for _, field := range fields { + fields, ok := ic.cfg.Class()[field] + if !ok { + continue + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return nil, errors.Errorf("%s must be an array", field) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return nil, errors.Errorf("%s must be a string", field) + } + props = append(props, v) + } + } + return props, nil +} + +func (ic *classSettings) InferenceURL() string { + return ic.base.GetPropertyAsString("inferenceUrl", "") +} + +func (ic *classSettings) 
field(name, property string) bool { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return false + } + + fields, ok := ic.cfg.Class()[name] + if !ok { + return false + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return false + } + + fieldNames := make([]string, len(fieldsArray)) + for i, value := range fieldsArray { + fieldNames[i] = value.(string) + } + + for i := range fieldNames { + if fieldNames[i] == property { + return true + } + } + + return false +} + +func (ic *classSettings) Validate() error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + imageFields, imageFieldsOk := ic.cfg.Class()["imageFields"] + textFields, textFieldsOk := ic.cfg.Class()["textFields"] + if !imageFieldsOk && !textFieldsOk { + return errors.New("textFields or imageFields setting needs to be present") + } + + if imageFieldsOk { + imageFieldsCount, err := ic.validateFields("image", imageFields) + if err != nil { + return err + } + err = ic.validateWeights("image", imageFieldsCount) + if err != nil { + return err + } + } + + if textFieldsOk { + textFieldsCount, err := ic.validateFields("text", textFields) + if err != nil { + return err + } + err = ic.validateWeights("text", textFieldsCount) + if err != nil { + return err + } + } + + return nil +} + +func (ic *classSettings) validateFields(name string, fields interface{}) (int, error) { + fieldsArray, ok := fields.([]interface{}) + if !ok { + return 0, errors.Errorf("%sFields must be an array", name) + } + + if len(fieldsArray) == 0 { + return 0, errors.Errorf("must contain at least one %s field name in %sFields", name, name) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return 0, errors.Errorf("%sField must be a string", name) + } + if len(v) == 0 { + return 0, errors.Errorf("%sField values cannot be empty", name) + } + } + + return 
len(fieldsArray), nil +} + +func (ic *classSettings) validateWeights(name string, count int) error { + weights, ok := ic.getWeights(name) + if ok { + if len(weights) != count { + return errors.Errorf("weights.%sFields does not equal number of %sFields", name, name) + } + _, err := ic.getWeightsArray(weights) + if err != nil { + return err + } + } + + return nil +} + +func (ic *classSettings) getWeights(name string) ([]interface{}, bool) { + weights, ok := ic.cfg.Class()["weights"] + if ok { + weightsObject, ok := weights.(map[string]interface{}) + if ok { + fieldWeights, ok := weightsObject[fmt.Sprintf("%sFields", name)] + if ok { + fieldWeightsArray, ok := fieldWeights.([]interface{}) + if ok { + return fieldWeightsArray, ok + } + } + } + } + + return nil, false +} + +func (ic *classSettings) getWeightsArray(weights []interface{}) ([]float32, error) { + weightsArray := make([]float32, len(weights)) + for i := range weights { + weight, err := ic.getNumber(weights[i]) + if err != nil { + return nil, err + } + weightsArray[i] = weight + } + return weightsArray, nil +} + +func (ic *classSettings) getFieldsWeights(name string) ([]float32, error) { + weights, ok := ic.getWeights(name) + if ok { + return ic.getWeightsArray(weights) + } + return nil, nil +} + +func (ic *classSettings) getNumber(in interface{}) (float32, error) { + return ic.base.GetNumber(in) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..77f05ad19d1893fa04eca093af97619dda8c7dda --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/class_settings_test.go @@ -0,0 +1,179 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ 
|_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "encoding/json" + "testing" + + "github.com/weaviate/weaviate/entities/moduletools" +) + +func Test_classSettings_Validate(t *testing.T) { + type fields struct { + cfg moduletools.ClassConfig + } + tests := []struct { + name string + fields fields + wantErr bool + }{ + { + name: "should not pass with empty config", + wantErr: true, + }, + { + name: "should not pass with nil config", + fields: fields{ + cfg: nil, + }, + wantErr: true, + }, + { + name: "should not pass with nil imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", nil).build(), + }, + wantErr: true, + }, + { + name: "should not pass with fault imageFields value", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []string{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty string in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{""}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with int value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{1.0}).build(), + }, + wantErr: true, + }, + { + name: "should pass with proper value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{"field"}).build(), + }, + }, + { + name: "should pass with proper value in imageFields and inferenceUrl", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("inferenceUrl", "http://inference.url"). 
+ addSetting("imageFields", []interface{}{"field"}).build(), + }, + }, + { + name: "should pass with proper value in imageFields and textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("imageFields", []interface{}{"imageField"}). + addSetting("textFields", []interface{}{"textField"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + addWeights([]interface{}{1, 2}, []interface{}{1, 2}). + build(), + }, + }, + { + name: "should pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + }, + { + name: "should not pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1}, []interface{}{1}). 
+ build(), + }, + wantErr: true, + }, + { + name: "should not pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, "aaaa"}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{json.Number("1"), json.Number("2")}, []interface{}{json.Number("3")}). + build(), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.fields.cfg) + if err := ic.Validate(); (err != nil) != tt.wantErr { + t.Errorf("classSettings.Validate() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b941917b1f0b9c541f30ed186ec9e49d9489ad1b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/fakes_for_test.go @@ -0,0 +1,97 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/multi2vec-clip/ent" + "github.com/weaviate/weaviate/usecases/config" +) + +type builder struct { + fakeClassConfig *fakeClassConfig +} + +func newConfigBuilder() *builder { + return &builder{ + fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}}, + } +} + +func (b *builder) addSetting(name string, value interface{}) *builder { + b.fakeClassConfig.config[name] = value + return b +} + +func (b *builder) addWeights(textWeights, imageWeights []interface{}) *builder { + if textWeights != nil || imageWeights != nil { + weightSettings := map[string]interface{}{} + if textWeights != nil { + weightSettings["textFields"] = textWeights + } + if imageWeights != nil { + weightSettings["imageFields"] = imageWeights + } + b.fakeClassConfig.config["weights"] = weightSettings + } + return b +} + +func (b *builder) build() *fakeClassConfig { + return b.fakeClassConfig +} + +type fakeClassConfig struct { + config map[string]interface{} +} + +func (c fakeClassConfig) Class() map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Property(propName string) map[string]interface{} { + return c.config +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +type fakeClient struct{} + +func (c *fakeClient) Vectorize(ctx context.Context, + texts, images []string, config ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + result := &ent.VectorizationResult{ + TextVectors: [][]float32{{1.0, 2.0, 3.0, 4.0, 5.0}}, + 
ImageVectors: [][]float32{{10.0, 20.0, 30.0, 40.0, 50.0}}, + } + return result, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/texts.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/texts.go new file mode 100644 index 0000000000000000000000000000000000000000..6f3ef47bfaf9598d85a9228e04ef38416f7b6dcc --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/texts.go @@ -0,0 +1,38 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-clip/ent" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + res, err := v.client.Vectorize(ctx, inputs, []string{}, v.getVectorizationConfig(cfg)) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + if len(inputs) != len(res.TextVectors) { + return nil, errors.New("inputs are not equal to vectors returned") + } + return libvectorizer.CombineVectors(res.TextVectors), nil +} + +func (v *Vectorizer) getVectorizationConfig(cfg moduletools.ClassConfig) ent.VectorizationConfig { + return ent.VectorizationConfig{InferenceURL: NewClassSettings(cfg).InferenceURL()} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..0907316769743863ba51d3e35f2feb75d2f701ce --- 
/dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/vectorizer.go @@ -0,0 +1,134 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-clip/ent" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +type Vectorizer struct { + client Client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +type Client interface { + Vectorize(ctx context.Context, + texts, images []string, config ent.VectorizationConfig) (*ent.VectorizationResult, error) +} + +type ClassSettings interface { + ImageField(property string) bool + ImageFieldsWeights() ([]float32, error) + TextField(property string) bool + TextFieldsWeights() ([]float32, error) + Properties() ([]string, error) + InferenceURL() string +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) VectorizeImage(ctx context.Context, id, image string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.Vectorize(ctx, []string{}, []string{image}, v.getVectorizationConfig(cfg)) + if err != nil { + return nil, err + } + if len(res.ImageVectors) != 1 { + return nil, errors.New("empty 
vector") + } + + return res.ImageVectors[0], nil +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, error) { + ichek := NewClassSettings(cfg) + + // vectorize image and text + texts := []string{} + images := []string{} + + if object.Properties != nil { + schemamap := object.Properties.(map[string]interface{}) + for _, propName := range moduletools.SortStringKeys(schemamap) { + switch val := schemamap[propName].(type) { + case string: + if ichek.ImageField(propName) { + images = append(images, val) + } + if ichek.TextField(propName) { + texts = append(texts, val) + } + + case []string: + if ichek.TextField(propName) { + texts = append(texts, val...) + } + default: // properties that are not part of the object + + } + } + } + + vectors := [][]float32{} + if len(texts) > 0 || len(images) > 0 { + res, err := v.client.Vectorize(ctx, texts, images, v.getVectorizationConfig(cfg)) + if err != nil { + return nil, err + } + vectors = append(vectors, res.TextVectors...) + vectors = append(vectors, res.ImageVectors...) + } + weights, err := v.getWeights(ichek) + if err != nil { + return nil, err + } + + return libvectorizer.CombineVectorsWithWeights(vectors, weights), nil +} + +func (v *Vectorizer) getWeights(ichek ClassSettings) ([]float32, error) { + weights := []float32{} + textFieldsWeights, err := ichek.TextFieldsWeights() + if err != nil { + return nil, err + } + imageFieldsWeights, err := ichek.ImageFieldsWeights() + if err != nil { + return nil, err + } + + weights = append(weights, textFieldsWeights...) + weights = append(weights, imageFieldsWeights...) 
+ + normalizedWeights := moduletools.NormalizeWeights(weights) + + return normalizedWeights, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c6e6377872fda4af5208a6c4d3e050a908ea080a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-clip/vectorizer/vectorizer_test.go @@ -0,0 +1,142 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "testing" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +const image = 
"iVBORw0KGgoAAAANSUhEUgAAAGAAAAA/CAYAAAAfQM0aAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpCRjQ5NEM3RDI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpCRjQ5NEM3RTI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOkJGNDk0QzdCMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOkJGNDk0QzdDMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+WeGRxAAAB2hJREFUeNrUXFtslUUQ3hJCoQVEKy0k1qQgrRg0vaAJaq1tvJSgaLy8mKDF2IvxBY2Bgm8+iIoxvhB72tTUmKgPigbFKCEtxeKD9hZjAi3GJrYJtqRai7TQB+pMz/zwU/5zzsxe2u4kXwiwZ+bb/Xb/s7v/zEmrra1VTFsFeBRQCtgEuBWwkv5vHPAn4DdAB+B7wBjXcUNDQ8o2dXV1SmDzyhUtLS3tBPyxC9CdrN1ihi/swKuA7YD0BG1uJhQDngdcAnwDeJ86Ole2kLii+J2AFsA+wF9RjRalmEUHaZY8m6RDUYZtn6HPHiRfLm2hck0D7AScAdRH8UokwD2AnwA7UoiUyhaRD/S12dHg+8B1OWA/4BTgqVQCPEJL8haLBNDXEfJt03ziipYH+BJwHFAYJcAWwCeAZQ6CLyPfWyz584nrbCuj74eHwgKsddih2R1ba+jHJ65R1k6PuWNhAd4DZM/BTiWbdhwm5hPXsA0AngY8COgNP4JwSTyu4zE/P18VFhZKP7aNYuouXxFX5Ic8Nc2Ea2D/AfYCNgIORZ0DdusOfnFxcXDwUD09PZKP76alKDUR16KiIlVQUHDl7/39/Uozpg7Xac45YB0dGrQHHw07KVwJpRRbYiKuyCc8+MhXcyXocP2RnvMvJhr8QIBK08EPbGJiQuqq0mX7KD4GIohi4xVPTU0N6/BRamPwu7u7dZb3/RozkW3IB3lZEkGHayeI8FFVVdWaZAIUcD2Wl5fbHHy024XtC6QBkomA/XHIFb8X0Xamp6efASHqt27dGnkVkcNxVlFRoXJycmwOvuL
GNmifVATsD/bLZezgKgKE2J+bm3sKHk3XXUWs4Mz87Oxs24OvOLEN26cUAfvFXAkrlKGBCDNXEbAajldXV1+5ijjP+KCrg855x+3nk2uy8SwDdIIIM1cRI6k+0NraqkZGRmzuKAIbFrYf0Q2UaPOA/Wpra3PBNfHhYHq6HbC5qanpGB7ETgPWc0TApTr7eyDolOaj6LRG+/W2Bn94eJg7+DpcowZ+AGb+642NjYfC3wEdXAdI1uK2Du2ksH2HrcHHfggGX4frNVcRMPh7BwcHN8ZiseuuIr4DvKXib29YX2bhmW+wEqYptsREXC2eWXS44oyfuYqYmpra19LSEnkaRgEG6Nj8gGRHESVCRkaG9Kg+IOyTiGtmZqatnZsOV/zMLnjcsF7KH5AIECVCX1+f6u3tlbg4oLmc2VyDy8HgPshg2yzmCo8aFsdAALzpw9dw23REwJkvHPwjSu92UcwVRcAnAd4LaQ6+CVe2AGivAe5WwhcdGp0aoVgmJuIqnBy2uSa18Buxs4AXAJMO401SjLOGfnziyhYg2GrtcNSxSfJ90pI/n7iyBUA7quKv/IYsxhmiZ/ZRy/x94soWAO1nwL0qnhVw2cD/ZfKBvjod9cEnrmwB0DBh9RUVfxHxhYrnUHLtEn2mlHyMOe6HT1wT7oISGSas4ntNzJmsVFczjnMBN1CbfwGD1BYPID8A/lFzbz5xZQsQnmWfExa6ecNVIsBKWuIlgA0qnjG2PLhsou0aZgF3qfil2fg89ssbrhwBNtB+GN/dLUnQ5kbCHYAnAFMAvGpsoY7OlS0krmOhxx7WLHwAeBLwVahN2uIUswgrPB5T8rRv7DxWqDwM+JaCjzue8b5wZe2C7gJ8quKVJqY599vJ1yZHffCJK0uA+wAfAtZYjIO+Gsi3TfOJK0sAfFP/jpKV+HBtKfkutOTPJ64sAVYD3qXgrmwpxVht6McnrmwBMAP4pjlYdRij3tCHT1xZAuDdermOA836gDKKqWNirob1ASZc2eeAl3QH36A+AGP+ohFWxNVSfYAuV9YKyKUTo/bgo2nUB5RQbImJuFqsD9DhyhbAuDgjMI36gFKX7S3XB5S6egSV2Bh8zYyDYjr4SGYi2yzmMIm5YnFGkFOLSQGNjY3X/BtaLBabWQF5XKcO6gOkZT950gAW6wPWuXoEZXEaOqoPyHLcPqkIwvqALFcCZHJmvqP6gEzH7VOKIKgPyHQlwIVUjRzWB1xw3H4+ubIFGE3VyGF9wKjj9ik3D4L6gFFXArCSTlEEzKe3LMIfwvYDNgcf+4P9csSVLUAXt7GD+oBuYfsuW4OvUR/Q7UoA/G2zaRvbOqEI0xRbYiKulusDTrgSYEg6sxKJIKwP6FLyjDYRV4v1ATpc2QKgNZtu6zTqA5o1ObM/h5eDyMvCtrlZObLgNhRv+jAHvkwqQjDzhYPfrvRvF0VcLdQHaHGNxWKrZv0d//hahcqr8Ccww1kRbwPuVMIXHRqd+ptimZiIq0F9gA2urEcQ2jkVf/tz0WG8ixTjnKEfn7iyBQi2WnuULLlV0qE9FrdzPnFlC4CGRQkvqyQ/MqRh6KtO2S948IkrWwC0XwHPAQ4r85z7w+TL1U8Y+8Q14S4oyjA9703AZ4AqFX8RvoTpN8i3/Bi/p+egHz5xZQsQGCasvqGuZhzj76DdpuIZx8FPuOAviWDG8e8qXl0yXxnHPnGdsf8FGAByGwC02iMZswAAAABJRU5ErkJggg==" + +func TestVectorizer(t *testing.T) { + t.Run("should vectorize image", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image"}).build() + + 
props := map[string]interface{}{ + "image": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) + + t.Run("should vectorize 2 image fields", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image1", "image2"}).build() + + props := map[string]interface{}{ + "image1": image, + "image2": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) +} + +func TestVectorizerWithWeights(t *testing.T) { + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder(). + addSetting("imageFields", []interface{}{"image"}). + addSetting("textFields", []interface{}{"text"}). + addWeights([]interface{}{0.4}, []interface{}{0.6}). 
+ build() + + props := map[string]interface{}{ + "image": image, + "text": "text", + "description": "non-vectorizable", + } + input := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + vector, _, err := vectorizer.Object(context.Background(), input, config) + + require.Nil(t, err) + assert.Equal(t, []float32{3.2, 6.4, 9.6, 12.8, 16}, vector) + // vectors are defined in Vectorize within fakes_for_test.go + // result calculated with above weights as (textVectors[0][i]*0.4+imageVectors[0][i]*0.6) / 2 +} + +func TestVectorizer_normalizeWeights(t *testing.T) { + tests := []struct { + name string + weights []float32 + }{ + { + name: "normalize example 1", + weights: []float32{200, 100, 0.1}, + }, + { + name: "normalize example 2", + weights: []float32{300.22, 0.7, 17, 54}, + }, + { + name: "normalize example 3", + weights: []float32{300, 0.02, 17}, + }, + { + name: "normalize example 4", + weights: []float32{500, 0.02, 17.4, 180}, + }, + { + name: "normalize example 5", + weights: []float32{500, 0.02, 17.4, 2, 4, 5, .88}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := moduletools.NormalizeWeights(tt.weights); !checkNormalization(got) { + t.Errorf("Vectorizer.normalizeWeights() = %v, want %v", got, 1.0) + } + }) + } +} + +func checkNormalization(weights []float32) bool { + var result float32 + for i := range weights { + result += weights[i] + } + return result == 1.0 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/clients/cohere.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/clients/cohere.go new file mode 100644 index 0000000000000000000000000000000000000000..610796e74f3f5ca6f122579febdfab5717e70355 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/clients/cohere.go @@ -0,0 +1,83 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ 
\___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/cohere" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/multi2vec-cohere/ent" +) + +type vectorizer struct { + client *cohere.Client + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + client: cohere.New(apiKey, timeout, logger), + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationCLIPResult[[]float32], error) { + return v.vectorize(ctx, texts, images, cfg) +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, + input []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationCLIPResult[[]float32], error) { + return v.vectorize(ctx, input, nil, cfg) +} + +func (v *vectorizer) vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationCLIPResult[[]float32], error) { + var textVectors [][]float32 + var imageVectors [][]float32 + settings := ent.NewClassSettings(cfg) + if len(texts) > 0 { + textEmbeddings, err := v.client.Vectorize(ctx, texts, cohere.Settings{ + Model: settings.Model(), + Truncate: settings.Truncate(), + BaseURL: settings.BaseURL(), + InputType: cohere.SearchDocument, + }) + if err != nil { + return nil, err + } + textVectors = textEmbeddings.Vector + } + if len(images) > 0 { + imageEmbeddings, err := v.client.Vectorize(ctx, images, cohere.Settings{ + Model: settings.Model(), + BaseURL: settings.BaseURL(), + InputType: cohere.Image, + }) + if err != nil { + return nil, 
err + } + imageVectors = imageEmbeddings.Vector + } + return &modulecomponents.VectorizationCLIPResult[[]float32]{ + TextVectors: textVectors, + ImageVectors: imageVectors, + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..c40f5c9ea690d7bbdfdb92474cd695fed969f87c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Cohere Module", + "documentationHref": "https://docs.cohere.ai/embedding-wiki/", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/config.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/config.go new file mode 100644 index 0000000000000000000000000000000000000000..393bffb179ca765e6c764c1646b2d5c65816a16c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/multi2vec-cohere/ent" +) + +func (m *Module) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *Module) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *Module) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + icheck := ent.NewClassSettings(cfg) + return icheck.Validate() +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..d54e04ead5dae416e60ad6959e83d665b7f7b948 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/ent/class_settings.go @@ -0,0 +1,263 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + baseURLProperty = "baseURL" + modelProperty = "model" + truncateProperty = "truncate" +) + +const ( + DefaultBaseURL = "https://api.cohere.com" + DefaultCohereModel = "embed-multilingual-v3.0" + DefaultTruncate = "END" + DefaultVectorizeClassName = false + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false +) + +type classSettings struct { + base *basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{ + cfg: cfg, + base: basesettings.NewBaseClassSettingsWithAltNames(cfg, false, "multi2vec-cohere", nil, nil), + } +} + +// Cohere settings +func (cs *classSettings) Model() string { + return cs.base.GetPropertyAsString(modelProperty, DefaultCohereModel) +} + +func (cs *classSettings) BaseURL() string { + return cs.base.GetPropertyAsString(baseURLProperty, DefaultBaseURL) +} + +func (cs *classSettings) Truncate() string { + return cs.base.GetPropertyAsString(truncateProperty, DefaultTruncate) +} + +// CLIP module specific settings +func (ic *classSettings) ImageField(property string) bool { + return ic.field("imageFields", property) +} + +func (ic *classSettings) ImageFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("image") +} + +func (ic *classSettings) TextField(property string) bool { + return ic.field("textFields", property) +} + +func (ic *classSettings) TextFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("text") +} + +func (ic *classSettings) Properties() ([]string, error) { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return nil, errors.New("empty config") + } + props := make([]string, 0) + 
+ fields := []string{"textFields", "imageFields"} + + for _, field := range fields { + fields, ok := ic.base.GetSettings()[field] + if !ok { + continue + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return nil, errors.Errorf("%s must be an array", field) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return nil, errors.Errorf("%s must be a string", field) + } + props = append(props, v) + } + } + return props, nil +} + +func (ic *classSettings) field(name, property string) bool { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return false + } + + fields, ok := ic.base.GetSettings()[name] + if !ok { + return false + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return false + } + + fieldNames := make([]string, len(fieldsArray)) + for i, value := range fieldsArray { + fieldNames[i] = value.(string) + } + + for i := range fieldNames { + if fieldNames[i] == property { + return true + } + } + + return false +} + +func (ic *classSettings) Validate() error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + var errorMessages []string + + imageFields, imageFieldsOk := ic.cfg.Class()["imageFields"] + textFields, textFieldsOk := ic.cfg.Class()["textFields"] + if !imageFieldsOk && !textFieldsOk { + errorMessages = append(errorMessages, "textFields or imageFields setting needs to be present") + } + + if imageFieldsOk { + imageFieldsCount, err := ic.validateFields("image", imageFields) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + err = ic.validateWeights("image", imageFieldsCount) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + } + + if textFieldsOk { + textFieldsCount, err := ic.validateFields("text", textFields) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + err = 
ic.validateWeights("text", textFieldsCount) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + } + + if len(errorMessages) > 0 { + return fmt.Errorf("%s", strings.Join(errorMessages, ", ")) + } + + return nil +} + +func (ic *classSettings) validateFields(name string, fields interface{}) (int, error) { + fieldsArray, ok := fields.([]interface{}) + if !ok { + return 0, errors.Errorf("%sFields must be an array", name) + } + + if len(fieldsArray) == 0 { + return 0, errors.Errorf("must contain at least one %s field name in %sFields", name, name) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return 0, errors.Errorf("%sField must be a string", name) + } + if len(v) == 0 { + return 0, errors.Errorf("%sField values cannot be empty", name) + } + } + + return len(fieldsArray), nil +} + +func (ic *classSettings) validateWeights(name string, count int) error { + weights, ok := ic.getWeights(name) + if ok { + if len(weights) != count { + return errors.Errorf("weights.%sFields does not equal number of %sFields", name, name) + } + _, err := ic.getWeightsArray(weights) + if err != nil { + return err + } + } + + return nil +} + +func (ic *classSettings) getWeights(name string) ([]interface{}, bool) { + weights, ok := ic.base.GetSettings()["weights"] + if ok { + weightsObject, ok := weights.(map[string]interface{}) + if ok { + fieldWeights, ok := weightsObject[fmt.Sprintf("%sFields", name)] + if ok { + fieldWeightsArray, ok := fieldWeights.([]interface{}) + if ok { + return fieldWeightsArray, ok + } + } + } + } + + return nil, false +} + +func (ic *classSettings) getWeightsArray(weights []interface{}) ([]float32, error) { + weightsArray := make([]float32, len(weights)) + for i := range weights { + weight, err := ic.getNumber(weights[i]) + if err != nil { + return nil, err + } + weightsArray[i] = weight + } + return weightsArray, nil +} + +func (ic *classSettings) getFieldsWeights(name string) ([]float32, error) { + 
weights, ok := ic.getWeights(name) + if ok { + return ic.getWeightsArray(weights) + } + return nil, nil +} + +func (ic *classSettings) getNumber(in interface{}) (float32, error) { + return ic.base.GetNumber(in) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/ent/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/ent/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..00ed00ac67c8787f66101a09f703765c0e4a1c06 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/ent/class_settings_test.go @@ -0,0 +1,249 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "encoding/json" + "testing" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + type fields struct { + cfg moduletools.ClassConfig + } + tests := []struct { + name string + fields fields + wantErr bool + }{ + { + name: "should not pass with empty config", + wantErr: true, + }, + { + name: "should not pass with nil config", + fields: fields{ + cfg: nil, + }, + wantErr: true, + }, + { + name: "should not pass with nil imageFields", + fields: fields{ + cfg: newConfigBuilder(). 
+ addSetting("model", "model").addSetting("imageFields", nil).build(), + }, + wantErr: true, + }, + { + name: "should not pass with fault imageFields value", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []string{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty string in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{""}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with int value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{1.0}).build(), + }, + wantErr: true, + }, + { + name: "should pass with proper value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{"field"}).build(), + }, + }, + { + name: "should pass with proper value in imageFields and textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("imageFields", []interface{}{"imageField"}). + addSetting("textFields", []interface{}{"textField"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + addWeights([]interface{}{1, 2}, []interface{}{1, 2}). 
+ build(), + }, + }, + { + name: "should pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + }, + { + name: "should not pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, "aaaa"}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{json.Number("1"), json.Number("2")}, []interface{}{json.Number("3")}). + build(), + }, + }, + { + name: "should pass with proper dimensions setting in videoFields together with image fields", + fields: fields{ + cfg: newConfigBuilder(). 
+ addSetting("videoFields", []interface{}{"video1"}). + addSetting("imageFields", []interface{}{"image1"}). + build(), + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.fields.cfg) + if err := ic.Validate(); (err != nil) != tt.wantErr { + t.Errorf("classSettings.Validate() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +type builder struct { + fakeClassConfig *fakeClassConfig +} + +func newConfigBuilder() *builder { + return &builder{ + fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}}, + } +} + +func (b *builder) addSetting(name string, value interface{}) *builder { + b.fakeClassConfig.config[name] = value + return b +} + +func (b *builder) addWeights(textWeights, imageWeights []interface{}) *builder { + if textWeights != nil || imageWeights != nil { + weightSettings := map[string]interface{}{} + if textWeights != nil { + weightSettings["textFields"] = textWeights + } + if imageWeights != nil { + weightSettings["imageFields"] = imageWeights + } + b.fakeClassConfig.config["weights"] = weightSettings + } + return b +} + +func (b *builder) build() *fakeClassConfig { + return b.fakeClassConfig +} + +type fakeClassConfig struct { + config map[string]interface{} +} + +func (c fakeClassConfig) Class() map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Property(propName string) map[string]interface{} { + return c.config +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/module.go 
b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/module.go new file mode 100644 index 0000000000000000000000000000000000000000..9788b9b1777cb391d3051dce50c3885aba4be9de --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/module.go @@ -0,0 +1,137 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "context" + "os" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-cohere/clients" + "github.com/weaviate/weaviate/modules/multi2vec-cohere/ent" + "github.com/weaviate/weaviate/modules/multi2vec-cohere/vectorizer" +) + +const Name = "multi2vec-cohere" + +func New() *Module { + return &Module{} +} + +type Module struct { + vectorizer *vectorizer.Vectorizer + nearImageGraphqlProvider modulecapabilities.GraphQLArguments + nearImageSearcher modulecapabilities.Searcher[[]float32] + nearTextGraphqlProvider modulecapabilities.GraphQLArguments + nearTextSearcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + metaClient metaClient + logger logrus.FieldLogger +} + +type metaClient interface { + MetaInfo() (map[string]interface{}, error) +} + +func (m *Module) Name() string { + return Name +} + +func (m *Module) Type() modulecapabilities.ModuleType { + return modulecapabilities.Multi2Vec +} + +func (m *Module) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + if err := 
m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initNearImage(); err != nil { + return errors.Wrap(err, "init near image") + } + + return nil +} + +func (m *Module) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init near text") + } + + return nil +} + +func (m *Module) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("COHERE_APIKEY") + client := clients.New(apiKey, timeout, logger) + + m.vectorizer = vectorizer.New(client) + m.metaClient = client + + return nil +} + +func (m *Module) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg) +} + +func (m *Module) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + return batch.VectorizeBatch(ctx, objs, skipObject, cfg, m.logger, m.vectorizer.Object) +} + +func (m *Module) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + ichek := ent.NewClassSettings(cfg) + mediaProps, err := ichek.Properties() + return false, mediaProps, err +} + +func (m *Module) MetaInfo() (map[string]interface{}, error) { + return m.metaClient.MetaInfo() +} + +func (m *Module) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} 
+ +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.InputVectorizer[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/nearArguments.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/nearArguments.go new file mode 100644 index 0000000000000000000000000000000000000000..55e86ecd93bbb8b526a89387a3238344cde98c85 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/nearArguments.go @@ -0,0 +1,57 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearImage" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *Module) initNearImage() error { + m.nearImageSearcher = nearImage.NewSearcher(m.vectorizer) + m.nearImageGraphqlProvider = nearImage.New() + return nil +} + +func (m *Module) initNearText() error { + m.nearTextSearcher = nearText.NewSearcher(m.vectorizer) + m.nearTextGraphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *Module) Arguments() map[string]modulecapabilities.GraphQLArgument { + arguments := map[string]modulecapabilities.GraphQLArgument{} + for name, arg := range m.nearImageGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearTextGraphqlProvider.Arguments() { + arguments[name] = arg + } + return arguments +} + +func (m *Module) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearches := 
map[string]modulecapabilities.VectorForParams[[]float32]{} + for name, arg := range m.nearImageSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearTextSearcher.VectorSearches() { + vectorSearches[name] = arg + } + return vectorSearches +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..f42ead07c7bd08f956ea6871e146ac9c0c7fcb5c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/vectorizer/fakes_for_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" +) + +type builder struct { + fakeClassConfig *fakeClassConfig +} + +func newConfigBuilder() *builder { + return &builder{ + fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}}, + } +} + +func (b *builder) addSetting(name string, value interface{}) *builder { + b.fakeClassConfig.config[name] = value + return b +} + +func (b *builder) addWeights(textWeights, imageWeights []interface{}) *builder { + if textWeights != nil || imageWeights != nil { + weightSettings := map[string]interface{}{} + if textWeights != nil { + weightSettings["textFields"] = textWeights + } + if imageWeights != nil { + weightSettings["imageFields"] = imageWeights + } + b.fakeClassConfig.config["weights"] = weightSettings + } + return b +} + +func (b *builder) build() *fakeClassConfig { + return b.fakeClassConfig +} + +type fakeClassConfig struct { + config map[string]interface{} +} + +func (c fakeClassConfig) Class() map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Property(propName string) map[string]interface{} { + return c.config +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +type fakeClient struct{} + +func (c *fakeClient) Vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationCLIPResult[[]float32], error) { + result := 
&modulecomponents.VectorizationCLIPResult[[]float32]{ + TextVectors: [][]float32{{1.0, 2.0, 3.0, 4.0, 5.0}}, + ImageVectors: [][]float32{{10.0, 20.0, 30.0, 40.0, 50.0}}, + } + return result, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/vectorizer/texts.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/vectorizer/texts.go new file mode 100644 index 0000000000000000000000000000000000000000..9600d9caac59f5f7884587d90f2733431242c192 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/vectorizer/texts.go @@ -0,0 +1,33 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + res, err := v.client.Vectorize(ctx, inputs, nil, cfg) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + if len(inputs) != len(res.TextVectors) { + return nil, errors.New("inputs are not equal to vectors returned") + } + return libvectorizer.CombineVectors(res.TextVectors), nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/vectorizer/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/vectorizer/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..3f155fdedacb77d30883cc2b35e4ce36ac2bccd0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/vectorizer/vectorizer.go @@ -0,0 +1,151 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ 
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-cohere/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +type Vectorizer struct { + client Client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +type Client interface { + Vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig) (*modulecomponents.VectorizationCLIPResult[[]float32], error) +} + +type ClassSettings interface { + ImageField(property string) bool + ImageFieldsWeights() ([]float32, error) + TextField(property string) bool + TextFieldsWeights() ([]float32, error) +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, + cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) VectorizeImage(ctx context.Context, id, image string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.Vectorize(ctx, nil, []string{image}, cfg) + if err != nil { + return nil, err + } + if len(res.ImageVectors) != 1 { + return nil, errors.New("empty vector") + } + + return res.ImageVectors[0], nil +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, + cfg moduletools.ClassConfig, +) 
([]float32, error) { + ichek := ent.NewClassSettings(cfg) + + // vectorize image and text + texts := []string{} + images := []string{} + + if object.Properties != nil { + schemamap := object.Properties.(map[string]interface{}) + for _, propName := range moduletools.SortStringKeys(schemamap) { + switch val := schemamap[propName].(type) { + case string: + if ichek.ImageField(propName) { + images = append(images, val) + } + if ichek.TextField(propName) { + texts = append(texts, val) + } + case []string: + if ichek.TextField(propName) { + texts = append(texts, val...) + } + default: // properties that are not part of the object + + } + } + + } + + vectors := [][]float32{} + if len(texts) > 0 || len(images) > 0 { + res, err := v.client.Vectorize(ctx, texts, images, cfg) + if err != nil { + return nil, err + } + vectors = append(vectors, res.TextVectors...) + vectors = append(vectors, res.ImageVectors...) + } + weights, err := v.getWeights(ichek) + if err != nil { + return nil, err + } + + return libvectorizer.CombineVectorsWithWeights(vectors, weights), nil +} + +func (v *Vectorizer) getWeights(ichek ClassSettings) ([]float32, error) { + weights := []float32{} + textFieldsWeights, err := ichek.TextFieldsWeights() + if err != nil { + return nil, err + } + imageFieldsWeights, err := ichek.ImageFieldsWeights() + if err != nil { + return nil, err + } + + weights = append(weights, textFieldsWeights...) + weights = append(weights, imageFieldsWeights...) 
+ + normalizedWeights := v.normalizeWeights(weights) + + return normalizedWeights, nil +} + +func (v *Vectorizer) normalizeWeights(weights []float32) []float32 { + if len(weights) > 0 { + var denominator float32 + for i := range weights { + denominator += weights[i] + } + normalizer := 1 / denominator + normalized := make([]float32, len(weights)) + for i := range weights { + normalized[i] = weights[i] * normalizer + } + return normalized + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/vectorizer/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/vectorizer/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..304ff48b7de73a4aa12b8b3e5795e6296e5e0115 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-cohere/vectorizer/vectorizer_test.go @@ -0,0 +1,194 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +const image = "iVBORw0KGgoAAAANSUhEUgAAAGAAAAA/CAYAAAAfQM0aAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpCRjQ5NEM3RDI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpCRjQ5NEM3RTI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOkJGNDk0QzdCMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOkJGNDk0QzdDMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+WeGRxAAAB2hJREFUeNrUXFtslUUQ3hJCoQVEKy0k1qQgrRg0vaAJaq1tvJSgaLy8mKDF2IvxBY2Bgm8+iIoxvhB72tTUmKgPigbFKCEtxeKD9hZjAi3GJrYJtqRai7TQB+pMz/zwU/5zzsxe2u4kXwiwZ+bb/Xb/s7v/zEmrra1VTFsFeBRQCtgEuBWwkv5vHPAn4DdAB+B7wBjXcUNDQ8o2dXV1SmDzyhUtLS3tBPyxC9CdrN1ihi/swKuA7YD0BG1uJhQDngdcAnwDeJ86Ole2kLii+J2AFsA+wF9RjRalmEUHaZY8m6RDUYZtn6HPHiRfLm2hck0D7AScAdRH8UokwD2AnwA7UoiUyhaRD/S12dHg+8B1OWA/4BTgqVQCPEJL8haLBNDXEfJt03ziipYH+BJwHFAYJcAWwCeAZQ6CLyPfWyz584nrbCuj74eHwgKsddih2R1ba+jHJ65R1k6PuWNhAd4DZM/BTiWbdhwm5hPXsA0AngY8COgNP4JwSTyu4zE/P18VFhZKP7aNYuouXxFX5Ic8Nc2Ea2D/AfYCNgIORZ0DdusOfnFxcXDw
UD09PZKP76alKDUR16KiIlVQUHDl7/39/Uozpg7Xac45YB0dGrQHHw07KVwJpRRbYiKuyCc8+MhXcyXocP2RnvMvJhr8QIBK08EPbGJiQuqq0mX7KD4GIohi4xVPTU0N6/BRamPwu7u7dZb3/RozkW3IB3lZEkGHayeI8FFVVdWaZAIUcD2Wl5fbHHy024XtC6QBkomA/XHIFb8X0Xamp6efASHqt27dGnkVkcNxVlFRoXJycmwOvuLGNmifVATsD/bLZezgKgKE2J+bm3sKHk3XXUWs4Mz87Oxs24OvOLEN26cUAfvFXAkrlKGBCDNXEbAajldXV1+5ijjP+KCrg855x+3nk2uy8SwDdIIIM1cRI6k+0NraqkZGRmzuKAIbFrYf0Q2UaPOA/Wpra3PBNfHhYHq6HbC5qanpGB7ETgPWc0TApTr7eyDolOaj6LRG+/W2Bn94eJg7+DpcowZ+AGb+642NjYfC3wEdXAdI1uK2Du2ksH2HrcHHfggGX4frNVcRMPh7BwcHN8ZiseuuIr4DvKXib29YX2bhmW+wEqYptsREXC2eWXS44oyfuYqYmpra19LSEnkaRgEG6Nj8gGRHESVCRkaG9Kg+IOyTiGtmZqatnZsOV/zMLnjcsF7KH5AIECVCX1+f6u3tlbg4oLmc2VyDy8HgPshg2yzmCo8aFsdAALzpw9dw23REwJkvHPwjSu92UcwVRcAnAd4LaQ6+CVe2AGivAe5WwhcdGp0aoVgmJuIqnBy2uSa18Buxs4AXAJMO401SjLOGfnziyhYg2GrtcNSxSfJ90pI/n7iyBUA7quKv/IYsxhmiZ/ZRy/x94soWAO1nwL0qnhVw2cD/ZfKBvjod9cEnrmwB0DBh9RUVfxHxhYrnUHLtEn2mlHyMOe6HT1wT7oISGSas4ntNzJmsVFczjnMBN1CbfwGD1BYPID8A/lFzbz5xZQsQnmWfExa6ecNVIsBKWuIlgA0qnjG2PLhsou0aZgF3qfil2fg89ssbrhwBNtB+GN/dLUnQ5kbCHYAnAFMAvGpsoY7OlS0krmOhxx7WLHwAeBLwVahN2uIUswgrPB5T8rRv7DxWqDwM+JaCjzue8b5wZe2C7gJ8quKVJqY599vJ1yZHffCJK0uA+wAfAtZYjIO+Gsi3TfOJK0sAfFP/jpKV+HBtKfkutOTPJ64sAVYD3qXgrmwpxVht6McnrmwBMAP4pjlYdRij3tCHT1xZAuDdermOA836gDKKqWNirob1ASZc2eeAl3QH36A+AGP+ohFWxNVSfYAuV9YKyKUTo/bgo2nUB5RQbImJuFqsD9DhyhbAuDgjMI36gFKX7S3XB5S6egSV2Bh8zYyDYjr4SGYi2yzmMIm5YnFGkFOLSQGNjY3X/BtaLBabWQF5XKcO6gOkZT950gAW6wPWuXoEZXEaOqoPyHLcPqkIwvqALFcCZHJmvqP6gEzH7VOKIKgPyHQlwIVUjRzWB1xw3H4+ubIFGE3VyGF9wKjj9ik3D4L6gFFXArCSTlEEzKe3LMIfwvYDNgcf+4P9csSVLUAXt7GD+oBuYfsuW4OvUR/Q7UoA/G2zaRvbOqEI0xRbYiKulusDTrgSYEg6sxKJIKwP6FLyjDYRV4v1ATpc2QKgNZtu6zTqA5o1ObM/h5eDyMvCtrlZObLgNhRv+jAHvkwqQjDzhYPfrvRvF0VcLdQHaHGNxWKrZv0d//hahcqr8Ccww1kRbwPuVMIXHRqd+ptimZiIq0F9gA2urEcQ2jkVf/tz0WG8ixTjnKEfn7iyBQi2WnuULLlV0qE9FrdzPnFlC4CGRQkvqyQ/MqRh6KtO2S948IkrWwC0XwHPAQ4r85z7w+TL1U8Y+8Q14S4oyjA9703AZ4AqFX8RvoTpN8i3/Bi/p+egHz5xZQsQGCasvqGuZhzj76DdpuIZx8FPuOAviWDG8e8qXl0yXxnHPnGdsf8FGAByGwC02iMZswAAAABJRU5ErkJggg==" + 
+func TestVectorizer(t *testing.T) { + t.Run("should vectorize image", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image"}).build() + + props := map[string]interface{}{ + "image": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) + + t.Run("should vectorize 2 image fields", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image1", "image2"}).build() + + props := map[string]interface{}{ + "image1": image, + "image2": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) +} + +func TestVectorizerWithDiff(t *testing.T) { + type testCase struct { + name string + input *models.Object + } + + props := map[string]interface{}{ + "image": image, + "text": "text", + "description": "non-vectorizable", + } + + tests := []testCase{ + { + name: "noop comp", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + { + name: "one vectorizable prop changed (1)", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + { + name: "one vectorizable prop changed (2)", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder(). + addSetting("imageFields", []interface{}{"image"}). + addSetting("textFields", []interface{}{"text"}). 
+ build() + + vector, _, err := vectorizer.Object(context.Background(), test.input, config) + + require.Nil(t, err) + assert.Equal(t, []float32{5.5, 11, 16.5, 22, 27.5}, vector) + }) + } +} + +func TestVectorizerWithWeights(t *testing.T) { + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder(). + addSetting("imageFields", []interface{}{"image"}). + addSetting("textFields", []interface{}{"text"}). + addWeights([]interface{}{0.4}, []interface{}{0.6}). + build() + + props := map[string]interface{}{ + "image": image, + "text": "text", + "description": "non-vectorizable", + } + input := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + vector, _, err := vectorizer.Object(context.Background(), input, config) + + require.Nil(t, err) + assert.Equal(t, []float32{3.2, 6.4, 9.6, 12.8, 16}, vector) + // vectors are defined in Vectorize within fakes_for_test.go + // result calculated with above weights as (textVectors[0][i]*0.4+imageVectors[0][i]*0.6) / 2 +} + +func TestVectorizer_normalizeWeights(t *testing.T) { + tests := []struct { + name string + weights []float32 + }{ + { + name: "normalize example 1", + weights: []float32{200, 100, 0.1}, + }, + { + name: "normalize example 2", + weights: []float32{300.22, 0.7, 17, 54}, + }, + { + name: "normalize example 3", + weights: []float32{300, 0.02, 17}, + }, + { + name: "normalize example 4", + weights: []float32{500, 0.02, 17.4, 180}, + }, + { + name: "normalize example 5", + weights: []float32{500, 0.02, 17.4, 2, 4, 5, .88}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + v := &Vectorizer{} + if got := v.normalizeWeights(tt.weights); !checkNormalization(got) { + t.Errorf("Vectorizer.normalizeWeights() = %v, want %v", got, 1.0) + } + }) + } +} + +func checkNormalization(weights []float32) bool { + var result float32 + for i := range weights { + result += weights[i] + } + return result == 1.0 +} diff --git 
// MetaClient is a stub meta client used by the dummy test module.
type MetaClient struct{}

// MetaInfo returns a static metadata map identifying the dummy module.
func (c MetaClient) MetaInfo() (map[string]interface{}, error) {
	meta := map[string]interface{}{
		"name": "Dummy module for tests",
	}
	return meta, nil
}
// VectorizationConfig carries the per-class module settings used to
// build a vectorization request.
type VectorizationConfig struct {
	Location             string
	ProjectID            string
	Model                string
	Dimensions           int64
	VideoIntervalSeconds int64
}

// VectorizationResult groups the embeddings returned for each
// supported input modality.
type VectorizationResult struct {
	TextVectors    [][]float32
	ImageVectors   [][]float32
	AudioVectors   [][]float32
	VideoVectors   [][]float32
	IMUVectors     [][]float32
	ThermalVectors [][]float32
	DepthVectors   [][]float32
}
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/apikey" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/multi2vec-google/ent" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +func buildURL(location, projectID, model string) string { + return fmt.Sprintf("https://%s-aiplatform.googleapis.com/v1/projects/%s/locations/%s/publishers/google/models/%s:predict", + location, projectID, location, model) +} + +type google struct { + apiKey string + useGoogleAuth bool + googleApiKey *apikey.GoogleApiKey + httpClient *http.Client + urlBuilderFn func(location, projectID, model string) string + logger logrus.FieldLogger +} + +func New(apiKey string, useGoogleAuth bool, timeout time.Duration, logger logrus.FieldLogger) *google { + return &google{ + apiKey: apiKey, + useGoogleAuth: useGoogleAuth, + googleApiKey: apikey.NewGoogleApiKey(), + httpClient: &http.Client{ + Timeout: timeout, + }, + urlBuilderFn: buildURL, + logger: logger, + } +} + +func (v *google) Vectorize(ctx context.Context, + texts, images, videos []string, config ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + return v.vectorize(ctx, texts, images, videos, config) +} + +func (v *google) VectorizeQuery(ctx context.Context, input []string, + config ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + return v.vectorize(ctx, input, nil, nil, config) +} + +func (v *google) vectorize(ctx context.Context, + texts, images, videos []string, config ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + var textEmbeddings [][]float32 + var imageEmbeddings [][]float32 + var videoEmbeddings [][]float32 + endpointURL := v.getURL(config) + maxCount := max(len(texts), len(images), len(videos)) + for i := 0; i < maxCount; i++ { + text := v.safelyGet(texts, i) 
+ image := v.safelyGet(images, i) + video := v.safelyGet(videos, i) + payload := v.getPayload(text, image, video, config) + statusCode, res, err := v.sendRequest(ctx, endpointURL, payload) + if err != nil { + return nil, err + } + textVectors, imageVectors, videoVectors, err := v.getEmbeddingsFromResponse(statusCode, res) + if err != nil { + return nil, err + } + textEmbeddings = append(textEmbeddings, textVectors...) + imageEmbeddings = append(imageEmbeddings, imageVectors...) + videoEmbeddings = append(videoEmbeddings, videoVectors...) + } + + return v.getResponse(textEmbeddings, imageEmbeddings, videoEmbeddings) +} + +func (v *google) safelyGet(input []string, i int) string { + if i < len(input) { + return input[i] + } + return "" +} + +func (v *google) sendRequest(ctx context.Context, + endpointURL string, payload embeddingsRequest, +) (int, embeddingsResponse, error) { + body, err := json.Marshal(payload) + if err != nil { + return 0, embeddingsResponse{}, errors.Wrapf(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", endpointURL, + bytes.NewReader(body)) + if err != nil { + return 0, embeddingsResponse{}, errors.Wrap(err, "create POST request") + } + + apiKey, err := v.getApiKey(ctx) + if err != nil { + return 0, embeddingsResponse{}, errors.Wrapf(err, "Google API Key") + } + req.Header.Add("Content-Type", "application/json; charset=utf-8") + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey)) + + res, err := v.httpClient.Do(req) + if err != nil { + return 0, embeddingsResponse{}, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return 0, embeddingsResponse{}, errors.Wrap(err, "read response body") + } + + var resBody embeddingsResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return 0, embeddingsResponse{}, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + return res.StatusCode, resBody, nil +} + +func (v *google) getURL(config ent.VectorizationConfig) string { + return v.urlBuilderFn(config.Location, config.ProjectID, config.Model) +} + +func (v *google) getPayload(text, img, vid string, config ent.VectorizationConfig) embeddingsRequest { + inst := instance{} + if text != "" { + inst.Text = &text + } + if img != "" { + inst.Image = &image{BytesBase64Encoded: img} + } + if vid != "" { + inst.Video = &video{ + BytesBase64Encoded: vid, + VideoSegmentConfig: videoSegmentConfig{IntervalSec: &config.VideoIntervalSeconds}, + } + } + req := embeddingsRequest{ + Instances: []instance{inst}, + } + if inst.Video == nil { + req.Parameters = parameters{Dimension: config.Dimensions} + } + return req +} + +func (v *google) checkResponse(statusCode int, googleApiError *googleApiError) error { + if statusCode != 200 || googleApiError != nil { + if googleApiError != nil { + return fmt.Errorf("connection to Google failed with status: %v error: %v", + statusCode, googleApiError.Message) + } + return fmt.Errorf("connection to Google failed with status: %d", statusCode) + } + return nil +} + +func (v *google) getApiKey(ctx context.Context) (string, error) { + return v.googleApiKey.GetApiKey(ctx, v.apiKey, false, v.useGoogleAuth) +} + +func (v *google) getEmbeddingsFromResponse(statusCode int, resBody embeddingsResponse) ( + textEmbeddings [][]float32, + imageEmbeddings [][]float32, + videoEmbeddings [][]float32, + err error, +) { + if respErr := v.checkResponse(statusCode, resBody.Error); respErr != nil { + err = respErr + return + } + + if len(resBody.Predictions) == 0 { + err = errors.Errorf("empty embeddings response") + return + } + + for _, p := range resBody.Predictions { + if len(p.TextEmbedding) > 0 { + textEmbeddings = append(textEmbeddings, p.TextEmbedding) + } + if len(p.ImageEmbedding) > 0 { + imageEmbeddings = append(imageEmbeddings, p.ImageEmbedding) + } + if len(p.VideoEmbeddings) > 0 
{ + var embeddings [][]float32 + for _, videoEmbedding := range p.VideoEmbeddings { + embeddings = append(embeddings, videoEmbedding.Embedding) + } + embedding := embeddings[0] + if len(embeddings) > 1 { + embedding = libvectorizer.CombineVectors(embeddings) + } + videoEmbeddings = append(videoEmbeddings, embedding) + } + } + return +} + +func (v *google) getResponse(textVectors, imageVectors, videoVectors [][]float32) (*ent.VectorizationResult, error) { + return &ent.VectorizationResult{ + TextVectors: textVectors, + ImageVectors: imageVectors, + VideoVectors: videoVectors, + }, nil +} + +type embeddingsRequest struct { + Instances []instance `json:"instances,omitempty"` + Parameters parameters `json:"parameters,omitempty"` +} + +type parameters struct { + Dimension int64 `json:"dimension,omitempty"` +} + +type instance struct { + Text *string `json:"text,omitempty"` + Image *image `json:"image,omitempty"` + Video *video `json:"video,omitempty"` +} + +type image struct { + BytesBase64Encoded string `json:"bytesBase64Encoded"` +} + +type video struct { + BytesBase64Encoded string `json:"bytesBase64Encoded"` + VideoSegmentConfig videoSegmentConfig `json:"videoSegmentConfig"` +} + +type videoSegmentConfig struct { + StartOffsetSec *int64 `json:"startOffsetSec,omitempty"` + EndOffsetSec *int64 `json:"endOffsetSec,omitempty"` + IntervalSec *int64 `json:"intervalSec,omitempty"` +} + +type embeddingsResponse struct { + Predictions []prediction `json:"predictions,omitempty"` + Error *googleApiError `json:"error,omitempty"` + DeployedModelId string `json:"deployedModelId,omitempty"` +} + +type prediction struct { + TextEmbedding []float32 `json:"textEmbedding,omitempty"` + ImageEmbedding []float32 `json:"imageEmbedding,omitempty"` + VideoEmbeddings []videoEmbedding `json:"videoEmbeddings,omitempty"` +} + +type videoEmbedding struct { + StartOffsetSec *int64 `json:"startOffsetSec,omitempty"` + EndOffsetSec *int64 `json:"endOffsetSec,omitempty"` + Embedding []float32 
// googleApiError mirrors the error object embedded in a Google API
// JSON response.
type googleApiError struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
	Status  string `json:"status"`
}

// TestClient exercises the google client against a local fake HTTP
// handler: text and image vectorization, context expiry, server-side
// errors, and API-key resolution from headers / environment.
func TestClient(t *testing.T) {
	t.Run("when all is fine we vectorize text", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		// urlBuilderFn both redirects traffic to the fake server and
		// asserts that the config values are forwarded unchanged.
		c := &google{
			apiKey:       "apiKey",
			httpClient:   &http.Client{},
			googleApiKey: apikey.NewGoogleApiKey(),
			urlBuilderFn: func(location, projectID, model string) string {
				assert.Equal(t, "location", location)
				assert.Equal(t, "project", projectID)
				assert.Equal(t, "model", model)
				return server.URL
			},
			logger: nullLogger(),
		}
		expected := &ent.VectorizationResult{
			TextVectors: [][]float32{{0.1, 0.2, 0.3}},
		}
		res, err := c.Vectorize(context.Background(), []string{"This is my text"}, nil, nil,
			ent.VectorizationConfig{
				Location:  "location",
				ProjectID: "project",
				Model:     "model",
			})

		assert.Nil(t, err)
		assert.Equal(t, expected, res)
	})

	t.Run("when all is fine we vectorize image", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &google{
			apiKey:       "apiKey",
			httpClient:   &http.Client{},
			googleApiKey: apikey.NewGoogleApiKey(),
			urlBuilderFn: func(location, projectID, model string) string {
				assert.Equal(t, "location", location)
				assert.Equal(t, "project", projectID)
				assert.Equal(t, "model", model)
				return server.URL
			},
			logger: nullLogger(),
		}
		expected := &ent.VectorizationResult{
			ImageVectors: [][]float32{{0.1, 0.2, 0.3}},
		}
		res, err := c.Vectorize(context.Background(), nil, []string{"base64 encoded image"}, nil,
			ent.VectorizationConfig{
				Location:  "location",
				ProjectID: "project",
				Model:     "model",
			})

		assert.Nil(t, err)
		assert.Equal(t, expected, res)
	})

	t.Run("when the context is expired", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &google{
			apiKey:       "apiKey",
			httpClient:   &http.Client{},
			googleApiKey: apikey.NewGoogleApiKey(),
			urlBuilderFn: func(location, projectID, model string) string {
				return server.URL
			},
			logger: nullLogger(),
		}
		// deadline of "now" guarantees the request fails before it is
		// sent
		ctx, cancel := context.WithDeadline(context.Background(), time.Now())
		defer cancel()

		_, err := c.Vectorize(ctx, []string{"This is my text"}, nil, nil, ent.VectorizationConfig{})

		require.NotNil(t, err)
		assert.Contains(t, err.Error(), "context deadline exceeded")
	})

	t.Run("when the server returns an error", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{
			t:           t,
			serverError: errors.Errorf("nope, not gonna happen"),
		})
		defer server.Close()
		c := &google{
			apiKey:     "apiKey",
			httpClient: &http.Client{},
			urlBuilderFn: func(location, projectID, model string) string {
				return server.URL
			},
			logger: nullLogger(),
		}
		_, err := c.Vectorize(context.Background(), []string{"This is my text"}, nil, nil,
			ent.VectorizationConfig{})

		require.NotNil(t, err)
		assert.EqualError(t, err, "connection to Google failed with status: 500 error: nope, not gonna happen")
	})

	t.Run("when Palm key is passed using X-Palm-Api-Key header", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		// no static apiKey: the key must be resolved from the request
		// context header
		c := &google{
			apiKey:       "",
			httpClient:   &http.Client{},
			googleApiKey: apikey.NewGoogleApiKey(),
			urlBuilderFn: func(location, projectID, model string) string {
				return server.URL
			},
			logger: nullLogger(),
		}
		ctxWithValue := context.WithValue(context.Background(),
			"X-Palm-Api-Key", []string{"some-key"})

		expected := &ent.VectorizationResult{
			TextVectors: [][]float32{{0.1, 0.2, 0.3}},
		}
		res, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, nil, nil, ent.VectorizationConfig{})

		require.Nil(t, err)
		assert.Equal(t, expected, res)
	})

	t.Run("when Palm key is empty", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &google{
			apiKey:       "",
			httpClient:   &http.Client{},
			googleApiKey: apikey.NewGoogleApiKey(),
			urlBuilderFn: func(location, projectID, model string) string {
				return server.URL
			},
			logger: nullLogger(),
		}
		ctx, cancel := context.WithDeadline(context.Background(), time.Now())
		defer cancel()

		_, err := c.Vectorize(ctx, []string{"This is my text"}, nil, nil, ent.VectorizationConfig{})

		require.NotNil(t, err)
		assert.Equal(t, "Google API Key: no api key found "+
			"neither in request header: X-Palm-Api-Key or X-Goog-Api-Key or X-Goog-Vertex-Api-Key or X-Goog-Studio-Api-Key "+
			"nor in environment variable under PALM_APIKEY or GOOGLE_APIKEY", err.Error())
	})

	t.Run("when X-Palm-Api-Key header is passed but empty", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &google{
			apiKey:       "",
			googleApiKey: apikey.NewGoogleApiKey(),
			httpClient:   &http.Client{},
			urlBuilderFn: buildURL,
			logger:       nullLogger(),
		}
		// an empty header value must be treated the same as a missing
		// key
		ctxWithValue := context.WithValue(context.Background(),
			"X-Palm-Api-Key", []string{""})

		_, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, nil, nil, ent.VectorizationConfig{})

		require.NotNil(t, err)
		assert.Equal(t, "Google API Key: no api key found "+
			"neither in request header: X-Palm-Api-Key or X-Goog-Api-Key or X-Goog-Vertex-Api-Key or X-Goog-Studio-Api-Key "+
			"nor in environment variable under PALM_APIKEY or GOOGLE_APIKEY", err.Error())
	})
}

// fakeHandler simulates the Google predict endpoint. When serverError
// is set it replies with a 500 and the error wrapped in the API's
// error envelope; otherwise it echoes a fixed embedding for whichever
// modality the request carried.
type fakeHandler struct {
	t           *testing.T
	serverError error
}

func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	assert.Equal(f.t, http.MethodPost, r.Method)

	if f.serverError != nil {
		embeddingResponse := &embeddingsResponse{
			Error: &googleApiError{
				Code:    http.StatusInternalServerError,
				Status:  "error",
				Message: f.serverError.Error(),
			},
		}

		outBytes, err := json.Marshal(embeddingResponse)
		require.Nil(f.t, err)

		w.WriteHeader(http.StatusInternalServerError)
		w.Write(outBytes)
		return
	}

	bodyBytes, err := io.ReadAll(r.Body)
	require.Nil(f.t, err)
	defer r.Body.Close()

	var req embeddingsRequest
	require.Nil(f.t, json.Unmarshal(bodyBytes, &req))

	require.NotNil(f.t, req)
	require.Len(f.t, req.Instances, 1)

	textInput := req.Instances[0].Text
	if textInput != nil {
		assert.NotEmpty(f.t, *textInput)
	}
	imageInput := req.Instances[0].Image
	if imageInput != nil {
		assert.NotEmpty(f.t, *imageInput)
	}

	// fixed embedding returned for every request
	embedding := []float32{0.1, 0.2, 0.3}

	var resp embeddingsResponse

	if textInput != nil {
		resp = embeddingsResponse{
			Predictions: []prediction{{TextEmbedding: embedding}},
		}
	}
	if imageInput != nil {
		resp = embeddingsResponse{
			Predictions: []prediction{{ImageEmbedding: embedding}},
		}
	}

	outBytes, err := json.Marshal(resp)
	require.Nil(f.t, err)

	w.Write(outBytes)
}

// nullLogger returns a logger that discards all output.
func nullLogger() logrus.FieldLogger {
	l, _ := test.NewNullLogger()
	return l
}

// MetaInfo reports static module metadata for the meta endpoint.
func (v *google) MetaInfo() (map[string]interface{}, error) {
	return map[string]interface{}{
		"name":              "Google Multimodal Module",
		"documentationHref": "https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-multimodal-embeddings",
	}, nil
}
+// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/multi2vec-google/vectorizer" +) + +func (m *Module) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *Module) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *Module) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + icheck := vectorizer.NewClassSettings(cfg) + return icheck.Validate() +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/ent/vectorization_config.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/ent/vectorization_config.go new file mode 100644 index 0000000000000000000000000000000000000000..23edbc361cd9de39d610b0d46595f01567fadd77 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/ent/vectorization_config.go @@ -0,0 +1,20 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
// VectorizationConfig carries the per-class module settings used to
// build a Google vectorization request.
type VectorizationConfig struct {
	Location             string
	ProjectID            string
	Model                string
	Dimensions           int64
	VideoIntervalSeconds int64
}

// VectorizationResult groups the embeddings returned for each
// supported input modality.
type VectorizationResult struct {
	TextVectors    [][]float32
	ImageVectors   [][]float32
	AudioVectors   [][]float32
	VideoVectors   [][]float32
	IMUVectors     [][]float32
	ThermalVectors [][]float32
	DepthVectors   [][]float32
}
+// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "context" + "os" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + entcfg "github.com/weaviate/weaviate/entities/config" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-google/clients" + "github.com/weaviate/weaviate/modules/multi2vec-google/vectorizer" +) + +const ( + Name = "multi2vec-google" + LegacyName = "multi2vec-palm" +) + +func New() *Module { + return &Module{} +} + +type Module struct { + imageVectorizer imageVectorizer + nearImageGraphqlProvider modulecapabilities.GraphQLArguments + nearImageSearcher modulecapabilities.Searcher[[]float32] + textVectorizer textVectorizer + nearTextGraphqlProvider modulecapabilities.GraphQLArguments + nearTextSearcher modulecapabilities.Searcher[[]float32] + nearVideoGraphqlProvider modulecapabilities.GraphQLArguments + videoVectorizer videoVectorizer + nearVideoSearcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + metaClient metaClient + logger logrus.FieldLogger +} + +type metaClient interface { + MetaInfo() (map[string]interface{}, error) +} + +type imageVectorizer interface { + Object(ctx context.Context, obj *models.Object, + cfg moduletools.ClassConfig) ([]float32, models.AdditionalProperties, error) + VectorizeImage(ctx context.Context, id, image string, cfg moduletools.ClassConfig) ([]float32, error) +} + +type textVectorizer interface { + Texts(ctx context.Context, input []string, + cfg moduletools.ClassConfig) ([]float32, error) +} + +type videoVectorizer interface { + VectorizeVideo(ctx context.Context, + video string, cfg moduletools.ClassConfig) ([]float32, error) +} + +func (m *Module) Name() string { + return Name +} + +func (m *Module) 
AltNames() []string { + return []string{LegacyName} +} + +func (m *Module) Type() modulecapabilities.ModuleType { + return modulecapabilities.Multi2Vec +} + +func (m *Module) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initNearImage(); err != nil { + return errors.Wrap(err, "init near image") + } + + if err := m.initNearVideo(); err != nil { + return errors.Wrap(err, "init near video") + } + + return nil +} + +func (m *Module) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init near text") + } + + return nil +} + +func (m *Module) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("GOOGLE_APIKEY") + if apiKey == "" { + apiKey = os.Getenv("PALM_APIKEY") + } + useGoogleAuth := entcfg.Enabled(os.Getenv("USE_GOOGLE_AUTH")) + client := clients.New(apiKey, useGoogleAuth, timeout, logger) + + m.imageVectorizer = vectorizer.New(client) + m.textVectorizer = vectorizer.New(client) + m.videoVectorizer = vectorizer.New(client) + m.metaClient = client + + return nil +} + +func (m *Module) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.imageVectorizer.Object(ctx, obj, cfg) +} + +func (m *Module) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, 
[]models.AdditionalProperties, map[int]error) { + return batch.VectorizeBatch(ctx, objs, skipObject, cfg, m.logger, m.imageVectorizer.Object) +} + +func (m *Module) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + ichek := vectorizer.NewClassSettings(cfg) + mediaProps, err := ichek.Properties() + return false, mediaProps, err +} + +func (m *Module) MetaInfo() (map[string]interface{}, error) { + return m.metaClient.MetaInfo() +} + +func (m *Module) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.textVectorizer.Texts(ctx, []string{input}, cfg) +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.InputVectorizer[[]float32](New()) + _ = modulecapabilities.ModuleHasAltNames(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/nearArguments.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/nearArguments.go new file mode 100644 index 0000000000000000000000000000000000000000..a91921d16dfd3f913909a468b9579481aa5b5f89 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/nearArguments.go @@ -0,0 +1,70 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearImage" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearVideo" +) + +func (m *Module) initNearImage() error { + m.nearImageSearcher = nearImage.NewSearcher(m.imageVectorizer) + m.nearImageGraphqlProvider = nearImage.New() + return nil +} + +func (m *Module) initNearText() error { + m.nearTextSearcher = nearText.NewSearcher(m.textVectorizer) + m.nearTextGraphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *Module) initNearVideo() error { + m.nearVideoSearcher = nearVideo.NewSearcher(m.videoVectorizer) + m.nearVideoGraphqlProvider = nearVideo.New() + return nil +} + +func (m *Module) Arguments() map[string]modulecapabilities.GraphQLArgument { + arguments := map[string]modulecapabilities.GraphQLArgument{} + for name, arg := range m.nearImageGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearTextGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearVideoGraphqlProvider.Arguments() { + arguments[name] = arg + } + return arguments +} + +func (m *Module) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearches := map[string]modulecapabilities.VectorForParams[[]float32]{} + for name, arg := range m.nearImageSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearTextSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearVideoSearcher.VectorSearches() { + vectorSearches[name] = arg + } + return vectorSearches +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git 
a/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..508d7606303168739ce23794601dfa4fc6d50a18 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/class_settings.go @@ -0,0 +1,340 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "fmt" + "slices" + "strings" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + locationProperty = "location" + projectIDProperty = "projectId" + modelIDProperty = "modelId" + dimensionsProperty = "dimensions" + videoIntervalSecondsProperty = "videoIntervalSeconds" +) + +const ( + DefaultVectorizeClassName = false + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + DefaultApiEndpoint = "us-central1-aiplatform.googleapis.com" + DefaultModelID = "multimodalembedding@001" +) + +var ( + defaultDimensions1408 = int64(1408) + availableDimensions = []int64{128, 256, 512, defaultDimensions1408} + defaultVideoIntervalSeconds = int64(120) + availableVideoIntervalSeconds = []int64{4, 8, 15, defaultVideoIntervalSeconds} +) + +type classSettings struct { + base *basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{ + cfg: cfg, + base: basesettings.NewBaseClassSettingsWithAltNames(cfg, false, "multi2vec-google", []string{"multi2vec-palm"}, nil), + } +} + +// Google params +func (ic 
*classSettings) Location() string { + return ic.getStringProperty(locationProperty, "") +} + +func (ic *classSettings) ProjectID() string { + return ic.getStringProperty(projectIDProperty, "") +} + +func (ic *classSettings) ModelID() string { + return ic.getStringProperty(modelIDProperty, DefaultModelID) +} + +func (ic *classSettings) Dimensions() int64 { + return ic.getInt64Property(dimensionsProperty, defaultDimensions1408) +} + +func (ic *classSettings) VideoIntervalSeconds() int64 { + return ic.getInt64Property(videoIntervalSecondsProperty, defaultVideoIntervalSeconds) +} + +// CLIP module specific settings +func (ic *classSettings) ImageField(property string) bool { + return ic.field("imageFields", property) +} + +func (ic *classSettings) ImageFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("image") +} + +func (ic *classSettings) TextField(property string) bool { + return ic.field("textFields", property) +} + +func (ic *classSettings) TextFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("text") +} + +func (ic *classSettings) VideoField(property string) bool { + return ic.field("videoFields", property) +} + +func (ic *classSettings) VideoFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("video") +} + +func (ic *classSettings) Properties() ([]string, error) { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return nil, errors.New("empty config") + } + props := make([]string, 0) + + fields := []string{"textFields", "imageFields", "videoFields"} + + for _, field := range fields { + fields, ok := ic.base.GetSettings()[field] + if !ok { + continue + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return nil, errors.Errorf("%s must be an array", field) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return nil, errors.Errorf("%s must be a string", field) + } + props = append(props, v) + } + } + return props, nil +} + 
+func (ic *classSettings) field(name, property string) bool { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return false + } + + fields, ok := ic.base.GetSettings()[name] + if !ok { + return false + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return false + } + + fieldNames := make([]string, len(fieldsArray)) + for i, value := range fieldsArray { + fieldNames[i] = value.(string) + } + + for i := range fieldNames { + if fieldNames[i] == property { + return true + } + } + + return false +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) string { + return ic.base.GetPropertyAsString(name, defaultValue) +} + +func (ic *classSettings) getInt64Property(name string, defaultValue int64) int64 { + if val := ic.base.GetPropertyAsInt64(name, &defaultValue); val != nil { + return *val + } + return defaultValue +} + +func (ic *classSettings) Validate() error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + var errorMessages []string + + model := ic.ModelID() + location := ic.Location() + if location == "" { + errorMessages = append(errorMessages, "location setting needs to be present") + } + + projectID := ic.ProjectID() + if projectID == "" { + errorMessages = append(errorMessages, "projectId setting needs to be present") + } + + dimensions := ic.Dimensions() + if !validateSetting(dimensions, availableDimensions) { + return errors.Errorf("wrong dimensions setting for %s model, available dimensions are: %v", model, availableDimensions) + } + + videoIntervalSeconds := ic.VideoIntervalSeconds() + if !validateSetting(videoIntervalSeconds, availableVideoIntervalSeconds) { + return errors.Errorf("wrong videoIntervalSeconds setting for %s model, available videoIntervalSeconds are: %v", model, availableVideoIntervalSeconds) + } + + imageFields, imageFieldsOk := ic.cfg.Class()["imageFields"] + 
textFields, textFieldsOk := ic.cfg.Class()["textFields"] + videoFields, videoFieldsOk := ic.cfg.Class()["videoFields"] + if !imageFieldsOk && !textFieldsOk && !videoFieldsOk { + errorMessages = append(errorMessages, "textFields or imageFields or videoFields setting needs to be present") + } + + if videoFieldsOk && dimensions != defaultDimensions1408 { + errorMessages = append(errorMessages, fmt.Sprintf("videoFields support only %d dimensions setting", defaultDimensions1408)) + } + + if imageFieldsOk { + imageFieldsCount, err := ic.validateFields("image", imageFields) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + err = ic.validateWeights("image", imageFieldsCount) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + } + + if textFieldsOk { + textFieldsCount, err := ic.validateFields("text", textFields) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + err = ic.validateWeights("text", textFieldsCount) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + } + + if videoFieldsOk { + videoFieldsCount, err := ic.validateFields("video", videoFields) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + err = ic.validateWeights("video", videoFieldsCount) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + } + + if len(errorMessages) > 0 { + return fmt.Errorf("%s", strings.Join(errorMessages, ", ")) + } + + return nil +} + +func (ic *classSettings) validateFields(name string, fields interface{}) (int, error) { + fieldsArray, ok := fields.([]interface{}) + if !ok { + return 0, errors.Errorf("%sFields must be an array", name) + } + + if len(fieldsArray) == 0 { + return 0, errors.Errorf("must contain at least one %s field name in %sFields", name, name) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return 0, errors.Errorf("%sField must be a string", name) + } + if len(v) == 0 { + return 0, 
errors.Errorf("%sField values cannot be empty", name) + } + } + + return len(fieldsArray), nil +} + +func (ic *classSettings) validateWeights(name string, count int) error { + weights, ok := ic.getWeights(name) + if ok { + if len(weights) != count { + return errors.Errorf("weights.%sFields does not equal number of %sFields", name, name) + } + _, err := ic.getWeightsArray(weights) + if err != nil { + return err + } + } + + return nil +} + +func (ic *classSettings) getWeights(name string) ([]interface{}, bool) { + weights, ok := ic.base.GetSettings()["weights"] + if ok { + weightsObject, ok := weights.(map[string]interface{}) + if ok { + fieldWeights, ok := weightsObject[fmt.Sprintf("%sFields", name)] + if ok { + fieldWeightsArray, ok := fieldWeights.([]interface{}) + if ok { + return fieldWeightsArray, ok + } + } + } + } + + return nil, false +} + +func (ic *classSettings) getWeightsArray(weights []interface{}) ([]float32, error) { + weightsArray := make([]float32, len(weights)) + for i := range weights { + weight, err := ic.getNumber(weights[i]) + if err != nil { + return nil, err + } + weightsArray[i] = weight + } + return weightsArray, nil +} + +func (ic *classSettings) getFieldsWeights(name string) ([]float32, error) { + weights, ok := ic.getWeights(name) + if ok { + return ic.getWeightsArray(weights) + } + return nil, nil +} + +func (ic *classSettings) getNumber(in interface{}) (float32, error) { + return ic.base.GetNumber(in) +} + +func validateSetting[T string | int64](value T, availableValues []T) bool { + return slices.Contains(availableValues, value) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..87e0066654ed19b4abf543e85e8d431a9cfdbb7f --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/class_settings_test.go @@ -0,0 +1,250 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "encoding/json" + "testing" + + "github.com/weaviate/weaviate/entities/moduletools" +) + +func Test_classSettings_Validate(t *testing.T) { + type fields struct { + cfg moduletools.ClassConfig + } + tests := []struct { + name string + fields fields + wantErr bool + }{ + { + name: "should not pass with empty config", + wantErr: true, + }, + { + name: "should not pass with nil config", + fields: fields{ + cfg: nil, + }, + wantErr: true, + }, + { + name: "should not pass with nil imageFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId").addSetting("imageFields", nil).build(), + }, + wantErr: true, + }, + { + name: "should not pass with fault imageFields value", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId").addSetting("imageFields", []string{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty imageFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId").addSetting("imageFields", []interface{}{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty string in imageFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). 
+ addSetting("projectId", "projectId").addSetting("imageFields", []interface{}{""}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with int value in imageFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId").addSetting("imageFields", []interface{}{1.0}).build(), + }, + wantErr: true, + }, + { + name: "should pass with proper value in imageFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId").addSetting("imageFields", []interface{}{"field"}).build(), + }, + }, + { + name: "should pass with proper value in imageFields and textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId"). + addSetting("imageFields", []interface{}{"imageField"}). + addSetting("textFields", []interface{}{"textField"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId"). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId"). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + addWeights([]interface{}{1, 2}, []interface{}{1, 2}). + build(), + }, + }, + { + name: "should pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId"). 
+ addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId"). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + }, + { + name: "should not pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId"). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId"). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, "aaaa"}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId"). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{json.Number("1"), json.Number("2")}, []interface{}{json.Number("3")}). 
+ build(), + }, + }, + { + name: "should not pass with wrong dimensions setting in videoFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId"). + addSetting("videoFields", []interface{}{"video1"}). + addSetting("dimensions", 256). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with wrong dimensions setting in videoFields together with image fields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId"). + addSetting("videoFields", []interface{}{"video1"}). + addSetting("imageFields", []interface{}{"image1"}). + addSetting("dimensions", 512). + build(), + }, + wantErr: true, + }, + { + name: "should pass with proper dimensions setting in videoFields together with image fields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId"). + addSetting("videoFields", []interface{}{"video1"}). + addSetting("imageFields", []interface{}{"image1"}). + addSetting("dimensions", defaultDimensions1408). + build(), + }, + wantErr: false, + }, + { + name: "should not pass with wrong videoIntervalSeconds setting in videoFields together with image fields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("location", "location"). + addSetting("projectId", "projectId"). + addSetting("videoFields", []interface{}{"video1"}). + addSetting("imageFields", []interface{}{"image1"}). + addSetting("videoIntervalSeconds", 7). 
+ build(), + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.fields.cfg) + if err := ic.Validate(); (err != nil) != tt.wantErr { + t.Errorf("classSettings.Validate() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..604e359a84b0343ce8d14a59073941b17f832f12 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/fakes_for_test.go @@ -0,0 +1,97 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/multi2vec-google/ent" + "github.com/weaviate/weaviate/usecases/config" +) + +type builder struct { + fakeClassConfig *fakeClassConfig +} + +func newConfigBuilder() *builder { + return &builder{ + fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}}, + } +} + +func (b *builder) addSetting(name string, value interface{}) *builder { + b.fakeClassConfig.config[name] = value + return b +} + +func (b *builder) addWeights(textWeights, imageWeights []interface{}) *builder { + if textWeights != nil || imageWeights != nil { + weightSettings := map[string]interface{}{} + if textWeights != nil { + weightSettings["textFields"] = textWeights + } + if imageWeights != nil { + weightSettings["imageFields"] = imageWeights + } + b.fakeClassConfig.config["weights"] = weightSettings + } + return b +} + +func (b *builder) build() 
*fakeClassConfig { + return b.fakeClassConfig +} + +type fakeClassConfig struct { + config map[string]interface{} +} + +func (c fakeClassConfig) Class() map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Property(propName string) map[string]interface{} { + return c.config +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +type fakeClient struct{} + +func (c *fakeClient) Vectorize(ctx context.Context, + texts, images, videos []string, config ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + result := &ent.VectorizationResult{ + TextVectors: [][]float32{{1.0, 2.0, 3.0, 4.0, 5.0}}, + ImageVectors: [][]float32{{10.0, 20.0, 30.0, 40.0, 50.0}}, + } + return result, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/texts.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/texts.go new file mode 100644 index 0000000000000000000000000000000000000000..7421a52eb8d4a349e042aa6168a689492c05440f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/texts.go @@ -0,0 +1,33 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + res, err := v.client.Vectorize(ctx, inputs, nil, nil, v.getVectorizationConfig(cfg)) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + if len(inputs) != len(res.TextVectors) { + return nil, errors.New("inputs are not equal to vectors returned") + } + return libvectorizer.CombineVectors(res.TextVectors), nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..8898dee29f143213027caf53e2d50bd24fb95f30 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/vectorizer.go @@ -0,0 +1,188 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-google/ent" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +type Vectorizer struct { + client Client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +type Client interface { + Vectorize(ctx context.Context, + texts, images, videos []string, config ent.VectorizationConfig) (*ent.VectorizationResult, error) +} + +type ClassSettings interface { + ImageField(property string) bool + ImageFieldsWeights() ([]float32, error) + TextField(property string) bool + TextFieldsWeights() ([]float32, error) + VideoField(property string) bool + VideoFieldsWeights() ([]float32, error) +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, + cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) VectorizeImage(ctx context.Context, id, image string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.Vectorize(ctx, nil, []string{image}, nil, v.getVectorizationConfig(cfg)) + if err != nil { + return nil, err + } + if len(res.ImageVectors) != 1 { + return nil, errors.New("empty vector") + } + + return res.ImageVectors[0], nil +} + +func (v *Vectorizer) VectorizeVideo(ctx context.Context, + video string, cfg moduletools.ClassConfig, +) ([]float32, error) { + res, err := v.client.Vectorize(ctx, nil, nil, []string{video}, v.getVectorizationConfig(cfg)) + if err != nil { + return nil, err + } + if 
len(res.VideoVectors) != 1 { + return nil, errors.New("empty vector") + } + + return res.VideoVectors[0], nil +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, + cfg moduletools.ClassConfig, +) ([]float32, error) { + ichek := NewClassSettings(cfg) + + // vectorize image and text + texts := []string{} + images := []string{} + videos := []string{} + + if object.Properties != nil { + schemamap := object.Properties.(map[string]interface{}) + for _, propName := range moduletools.SortStringKeys(schemamap) { + switch val := schemamap[propName].(type) { + case string: + if ichek.ImageField(propName) { + images = append(images, val) + } + if ichek.TextField(propName) { + texts = append(texts, val) + } + if ichek.VideoField(propName) { + videos = append(videos, val) + } + + case []string: + if ichek.TextField(propName) { + texts = append(texts, val...) + } + default: // properties that are not part of the object + + } + } + + } + + vectors := [][]float32{} + if len(texts) > 0 || len(images) > 0 || len(videos) > 0 { + res, err := v.client.Vectorize(ctx, texts, images, videos, v.getVectorizationConfig(cfg)) + if err != nil { + return nil, err + } + vectors = append(vectors, res.TextVectors...) + vectors = append(vectors, res.ImageVectors...) + vectors = append(vectors, res.VideoVectors...) + } + weights, err := v.getWeights(ichek) + if err != nil { + return nil, err + } + + return libvectorizer.CombineVectorsWithWeights(vectors, weights), nil +} + +func (v *Vectorizer) getWeights(ichek ClassSettings) ([]float32, error) { + weights := []float32{} + textFieldsWeights, err := ichek.TextFieldsWeights() + if err != nil { + return nil, err + } + imageFieldsWeights, err := ichek.ImageFieldsWeights() + if err != nil { + return nil, err + } + videoFieldsWeights, err := ichek.VideoFieldsWeights() + if err != nil { + return nil, err + } + + weights = append(weights, textFieldsWeights...) + weights = append(weights, imageFieldsWeights...) 
+ weights = append(weights, videoFieldsWeights...) + + normalizedWeights := v.normalizeWeights(weights) + + return normalizedWeights, nil +} + +func (v *Vectorizer) normalizeWeights(weights []float32) []float32 { + if len(weights) > 0 { + var denominator float32 + for i := range weights { + denominator += weights[i] + } + normalizer := 1 / denominator + normalized := make([]float32, len(weights)) + for i := range weights { + normalized[i] = weights[i] * normalizer + } + return normalized + } + return nil +} + +func (v *Vectorizer) getVectorizationConfig(cfg moduletools.ClassConfig) ent.VectorizationConfig { + settings := NewClassSettings(cfg) + return ent.VectorizationConfig{ + Location: settings.Location(), + ProjectID: settings.ProjectID(), + Model: settings.ModelID(), + Dimensions: settings.Dimensions(), + VideoIntervalSeconds: settings.VideoIntervalSeconds(), + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..304ff48b7de73a4aa12b8b3e5795e6296e5e0115 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-google/vectorizer/vectorizer_test.go @@ -0,0 +1,194 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +const image = "iVBORw0KGgoAAAANSUhEUgAAAGAAAAA/CAYAAAAfQM0aAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpCRjQ5NEM3RDI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpCRjQ5NEM3RTI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOkJGNDk0QzdCMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOkJGNDk0QzdDMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+WeGRxAAAB2hJREFUeNrUXFtslUUQ3hJCoQVEKy0k1qQgrRg0vaAJaq1tvJSgaLy8mKDF2IvxBY2Bgm8+iIoxvhB72tTUmKgPigbFKCEtxeKD9hZjAi3GJrYJtqRai7TQB+pMz/zwU/5zzsxe2u4kXwiwZ+bb/Xb/s7v/zEmrra1VTFsFeBRQCtgEuBWwkv5vHPAn4DdAB+B7wBjXcUNDQ8o2dXV1SmDzyhUtLS3tBPyxC9CdrN1ihi/swKuA7YD0BG1uJhQDngdcAnwDeJ86Ole2kLii+J2AFsA+wF9RjRalmEUHaZY8m6RDUYZtn6HPHiRfLm2hck0D7AScAdRH8UokwD2AnwA7UoiUyhaRD/S12dHg+8B1OWA/4BTgqVQCPEJL8haLBNDXEfJt03ziipYH+BJwHFAYJcAWwCeAZQ6CLyPfWyz584nrbCuj74eHwgKsddih2R1ba+jHJ65R1k6PuWNhAd4DZM/BTiWbdhwm5hPXsA0AngY8COgNP4JwSTyu4zE/P18VFhZKP7aNYuouXxFX5Ic8Nc2Ea2D/AfYCNgIORZ0DdusOfnFxcXDw
UD09PZKP76alKDUR16KiIlVQUHDl7/39/Uozpg7Xac45YB0dGrQHHw07KVwJpRRbYiKuyCc8+MhXcyXocP2RnvMvJhr8QIBK08EPbGJiQuqq0mX7KD4GIohi4xVPTU0N6/BRamPwu7u7dZb3/RozkW3IB3lZEkGHayeI8FFVVdWaZAIUcD2Wl5fbHHy024XtC6QBkomA/XHIFb8X0Xamp6efASHqt27dGnkVkcNxVlFRoXJycmwOvuLGNmifVATsD/bLZezgKgKE2J+bm3sKHk3XXUWs4Mz87Oxs24OvOLEN26cUAfvFXAkrlKGBCDNXEbAajldXV1+5ijjP+KCrg855x+3nk2uy8SwDdIIIM1cRI6k+0NraqkZGRmzuKAIbFrYf0Q2UaPOA/Wpra3PBNfHhYHq6HbC5qanpGB7ETgPWc0TApTr7eyDolOaj6LRG+/W2Bn94eJg7+DpcowZ+AGb+642NjYfC3wEdXAdI1uK2Du2ksH2HrcHHfggGX4frNVcRMPh7BwcHN8ZiseuuIr4DvKXib29YX2bhmW+wEqYptsREXC2eWXS44oyfuYqYmpra19LSEnkaRgEG6Nj8gGRHESVCRkaG9Kg+IOyTiGtmZqatnZsOV/zMLnjcsF7KH5AIECVCX1+f6u3tlbg4oLmc2VyDy8HgPshg2yzmCo8aFsdAALzpw9dw23REwJkvHPwjSu92UcwVRcAnAd4LaQ6+CVe2AGivAe5WwhcdGp0aoVgmJuIqnBy2uSa18Buxs4AXAJMO401SjLOGfnziyhYg2GrtcNSxSfJ90pI/n7iyBUA7quKv/IYsxhmiZ/ZRy/x94soWAO1nwL0qnhVw2cD/ZfKBvjod9cEnrmwB0DBh9RUVfxHxhYrnUHLtEn2mlHyMOe6HT1wT7oISGSas4ntNzJmsVFczjnMBN1CbfwGD1BYPID8A/lFzbz5xZQsQnmWfExa6ecNVIsBKWuIlgA0qnjG2PLhsou0aZgF3qfil2fg89ssbrhwBNtB+GN/dLUnQ5kbCHYAnAFMAvGpsoY7OlS0krmOhxx7WLHwAeBLwVahN2uIUswgrPB5T8rRv7DxWqDwM+JaCjzue8b5wZe2C7gJ8quKVJqY599vJ1yZHffCJK0uA+wAfAtZYjIO+Gsi3TfOJK0sAfFP/jpKV+HBtKfkutOTPJ64sAVYD3qXgrmwpxVht6McnrmwBMAP4pjlYdRij3tCHT1xZAuDdermOA836gDKKqWNirob1ASZc2eeAl3QH36A+AGP+ohFWxNVSfYAuV9YKyKUTo/bgo2nUB5RQbImJuFqsD9DhyhbAuDgjMI36gFKX7S3XB5S6egSV2Bh8zYyDYjr4SGYi2yzmMIm5YnFGkFOLSQGNjY3X/BtaLBabWQF5XKcO6gOkZT950gAW6wPWuXoEZXEaOqoPyHLcPqkIwvqALFcCZHJmvqP6gEzH7VOKIKgPyHQlwIVUjRzWB1xw3H4+ubIFGE3VyGF9wKjj9ik3D4L6gFFXArCSTlEEzKe3LMIfwvYDNgcf+4P9csSVLUAXt7GD+oBuYfsuW4OvUR/Q7UoA/G2zaRvbOqEI0xRbYiKulusDTrgSYEg6sxKJIKwP6FLyjDYRV4v1ATpc2QKgNZtu6zTqA5o1ObM/h5eDyMvCtrlZObLgNhRv+jAHvkwqQjDzhYPfrvRvF0VcLdQHaHGNxWKrZv0d//hahcqr8Ccww1kRbwPuVMIXHRqd+ptimZiIq0F9gA2urEcQ2jkVf/tz0WG8ixTjnKEfn7iyBQi2WnuULLlV0qE9FrdzPnFlC4CGRQkvqyQ/MqRh6KtO2S948IkrWwC0XwHPAQ4r85z7w+TL1U8Y+8Q14S4oyjA9703AZ4AqFX8RvoTpN8i3/Bi/p+egHz5xZQsQGCasvqGuZhzj76DdpuIZx8FPuOAviWDG8e8qXl0yXxnHPnGdsf8FGAByGwC02iMZswAAAABJRU5ErkJggg==" + 
+func TestVectorizer(t *testing.T) { + t.Run("should vectorize image", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image"}).build() + + props := map[string]interface{}{ + "image": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) + + t.Run("should vectorize 2 image fields", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image1", "image2"}).build() + + props := map[string]interface{}{ + "image1": image, + "image2": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) +} + +func TestVectorizerWithDiff(t *testing.T) { + type testCase struct { + name string + input *models.Object + } + + props := map[string]interface{}{ + "image": image, + "text": "text", + "description": "non-vectorizable", + } + + tests := []testCase{ + { + name: "noop comp", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + { + name: "one vectorizable prop changed (1)", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + { + name: "one vectorizable prop changed (2)", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder(). + addSetting("imageFields", []interface{}{"image"}). + addSetting("textFields", []interface{}{"text"}). 
+ build() + + vector, _, err := vectorizer.Object(context.Background(), test.input, config) + + require.Nil(t, err) + assert.Equal(t, []float32{5.5, 11, 16.5, 22, 27.5}, vector) + }) + } +} + +func TestVectorizerWithWeights(t *testing.T) { + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder(). + addSetting("imageFields", []interface{}{"image"}). + addSetting("textFields", []interface{}{"text"}). + addWeights([]interface{}{0.4}, []interface{}{0.6}). + build() + + props := map[string]interface{}{ + "image": image, + "text": "text", + "description": "non-vectorizable", + } + input := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + vector, _, err := vectorizer.Object(context.Background(), input, config) + + require.Nil(t, err) + assert.Equal(t, []float32{3.2, 6.4, 9.6, 12.8, 16}, vector) + // vectors are defined in Vectorize within fakes_for_test.go + // result calculated with above weights as (textVectors[0][i]*0.4+imageVectors[0][i]*0.6) / 2 +} + +func TestVectorizer_normalizeWeights(t *testing.T) { + tests := []struct { + name string + weights []float32 + }{ + { + name: "normalize example 1", + weights: []float32{200, 100, 0.1}, + }, + { + name: "normalize example 2", + weights: []float32{300.22, 0.7, 17, 54}, + }, + { + name: "normalize example 3", + weights: []float32{300, 0.02, 17}, + }, + { + name: "normalize example 4", + weights: []float32{500, 0.02, 17.4, 180}, + }, + { + name: "normalize example 5", + weights: []float32{500, 0.02, 17.4, 2, 4, 5, .88}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + v := &Vectorizer{} + if got := v.normalizeWeights(tt.weights); !checkNormalization(got) { + t.Errorf("Vectorizer.normalizeWeights() = %v, want %v", got, 1.0) + } + }) + } +} + +func checkNormalization(weights []float32) bool { + var result float32 + for i := range weights { + result += weights[i] + } + return result == 1.0 +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/clients/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/clients/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..66b3424d989ced1b66c11b705780a757ec395d48 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/clients/fakes_for_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/clients/jinaai.go 
b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/clients/jinaai.go new file mode 100644 index 0000000000000000000000000000000000000000..891ad327b14e7eaf60124650720c74ae3c027222 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/clients/jinaai.go @@ -0,0 +1,77 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/multi2vec-jinaai/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/jinaai" +) + +const ( + defaultRPM = 500 // from https://jina.ai/embeddings/ + defaultTPM = 1_000_000 +) + +type vectorizer struct { + client *jinaai.Client[[]float32] + logger logrus.FieldLogger +} + +func New(jinaAIApiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + client: jinaai.New[[]float32](jinaAIApiKey, timeout, defaultRPM, defaultTPM, jinaai.EmbeddingsBuildUrlFn, logger), + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationCLIPResult[[]float32], error) { + settings := ent.NewClassSettings(cfg) + res, err := v.client.VectorizeMultiModal(ctx, texts, images, jinaai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Dimensions: settings.Dimensions(), + Normalized: true, + }) + return res, err +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, texts []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + 
settings := ent.NewClassSettings(cfg) + res, _, _, err := v.client.Vectorize(ctx, texts, jinaai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Dimensions: settings.Dimensions(), + Task: jinaai.RetrievalQuery, + Normalized: true, + }) + return res, err +} + +func (v *vectorizer) GetApiKeyHash(ctx context.Context, config moduletools.ClassConfig) [32]byte { + return v.client.GetApiKeyHash(ctx, config) +} + +func (v *vectorizer) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + return v.client.GetVectorizerRateLimit(ctx, cfg) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/clients/jinaai_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/clients/jinaai_test.go new file mode 100644 index 0000000000000000000000000000000000000000..447848ac56b5505e23efc2ddf87196affffebcde --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/clients/jinaai_test.go @@ -0,0 +1,199 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
//
// CONTACT: hello@weaviate.io
//

package clients

import (
	"context"
	"encoding/json"
	"io"
	"net/http"
	"net/http/httptest"
	"testing"
	"time"

	"github.com/weaviate/weaviate/usecases/modulecomponents"

	"github.com/pkg/errors"
	"github.com/sirupsen/logrus"
	"github.com/sirupsen/logrus/hooks/test"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// TestClient exercises the JinaAI client against a local fake HTTP server:
// text-only and image-only happy paths, context expiry, server-side errors,
// and API-key resolution via the X-Jinaai-Api-Key request header.
func TestClient(t *testing.T) {
	// defaultSettings points the client's class config at the fake server.
	defaultSettings := func(url string) fakeClassConfig {
		return fakeClassConfig{classConfig: map[string]interface{}{"Model": "jina-clip-v2", "baseURL": url}}
	}
	t.Run("when all is fine and we send text only", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()

		c := New("apiKey", 0, nullLogger())

		// the fake handler always returns the embedding {0.1, 0.2, 0.3}
		expected := &modulecomponents.VectorizationCLIPResult[[]float32]{
			TextVectors: [][]float32{{0.1, 0.2, 0.3}},
		}
		res, err := c.Vectorize(context.Background(), []string{"This is my text"}, nil, defaultSettings(server.URL))

		assert.Nil(t, err)
		assert.Equal(t, expected, res)
	})

	t.Run("when all is fine and we send image only", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()

		c := New("apiKey", 0, nullLogger())

		expected := &modulecomponents.VectorizationCLIPResult[[]float32]{
			ImageVectors: [][]float32{{0.1, 0.2, 0.3}},
		}
		res, err := c.Vectorize(context.Background(), nil, []string{"base64"}, defaultSettings(server.URL))

		assert.Nil(t, err)
		assert.Equal(t, expected, res)
	})

	t.Run("when the context is expired", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := New("apiKey", 0, nullLogger())

		// a deadline of "now" guarantees the request fails before completing
		ctx, cancel := context.WithDeadline(context.Background(), time.Now())
		defer cancel()

		_, err := c.Vectorize(ctx, []string{"This is my text"}, nil, fakeClassConfig{classConfig: map[string]interface{}{}})

		require.NotNil(t, err)
		assert.Contains(t, err.Error(), "context deadline exceeded")
	})

	t.Run("when the server returns an error", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{
			t:           t,
			serverError: errors.Errorf("nope, not gonna happen"),
		})
		defer server.Close()
		c := New("apiKey", 0, nullLogger())

		_, err := c.Vectorize(context.Background(), []string{"This is my text"}, nil, fakeClassConfig{classConfig: map[string]interface{}{"baseURL": server.URL}})

		require.NotNil(t, err)
		// the client is expected to surface the "detail" field of the 500
		// response in its error message
		assert.EqualError(t, err, "connection to: JinaAI API failed with status: 500 error: nope, not gonna happen")
	})

	t.Run("when JinaAI key is passed using X-Jinaai-Api-Key header", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		// no key at construction time; the per-request header must be used
		c := New("", 0, nullLogger())

		ctxWithValue := context.WithValue(context.Background(),
			"X-Jinaai-Api-Key", []string{"some-key"})

		expected := &modulecomponents.VectorizationCLIPResult[[]float32]{
			TextVectors: [][]float32{{0.1, 0.2, 0.3}},
		}
		res, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, nil, defaultSettings(server.URL))

		require.Nil(t, err)
		assert.Equal(t, expected, res)
	})

	t.Run("when JinaAI key is empty", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := New("", 0, nullLogger())

		ctx, cancel := context.WithDeadline(context.Background(), time.Now())
		defer cancel()

		_, err := c.Vectorize(ctx, []string{"This is my text"}, nil, fakeClassConfig{classConfig: map[string]interface{}{}})

		require.NotNil(t, err)
		assert.EqualError(t, err, "API Key: no api key found "+
			"neither in request header: X-Jinaai-Api-Key "+
			"nor in environment variable under JINAAI_APIKEY")
	})

	t.Run("when X-Jinaai-Api-Key header is passed but empty", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := New("", 0, nullLogger())

		// an empty header value must be treated the same as a missing key
		ctxWithValue := context.WithValue(context.Background(),
			"X-Jinaai-Api-Key", []string{""})

		_, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, nil, fakeClassConfig{classConfig: map[string]interface{}{"Model": "jina-embedding-v2"}})

		require.NotNil(t, err)
		assert.EqualError(t, err, "API Key: no api key found "+
			"neither in request header: X-Jinaai-Api-Key "+
			"nor in environment variable under JINAAI_APIKEY")
	})
}

// fakeHandler mimics the JinaAI embeddings endpoint: it echoes the first
// input element back with a fixed embedding, or returns a 500 with a
// "detail" message when serverError is set.
type fakeHandler struct {
	t           *testing.T
	serverError error
}

// ServeHTTP asserts the request is a POST, then either replies with the
// configured error (HTTP 500 + {"detail": ...}) or decodes the request body
// and returns a one-element embeddings list for its first input.
func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	assert.Equal(f.t, http.MethodPost, r.Method)

	if f.serverError != nil {
		embedding := map[string]interface{}{
			"detail": f.serverError.Error(),
		}
		outBytes, err := json.Marshal(embedding)
		require.Nil(f.t, err)

		w.WriteHeader(http.StatusInternalServerError)
		w.Write(outBytes)
		return
	}

	bodyBytes, err := io.ReadAll(r.Body)
	require.Nil(f.t, err)
	defer r.Body.Close()

	var b map[string]interface{}
	require.Nil(f.t, json.Unmarshal(bodyBytes, &b))

	// the request carries a list of {"text": ...} or {"image": ...} inputs;
	// echo whichever field the first element has
	textInputArray := b["input"].([]interface{})
	textInput := textInputArray[0].(map[string]interface{})
	assert.Greater(f.t, len(textInput), 0)
	obj := textInput["text"]
	if textInput["image"] != nil {
		obj = textInput["image"]
	}

	embeddingTextData := map[string]interface{}{
		"object":    obj,
		"index":     0,
		"embedding": []float32{0.1, 0.2, 0.3},
	}

	embedding := map[string]interface{}{
		"object": "list",
		"data":   []interface{}{embeddingTextData},
	}

	outBytes, err := json.Marshal(embedding)
	require.Nil(f.t, err)

	w.Write(outBytes)
}

// nullLogger returns a logger that discards all output.
func nullLogger() logrus.FieldLogger {
	l, _ := test.NewNullLogger()
	return l
}
b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "JinaAI CLIP Module", + "documentationHref": "https://jina.ai/embeddings/", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/config.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..975f897d458368d684b84bd5b52966239aae980b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/config.go @@ -0,0 +1,45 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
//
// CONTACT: hello@weaviate.io
//

package modclip

import (
	"context"

	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/modulecapabilities"
	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/entities/schema"
	"github.com/weaviate/weaviate/modules/multi2vec-jinaai/ent"
)

// ClassConfigDefaults returns the module-level settings applied to a class
// when the user does not set them explicitly: base URL, model, and
// vectorizeClassName.
func (m *Module) ClassConfigDefaults() map[string]interface{} {
	return map[string]interface{}{
		ent.BaseURLProperty:  ent.DefaultBaseURL,
		ent.ModelProperty:    ent.DefaultJinaAIModel,
		"vectorizeClassName": ent.DefaultVectorizeClassName,
	}
}

// PropertyConfigDefaults returns per-property defaults; this module defines
// none, regardless of the property's data type.
func (m *Module) PropertyConfigDefaults(
	dt *schema.DataType,
) map[string]interface{} {
	return map[string]interface{}{}
}

// ValidateClass checks the class's module config by delegating to the ent
// package's class-settings validation.
func (m *Module) ValidateClass(ctx context.Context,
	class *models.Class, cfg moduletools.ClassConfig,
) error {
	icheck := ent.NewClassSettings(cfg)
	return icheck.Validate()
}

// compile-time assertion that Module satisfies ClassConfigurator
var _ = modulecapabilities.ClassConfigurator(New())
+// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + BaseURLProperty = "baseURL" + ModelProperty = "model" +) + +const ( + DefaultBaseURL = "https://api.jina.ai" + DefaultJinaAIModel = "jina-clip-v2" + DefaultVectorizeClassName = false +) + +type classSettings struct { + base *basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{ + cfg: cfg, + base: basesettings.NewBaseClassSettingsWithAltNames(cfg, false, "multi2vec-jinaai", nil, nil), + } +} + +// JinaAI settings +func (cs *classSettings) Model() string { + return cs.base.GetPropertyAsString(ModelProperty, DefaultJinaAIModel) +} + +func (cs *classSettings) BaseURL() string { + return cs.base.GetPropertyAsString(BaseURLProperty, DefaultBaseURL) +} + +func (cs *classSettings) Dimensions() *int64 { + return cs.base.GetPropertyAsInt64("dimensions", nil) +} + +// CLIP module specific settings +func (ic *classSettings) ImageField(property string) bool { + return ic.field("imageFields", property) +} + +func (ic *classSettings) ImageFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("image") +} + +func (ic *classSettings) TextField(property string) bool { + return ic.field("textFields", property) +} + +func (ic *classSettings) TextFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("text") +} + +func (ic *classSettings) Properties() ([]string, error) { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return nil, errors.New("empty config") + } + props := make([]string, 0) + + fields := []string{"textFields", "imageFields"} + + for _, field := range fields { + fields, ok := ic.base.GetSettings()[field] + if !ok { + continue + } + 
+ fieldsArray, ok := fields.([]interface{}) + if !ok { + return nil, errors.Errorf("%s must be an array", field) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return nil, errors.Errorf("%s must be a string", field) + } + props = append(props, v) + } + } + return props, nil +} + +func (ic *classSettings) field(name, property string) bool { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return false + } + + fields, ok := ic.base.GetSettings()[name] + if !ok { + return false + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return false + } + + fieldNames := make([]string, len(fieldsArray)) + for i, value := range fieldsArray { + fieldNames[i] = value.(string) + } + + for i := range fieldNames { + if fieldNames[i] == property { + return true + } + } + + return false +} + +func (ic *classSettings) Validate() error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + var errorMessages []string + + imageFields, imageFieldsOk := ic.cfg.Class()["imageFields"] + textFields, textFieldsOk := ic.cfg.Class()["textFields"] + if !imageFieldsOk && !textFieldsOk { + errorMessages = append(errorMessages, "textFields or imageFields setting needs to be present") + } + + if imageFieldsOk { + imageFieldsCount, err := ic.validateFields("image", imageFields) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + err = ic.validateWeights("image", imageFieldsCount) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + } + + if textFieldsOk { + textFieldsCount, err := ic.validateFields("text", textFields) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + err = ic.validateWeights("text", textFieldsCount) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + } + + if ic.Dimensions() != nil { + if ic.Model() 
== DefaultJinaAIModel && (*ic.Dimensions() < 64 || *ic.Dimensions() > 1024) { + errorMessages = append(errorMessages, "dimensions needs to within [64, 1024] range") + } + if ic.Model() == "jina-clip-v1" && *ic.Dimensions() != 768 { + errorMessages = append(errorMessages, "dimensions needs to equal 768") + } + } + + if len(errorMessages) > 0 { + return fmt.Errorf("%s", strings.Join(errorMessages, ", ")) + } + + return nil +} + +func (ic *classSettings) validateFields(name string, fields interface{}) (int, error) { + fieldsArray, ok := fields.([]interface{}) + if !ok { + return 0, errors.Errorf("%sFields must be an array", name) + } + + if len(fieldsArray) == 0 { + return 0, errors.Errorf("must contain at least one %s field name in %sFields", name, name) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return 0, errors.Errorf("%sField must be a string", name) + } + if len(v) == 0 { + return 0, errors.Errorf("%sField values cannot be empty", name) + } + } + + return len(fieldsArray), nil +} + +func (ic *classSettings) validateWeights(name string, count int) error { + weights, ok := ic.getWeights(name) + if ok { + if len(weights) != count { + return errors.Errorf("weights.%sFields does not equal number of %sFields", name, name) + } + _, err := ic.getWeightsArray(weights) + if err != nil { + return err + } + } + + return nil +} + +func (ic *classSettings) getWeights(name string) ([]interface{}, bool) { + weights, ok := ic.base.GetSettings()["weights"] + if ok { + weightsObject, ok := weights.(map[string]interface{}) + if ok { + fieldWeights, ok := weightsObject[fmt.Sprintf("%sFields", name)] + if ok { + fieldWeightsArray, ok := fieldWeights.([]interface{}) + if ok { + return fieldWeightsArray, ok + } + } + } + } + + return nil, false +} + +func (ic *classSettings) getWeightsArray(weights []interface{}) ([]float32, error) { + weightsArray := make([]float32, len(weights)) + for i := range weights { + weight, err := 
ic.getNumber(weights[i]) + if err != nil { + return nil, err + } + weightsArray[i] = weight + } + return weightsArray, nil +} + +func (ic *classSettings) getFieldsWeights(name string) ([]float32, error) { + weights, ok := ic.getWeights(name) + if ok { + return ic.getWeightsArray(weights) + } + return nil, nil +} + +func (ic *classSettings) getNumber(in interface{}) (float32, error) { + return ic.base.GetNumber(in) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/ent/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/ent/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..61e9ccc63ba19d68446a9117cbd6086cd0eb85bd --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/ent/class_settings_test.go @@ -0,0 +1,269 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "encoding/json" + "testing" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + type fields struct { + cfg moduletools.ClassConfig + } + tests := []struct { + name string + fields fields + wantErr bool + }{ + { + name: "should not pass with empty config", + wantErr: true, + }, + { + name: "should not pass with nil config", + fields: fields{ + cfg: nil, + }, + wantErr: true, + }, + { + name: "should not pass with nil imageFields", + fields: fields{ + cfg: newConfigBuilder(). 
+ addSetting("model", "model").addSetting("imageFields", nil).build(), + }, + wantErr: true, + }, + { + name: "should not pass with fault imageFields value", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []string{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty string in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{""}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with int value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{1.0}).build(), + }, + wantErr: true, + }, + { + name: "should pass with proper value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{"field"}).build(), + }, + }, + { + name: "should pass with proper value in imageFields and textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("imageFields", []interface{}{"imageField"}). + addSetting("textFields", []interface{}{"textField"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + addWeights([]interface{}{1, 2}, []interface{}{1, 2}). 
+ build(), + }, + }, + { + name: "should pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + }, + { + name: "should not pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, "aaaa"}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{json.Number("1"), json.Number("2")}, []interface{}{json.Number("3")}). + build(), + }, + }, + { + name: "should pass with proper dimensions setting in videoFields together with image fields", + fields: fields{ + cfg: newConfigBuilder(). 
+ addSetting("videoFields", []interface{}{"video1"}). + addSetting("imageFields", []interface{}{"image1"}). + build(), + }, + wantErr: false, + }, + { + name: "should not pass with wrong dimensions setting jina-clip-v2", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("model", "jina-clip-v2"). + addSetting("dimensions", int64(63)). + addSetting("imageFields", []interface{}{"imageField"}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with wrong dimensions setting jina-clip-v2", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("model", "jina-clip-v1"). + addSetting("dimensions", int64(769)). + addSetting("imageFields", []interface{}{"imageField"}).build(), + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.fields.cfg) + if err := ic.Validate(); (err != nil) != tt.wantErr { + t.Errorf("classSettings.Validate() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +type builder struct { + fakeClassConfig *fakeClassConfig +} + +func newConfigBuilder() *builder { + return &builder{ + fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}}, + } +} + +func (b *builder) addSetting(name string, value interface{}) *builder { + b.fakeClassConfig.config[name] = value + return b +} + +func (b *builder) addWeights(textWeights, imageWeights []interface{}) *builder { + if textWeights != nil || imageWeights != nil { + weightSettings := map[string]interface{}{} + if textWeights != nil { + weightSettings["textFields"] = textWeights + } + if imageWeights != nil { + weightSettings["imageFields"] = imageWeights + } + b.fakeClassConfig.config["weights"] = weightSettings + } + return b +} + +func (b *builder) build() *fakeClassConfig { + return b.fakeClassConfig +} + +type fakeClassConfig struct { + config map[string]interface{} +} + +func (c fakeClassConfig) Class() map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) 
ClassByModuleName(moduleName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Property(propName string) map[string]interface{} { + return c.config +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/module.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..9da72fa4eb643477c0442d202fdb6a6ad592c4c1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/module.go @@ -0,0 +1,137 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
package modclip

import (
	"context"
	"os"
	"time"

	"github.com/weaviate/weaviate/usecases/modulecomponents/batch"

	"github.com/pkg/errors"
	"github.com/sirupsen/logrus"
	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/modulecapabilities"
	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/modules/multi2vec-jinaai/clients"
	"github.com/weaviate/weaviate/modules/multi2vec-jinaai/ent"
	"github.com/weaviate/weaviate/modules/multi2vec-jinaai/vectorizer"
)

// Name is the registered identifier of this module.
const Name = "multi2vec-jinaai"

// New returns an uninitialized Module; Init must be called before use.
func New() *Module {
	return &Module{}
}

// Module implements the multi2vec-jinaai vectorizer: it embeds objects'
// text and image fields through the JinaAI API and contributes the
// nearText/nearImage GraphQL arguments and searchers.
type Module struct {
	vectorizer               *vectorizer.Vectorizer
	nearImageGraphqlProvider modulecapabilities.GraphQLArguments
	nearImageSearcher        modulecapabilities.Searcher[[]float32]
	nearTextGraphqlProvider  modulecapabilities.GraphQLArguments
	nearTextSearcher         modulecapabilities.Searcher[[]float32]
	nearTextTransformer      modulecapabilities.TextTransform
	metaClient               metaClient
	logger                   logrus.FieldLogger
}

// metaClient supplies module metadata (used by MetaInfo).
type metaClient interface {
	MetaInfo() (map[string]interface{}, error)
}

// Name returns the module identifier.
func (m *Module) Name() string {
	return Name
}

// Type reports this module as a multi-modal (multi2vec) vectorizer.
func (m *Module) Type() modulecapabilities.ModuleType {
	return modulecapabilities.Multi2Vec
}

// Init builds the remote client/vectorizer and wires up the nearImage
// capability.
func (m *Module) Init(ctx context.Context,
	params moduletools.ModuleInitParams,
) error {
	m.logger = params.GetLogger()
	if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil {
		return errors.Wrap(err, "init vectorizer")
	}

	if err := m.initNearImage(); err != nil {
		return errors.Wrap(err, "init near image")
	}

	return nil
}

// InitExtension picks up an optional "nearText" transformer provided by a
// sibling module, then wires up the nearText capability.
func (m *Module) InitExtension(modules []modulecapabilities.Module) error {
	for _, module := range modules {
		// skip ourselves; only sibling modules may provide the transformer
		if module.Name() == m.Name() {
			continue
		}
		if arg, ok := module.(modulecapabilities.TextTransformers); ok {
			if arg != nil && arg.TextTransformers() != nil {
				m.nearTextTransformer = arg.TextTransformers()["nearText"]
			}
		}
	}

	if err := m.initNearText(); err != nil {
		return errors.Wrap(err, "init near text")
	}

	return nil
}

// initVectorizer constructs the JinaAI client (API key read from the
// JINAAI_APIKEY environment variable) and the vectorizer around it.
// It currently cannot fail; the error return exists for interface symmetry.
func (m *Module) initVectorizer(ctx context.Context, timeout time.Duration,
	logger logrus.FieldLogger,
) error {
	apiKey := os.Getenv("JINAAI_APIKEY")
	client := clients.New(apiKey, timeout, logger)

	m.vectorizer = vectorizer.New(client)
	m.metaClient = client

	return nil
}

// VectorizeObject vectorizes a single object according to the class config.
func (m *Module) VectorizeObject(ctx context.Context,
	obj *models.Object, cfg moduletools.ClassConfig,
) ([]float32, models.AdditionalProperties, error) {
	return m.vectorizer.Object(ctx, obj, cfg)
}

// VectorizeBatch vectorizes many objects at once, skipping entries flagged
// in skipObject; per-object failures are reported in the returned map.
func (m *Module) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) {
	return batch.VectorizeBatch(ctx, objs, skipObject, cfg, m.logger, m.vectorizer.Object)
}

// VectorizableProperties returns the media (text/image) property names
// configured for this class. The first return is always false here.
func (m *Module) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) {
	ichek := ent.NewClassSettings(cfg)
	mediaProps, err := ichek.Properties()
	return false, mediaProps, err
}

// MetaInfo proxies module metadata from the remote client.
func (m *Module) MetaInfo() (map[string]interface{}, error) {
	return m.metaClient.MetaInfo()
}

// VectorizeInput vectorizes a single query string.
func (m *Module) VectorizeInput(ctx context.Context,
	input string, cfg moduletools.ClassConfig,
) ([]float32, error) {
	return m.vectorizer.Texts(ctx, []string{input}, cfg)
}

// verify we implement the modules.Module interface
var (
	_ = modulecapabilities.Module(New())
	_ = modulecapabilities.Vectorizer[[]float32](New())
	_ = modulecapabilities.InputVectorizer[[]float32](New())
)
b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/nearArguments.go @@ -0,0 +1,57 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearImage" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *Module) initNearImage() error { + m.nearImageSearcher = nearImage.NewSearcher(m.vectorizer) + m.nearImageGraphqlProvider = nearImage.New() + return nil +} + +func (m *Module) initNearText() error { + m.nearTextSearcher = nearText.NewSearcher(m.vectorizer) + m.nearTextGraphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *Module) Arguments() map[string]modulecapabilities.GraphQLArgument { + arguments := map[string]modulecapabilities.GraphQLArgument{} + for name, arg := range m.nearImageGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearTextGraphqlProvider.Arguments() { + arguments[name] = arg + } + return arguments +} + +func (m *Module) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearches := map[string]modulecapabilities.VectorForParams[[]float32]{} + for name, arg := range m.nearImageSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearTextSearcher.VectorSearches() { + vectorSearches[name] = arg + } + return vectorSearches +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/vectorizer/fakes_for_test.go 
package vectorizer

import (
	"context"

	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/entities/schema"
	"github.com/weaviate/weaviate/usecases/config"
	"github.com/weaviate/weaviate/usecases/modulecomponents"
)

// builder assembles a fakeClassConfig for tests via chained calls.
type builder struct {
	fakeClassConfig *fakeClassConfig
}

// newConfigBuilder returns a builder around an empty config map.
func newConfigBuilder() *builder {
	return &builder{
		fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}},
	}
}

// addSetting stores an arbitrary class-config setting under name.
func (b *builder) addSetting(name string, value interface{}) *builder {
	b.fakeClassConfig.config[name] = value
	return b
}

// addWeights sets the "weights" entry with optional textFields/imageFields
// weight lists; passing nil for either slice leaves that key unset, and
// passing nil for both leaves "weights" entirely absent.
func (b *builder) addWeights(textWeights, imageWeights []interface{}) *builder {
	if textWeights != nil || imageWeights != nil {
		weightSettings := map[string]interface{}{}
		if textWeights != nil {
			weightSettings["textFields"] = textWeights
		}
		if imageWeights != nil {
			weightSettings["imageFields"] = imageWeights
		}
		b.fakeClassConfig.config["weights"] = weightSettings
	}
	return b
}

// build returns the assembled fake class config.
func (b *builder) build() *fakeClassConfig {
	return b.fakeClassConfig
}

// fakeClassConfig is a minimal moduletools.ClassConfig stub backed by one
// map, returned unchanged for class, module, and property lookups.
type fakeClassConfig struct {
	config map[string]interface{}
}

func (c fakeClassConfig) Class() map[string]interface{} {
	return c.config
}

func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} {
	return c.config
}

func (c fakeClassConfig) Property(propName string) map[string]interface{} {
	return c.config
}

func (f fakeClassConfig) Tenant() string {
	return ""
}

func (f fakeClassConfig) TargetVector() string {
	return ""
}

func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType {
	return nil
}

func (f fakeClassConfig) Config() *config.Config {
	return nil
}

// fakeClient returns fixed vectors regardless of input so tests can assert
// exact combined results.
type fakeClient struct{}

// Vectorize always returns one fixed text vector and one fixed image vector.
func (c *fakeClient) Vectorize(ctx context.Context,
	texts, images []string, cfg moduletools.ClassConfig,
) (*modulecomponents.VectorizationCLIPResult[[]float32], error) {
	result := &modulecomponents.VectorizationCLIPResult[[]float32]{
		TextVectors:  [][]float32{{1.0, 2.0, 3.0, 4.0, 5.0}},
		ImageVectors: [][]float32{{10.0, 20.0, 30.0, 40.0, 50.0}},
	}
	return result, nil
}

// VectorizeQuery always returns a single fixed query vector.
func (c *fakeClient) VectorizeQuery(ctx context.Context, texts []string,
	cfg moduletools.ClassConfig,
) (*modulecomponents.VectorizationResult[[]float32], error) {
	result := &modulecomponents.VectorizationResult[[]float32]{
		Vector: [][]float32{{1.0, 2.0, 3.0, 4.0, 5.0}},
	}
	return result, nil
}
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + res, err := v.client.VectorizeQuery(ctx, inputs, cfg) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + if len(inputs) != len(res.Vector) { + return nil, errors.New("inputs are not equal to vectors returned") + } + return libvectorizer.CombineVectors(res.Vector), nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/vectorizer/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/vectorizer/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..a2799b0f9f0a9cf16815d61c20e849b34c3cfca7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/vectorizer/vectorizer.go @@ -0,0 +1,154 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-jinaai/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +type Vectorizer struct { + client Client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +type Client interface { + Vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig, + ) (*modulecomponents.VectorizationCLIPResult[[]float32], error) + VectorizeQuery(ctx context.Context, texts []string, + cfg moduletools.ClassConfig) (*modulecomponents.VectorizationResult[[]float32], error) +} + +type ClassSettings interface { + ImageField(property string) bool + ImageFieldsWeights() ([]float32, error) + TextField(property string) bool + TextFieldsWeights() ([]float32, error) +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, + cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) VectorizeImage(ctx context.Context, id, image string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.Vectorize(ctx, nil, []string{image}, cfg) + if err != nil { + return nil, err + } + if len(res.ImageVectors) != 1 { + return nil, errors.New("empty vector") + } + + return res.ImageVectors[0], nil +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, + cfg moduletools.ClassConfig, +) ([]float32, error) { + ichek := ent.NewClassSettings(cfg) + + // 
vectorize image and text + texts := []string{} + images := []string{} + + if object.Properties != nil { + schemamap := object.Properties.(map[string]interface{}) + for _, propName := range moduletools.SortStringKeys(schemamap) { + switch val := schemamap[propName].(type) { + case string: + if ichek.ImageField(propName) { + images = append(images, val) + } + if ichek.TextField(propName) { + texts = append(texts, val) + } + case []string: + if ichek.TextField(propName) { + texts = append(texts, val...) + } + default: // properties that are not part of the object + + } + } + + } + + vectors := [][]float32{} + if len(texts) > 0 || len(images) > 0 { + res, err := v.client.Vectorize(ctx, texts, images, cfg) + if err != nil { + return nil, err + } + vectors = append(vectors, res.TextVectors...) + vectors = append(vectors, res.ImageVectors...) + } + weights, err := v.getWeights(ichek) + if err != nil { + return nil, err + } + + return libvectorizer.CombineVectorsWithWeights(vectors, weights), nil +} + +func (v *Vectorizer) getWeights(ichek ClassSettings) ([]float32, error) { + weights := []float32{} + textFieldsWeights, err := ichek.TextFieldsWeights() + if err != nil { + return nil, err + } + imageFieldsWeights, err := ichek.ImageFieldsWeights() + if err != nil { + return nil, err + } + + weights = append(weights, textFieldsWeights...) + weights = append(weights, imageFieldsWeights...) 
+ + normalizedWeights := v.normalizeWeights(weights) + + return normalizedWeights, nil +} + +func (v *Vectorizer) normalizeWeights(weights []float32) []float32 { + if len(weights) > 0 { + var denominator float32 + for i := range weights { + denominator += weights[i] + } + normalizer := 1 / denominator + normalized := make([]float32, len(weights)) + for i := range weights { + normalized[i] = weights[i] * normalizer + } + return normalized + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/vectorizer/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/vectorizer/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..304ff48b7de73a4aa12b8b3e5795e6296e5e0115 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-jinaai/vectorizer/vectorizer_test.go @@ -0,0 +1,194 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +const image = "iVBORw0KGgoAAAANSUhEUgAAAGAAAAA/CAYAAAAfQM0aAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpCRjQ5NEM3RDI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpCRjQ5NEM3RTI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOkJGNDk0QzdCMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOkJGNDk0QzdDMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+WeGRxAAAB2hJREFUeNrUXFtslUUQ3hJCoQVEKy0k1qQgrRg0vaAJaq1tvJSgaLy8mKDF2IvxBY2Bgm8+iIoxvhB72tTUmKgPigbFKCEtxeKD9hZjAi3GJrYJtqRai7TQB+pMz/zwU/5zzsxe2u4kXwiwZ+bb/Xb/s7v/zEmrra1VTFsFeBRQCtgEuBWwkv5vHPAn4DdAB+B7wBjXcUNDQ8o2dXV1SmDzyhUtLS3tBPyxC9CdrN1ihi/swKuA7YD0BG1uJhQDngdcAnwDeJ86Ole2kLii+J2AFsA+wF9RjRalmEUHaZY8m6RDUYZtn6HPHiRfLm2hck0D7AScAdRH8UokwD2AnwA7UoiUyhaRD/S12dHg+8B1OWA/4BTgqVQCPEJL8haLBNDXEfJt03ziipYH+BJwHFAYJcAWwCeAZQ6CLyPfWyz584nrbCuj74eHwgKsddih2R1ba+jHJ65R1k6PuWNhAd4DZM/BTiWbdhwm5hPXsA0AngY8COgNP4JwSTyu4zE/P18VFhZKP7aNYuouXxFX5Ic8Nc2Ea2D/AfYCNgIORZ0DdusOfnFxcXDw
UD09PZKP76alKDUR16KiIlVQUHDl7/39/Uozpg7Xac45YB0dGrQHHw07KVwJpRRbYiKuyCc8+MhXcyXocP2RnvMvJhr8QIBK08EPbGJiQuqq0mX7KD4GIohi4xVPTU0N6/BRamPwu7u7dZb3/RozkW3IB3lZEkGHayeI8FFVVdWaZAIUcD2Wl5fbHHy024XtC6QBkomA/XHIFb8X0Xamp6efASHqt27dGnkVkcNxVlFRoXJycmwOvuLGNmifVATsD/bLZezgKgKE2J+bm3sKHk3XXUWs4Mz87Oxs24OvOLEN26cUAfvFXAkrlKGBCDNXEbAajldXV1+5ijjP+KCrg855x+3nk2uy8SwDdIIIM1cRI6k+0NraqkZGRmzuKAIbFrYf0Q2UaPOA/Wpra3PBNfHhYHq6HbC5qanpGB7ETgPWc0TApTr7eyDolOaj6LRG+/W2Bn94eJg7+DpcowZ+AGb+642NjYfC3wEdXAdI1uK2Du2ksH2HrcHHfggGX4frNVcRMPh7BwcHN8ZiseuuIr4DvKXib29YX2bhmW+wEqYptsREXC2eWXS44oyfuYqYmpra19LSEnkaRgEG6Nj8gGRHESVCRkaG9Kg+IOyTiGtmZqatnZsOV/zMLnjcsF7KH5AIECVCX1+f6u3tlbg4oLmc2VyDy8HgPshg2yzmCo8aFsdAALzpw9dw23REwJkvHPwjSu92UcwVRcAnAd4LaQ6+CVe2AGivAe5WwhcdGp0aoVgmJuIqnBy2uSa18Buxs4AXAJMO401SjLOGfnziyhYg2GrtcNSxSfJ90pI/n7iyBUA7quKv/IYsxhmiZ/ZRy/x94soWAO1nwL0qnhVw2cD/ZfKBvjod9cEnrmwB0DBh9RUVfxHxhYrnUHLtEn2mlHyMOe6HT1wT7oISGSas4ntNzJmsVFczjnMBN1CbfwGD1BYPID8A/lFzbz5xZQsQnmWfExa6ecNVIsBKWuIlgA0qnjG2PLhsou0aZgF3qfil2fg89ssbrhwBNtB+GN/dLUnQ5kbCHYAnAFMAvGpsoY7OlS0krmOhxx7WLHwAeBLwVahN2uIUswgrPB5T8rRv7DxWqDwM+JaCjzue8b5wZe2C7gJ8quKVJqY599vJ1yZHffCJK0uA+wAfAtZYjIO+Gsi3TfOJK0sAfFP/jpKV+HBtKfkutOTPJ64sAVYD3qXgrmwpxVht6McnrmwBMAP4pjlYdRij3tCHT1xZAuDdermOA836gDKKqWNirob1ASZc2eeAl3QH36A+AGP+ohFWxNVSfYAuV9YKyKUTo/bgo2nUB5RQbImJuFqsD9DhyhbAuDgjMI36gFKX7S3XB5S6egSV2Bh8zYyDYjr4SGYi2yzmMIm5YnFGkFOLSQGNjY3X/BtaLBabWQF5XKcO6gOkZT950gAW6wPWuXoEZXEaOqoPyHLcPqkIwvqALFcCZHJmvqP6gEzH7VOKIKgPyHQlwIVUjRzWB1xw3H4+ubIFGE3VyGF9wKjj9ik3D4L6gFFXArCSTlEEzKe3LMIfwvYDNgcf+4P9csSVLUAXt7GD+oBuYfsuW4OvUR/Q7UoA/G2zaRvbOqEI0xRbYiKulusDTrgSYEg6sxKJIKwP6FLyjDYRV4v1ATpc2QKgNZtu6zTqA5o1ObM/h5eDyMvCtrlZObLgNhRv+jAHvkwqQjDzhYPfrvRvF0VcLdQHaHGNxWKrZv0d//hahcqr8Ccww1kRbwPuVMIXHRqd+ptimZiIq0F9gA2urEcQ2jkVf/tz0WG8ixTjnKEfn7iyBQi2WnuULLlV0qE9FrdzPnFlC4CGRQkvqyQ/MqRh6KtO2S948IkrWwC0XwHPAQ4r85z7w+TL1U8Y+8Q14S4oyjA9703AZ4AqFX8RvoTpN8i3/Bi/p+egHz5xZQsQGCasvqGuZhzj76DdpuIZx8FPuOAviWDG8e8qXl0yXxnHPnGdsf8FGAByGwC02iMZswAAAABJRU5ErkJggg==" + 
+func TestVectorizer(t *testing.T) { + t.Run("should vectorize image", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image"}).build() + + props := map[string]interface{}{ + "image": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) + + t.Run("should vectorize 2 image fields", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image1", "image2"}).build() + + props := map[string]interface{}{ + "image1": image, + "image2": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) +} + +func TestVectorizerWithDiff(t *testing.T) { + type testCase struct { + name string + input *models.Object + } + + props := map[string]interface{}{ + "image": image, + "text": "text", + "description": "non-vectorizable", + } + + tests := []testCase{ + { + name: "noop comp", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + { + name: "one vectorizable prop changed (1)", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + { + name: "one vectorizable prop changed (2)", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder(). + addSetting("imageFields", []interface{}{"image"}). + addSetting("textFields", []interface{}{"text"}). 
+ build() + + vector, _, err := vectorizer.Object(context.Background(), test.input, config) + + require.Nil(t, err) + assert.Equal(t, []float32{5.5, 11, 16.5, 22, 27.5}, vector) + }) + } +} + +func TestVectorizerWithWeights(t *testing.T) { + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder(). + addSetting("imageFields", []interface{}{"image"}). + addSetting("textFields", []interface{}{"text"}). + addWeights([]interface{}{0.4}, []interface{}{0.6}). + build() + + props := map[string]interface{}{ + "image": image, + "text": "text", + "description": "non-vectorizable", + } + input := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + vector, _, err := vectorizer.Object(context.Background(), input, config) + + require.Nil(t, err) + assert.Equal(t, []float32{3.2, 6.4, 9.6, 12.8, 16}, vector) + // vectors are defined in Vectorize within fakes_for_test.go + // result calculated with above weights as (textVectors[0][i]*0.4+imageVectors[0][i]*0.6) / 2 +} + +func TestVectorizer_normalizeWeights(t *testing.T) { + tests := []struct { + name string + weights []float32 + }{ + { + name: "normalize example 1", + weights: []float32{200, 100, 0.1}, + }, + { + name: "normalize example 2", + weights: []float32{300.22, 0.7, 17, 54}, + }, + { + name: "normalize example 3", + weights: []float32{300, 0.02, 17}, + }, + { + name: "normalize example 4", + weights: []float32{500, 0.02, 17.4, 180}, + }, + { + name: "normalize example 5", + weights: []float32{500, 0.02, 17.4, 2, 4, 5, .88}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + v := &Vectorizer{} + if got := v.normalizeWeights(tt.weights); !checkNormalization(got) { + t.Errorf("Vectorizer.normalizeWeights() = %v, want %v", got, 1.0) + } + }) + } +} + +func checkNormalization(weights []float32) bool { + var result float32 + for i := range weights { + result += weights[i] + } + return result == 1.0 +} diff --git 
package clients

// MetaInfo returns static metadata describing this module for Weaviate's
// meta endpoint: a human-readable name and a link to the upstream NVIDIA
// API documentation.
func (v *vectorizer) MetaInfo() (map[string]interface{}, error) {
	return map[string]interface{}{
		"name":              "NVIDIA CLIP Module",
		"documentationHref": "https://docs.api.nvidia.com/nim/reference/retrieval-apis",
	}, nil
}
package clients

import (
	"context"
	"fmt"
	"strings"
	"time"

	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/usecases/modulecomponents"
	"github.com/weaviate/weaviate/usecases/modulecomponents/clients/nvidia"

	"github.com/sirupsen/logrus"
	"github.com/weaviate/weaviate/modules/multi2vec-nvidia/ent"
)

// vectorizer embeds texts and images through the shared NVIDIA client.
type vectorizer struct {
	client *nvidia.Client
	logger logrus.FieldLogger
}

// New builds a vectorizer around a freshly constructed NVIDIA client.
func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer {
	return &vectorizer{
		client: nvidia.New(apiKey, timeout, logger),
		logger: logger,
	}
}

// Vectorize embeds the given texts and images for object vectorization.
func (v *vectorizer) Vectorize(ctx context.Context,
	texts, images []string, cfg moduletools.ClassConfig,
) (*modulecomponents.VectorizationCLIPResult[[]float32], error) {
	return v.vectorize(ctx, texts, images, cfg)
}

// VectorizeQuery embeds query strings only (no images).
func (v *vectorizer) VectorizeQuery(ctx context.Context,
	input []string, cfg moduletools.ClassConfig,
) (*modulecomponents.VectorizationCLIPResult[[]float32], error) {
	return v.vectorize(ctx, input, nil, cfg)
}

// vectorize performs up to two upstream calls — one for the text batch and
// one for the image batch — using the class's configured model and base
// URL. Images lacking a "data:" prefix are wrapped as base64 data URIs.
func (v *vectorizer) vectorize(ctx context.Context,
	texts, images []string, cfg moduletools.ClassConfig,
) (*modulecomponents.VectorizationCLIPResult[[]float32], error) {
	var textVectors [][]float32
	var imageVectors [][]float32
	settings := ent.NewClassSettings(cfg)
	if len(texts) > 0 {
		textEmbeddings, err := v.client.Vectorize(ctx, texts, nvidia.Settings{
			BaseURL: settings.BaseURL(),
			Model:   settings.Model(),
		})
		if err != nil {
			return nil, err
		}
		textVectors = textEmbeddings.Vector
	}
	if len(images) > 0 {
		inputs := make([]string, len(images))
		for i := range images {
			if !strings.HasPrefix(images[i], "data:") {
				// assumes raw base64 input is PNG — TODO confirm other
				// formats are acceptable upstream
				inputs[i] = fmt.Sprintf("data:image/png;base64,%s", images[i])
			} else {
				inputs[i] = images[i]
			}
		}
		imageEmbeddings, err := v.client.Vectorize(ctx, inputs, nvidia.Settings{
			Model:   settings.Model(),
			BaseURL: settings.BaseURL(),
		})
		if err != nil {
			return nil, err
		}
		imageVectors = imageEmbeddings.Vector
	}
	return &modulecomponents.VectorizationCLIPResult[[]float32]{
		TextVectors:  textVectors,
		ImageVectors: imageVectors,
	}, nil
}

// ---- file: modules/multi2vec-nvidia/config.go ----

package modclip

import (
	"context"

	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/modulecapabilities"
	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/entities/schema"
	"github.com/weaviate/weaviate/modules/multi2vec-nvidia/ent"
)

// ClassConfigDefaults returns no class-level defaults.
func (m *Module) ClassConfigDefaults() map[string]interface{} {
	return map[string]interface{}{}
}

// PropertyConfigDefaults returns no property-level defaults.
func (m *Module) PropertyConfigDefaults(
	dt *schema.DataType,
) map[string]interface{} {
	return map[string]interface{}{}
}

// ValidateClass validates the class's module settings via the ent package.
func (m *Module) ValidateClass(ctx context.Context,
	class *models.Class, cfg moduletools.ClassConfig,
) error {
	icheck := ent.NewClassSettings(cfg)
	return icheck.Validate()
}

var _ = modulecapabilities.ClassConfigurator(New())
--- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/ent/class_settings.go @@ -0,0 +1,259 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + baseURLProperty = "baseURL" + modelProperty = "model" + truncateProperty = "truncate" +) + +const ( + DefaultBaseURL = "https://integrate.api.nvidia.com" + DefaultNvidiaModel = "nvidia/nvclip" + DefaultVectorizeClassName = false + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + LowerCaseInput = false +) + +type classSettings struct { + base *basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{ + cfg: cfg, + base: basesettings.NewBaseClassSettingsWithAltNames(cfg, LowerCaseInput, "multi2vec-nvidia", nil, nil), + } +} + +// Nvidia settings +func (cs *classSettings) Model() string { + return cs.base.GetPropertyAsString(modelProperty, DefaultNvidiaModel) +} + +func (cs *classSettings) BaseURL() string { + return cs.base.GetPropertyAsString(baseURLProperty, DefaultBaseURL) +} + +// CLIP module specific settings +func (ic *classSettings) ImageField(property string) bool { + return ic.field("imageFields", property) +} + +func (ic *classSettings) ImageFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("image") +} + +func (ic *classSettings) TextField(property string) bool { + return ic.field("textFields", property) +} + +func (ic *classSettings) TextFieldsWeights() ([]float32, error) { + return 
ic.getFieldsWeights("text") +} + +func (ic *classSettings) Properties() ([]string, error) { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return nil, errors.New("empty config") + } + props := make([]string, 0) + + fields := []string{"textFields", "imageFields"} + + for _, field := range fields { + fields, ok := ic.base.GetSettings()[field] + if !ok { + continue + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return nil, errors.Errorf("%s must be an array", field) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return nil, errors.Errorf("%s must be a string", field) + } + props = append(props, v) + } + } + return props, nil +} + +func (ic *classSettings) field(name, property string) bool { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return false + } + + fields, ok := ic.base.GetSettings()[name] + if !ok { + return false + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return false + } + + fieldNames := make([]string, len(fieldsArray)) + for i, value := range fieldsArray { + fieldNames[i] = value.(string) + } + + for i := range fieldNames { + if fieldNames[i] == property { + return true + } + } + + return false +} + +func (ic *classSettings) Validate() error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + var errorMessages []string + + imageFields, imageFieldsOk := ic.cfg.Class()["imageFields"] + textFields, textFieldsOk := ic.cfg.Class()["textFields"] + if !imageFieldsOk && !textFieldsOk { + errorMessages = append(errorMessages, "textFields or imageFields setting needs to be present") + } + + if imageFieldsOk { + imageFieldsCount, err := ic.validateFields("image", imageFields) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + err = ic.validateWeights("image", 
imageFieldsCount) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + } + + if textFieldsOk { + textFieldsCount, err := ic.validateFields("text", textFields) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + err = ic.validateWeights("text", textFieldsCount) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + } + + if len(errorMessages) > 0 { + return fmt.Errorf("%s", strings.Join(errorMessages, ", ")) + } + + return nil +} + +func (ic *classSettings) validateFields(name string, fields interface{}) (int, error) { + fieldsArray, ok := fields.([]interface{}) + if !ok { + return 0, errors.Errorf("%sFields must be an array", name) + } + + if len(fieldsArray) == 0 { + return 0, errors.Errorf("must contain at least one %s field name in %sFields", name, name) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return 0, errors.Errorf("%sField must be a string", name) + } + if len(v) == 0 { + return 0, errors.Errorf("%sField values cannot be empty", name) + } + } + + return len(fieldsArray), nil +} + +func (ic *classSettings) validateWeights(name string, count int) error { + weights, ok := ic.getWeights(name) + if ok { + if len(weights) != count { + return errors.Errorf("weights.%sFields does not equal number of %sFields", name, name) + } + _, err := ic.getWeightsArray(weights) + if err != nil { + return err + } + } + + return nil +} + +func (ic *classSettings) getWeights(name string) ([]interface{}, bool) { + weights, ok := ic.base.GetSettings()["weights"] + if ok { + weightsObject, ok := weights.(map[string]interface{}) + if ok { + fieldWeights, ok := weightsObject[fmt.Sprintf("%sFields", name)] + if ok { + fieldWeightsArray, ok := fieldWeights.([]interface{}) + if ok { + return fieldWeightsArray, ok + } + } + } + } + + return nil, false +} + +func (ic *classSettings) getWeightsArray(weights []interface{}) ([]float32, error) { + weightsArray := make([]float32, 
len(weights)) + for i := range weights { + weight, err := ic.getNumber(weights[i]) + if err != nil { + return nil, err + } + weightsArray[i] = weight + } + return weightsArray, nil +} + +func (ic *classSettings) getFieldsWeights(name string) ([]float32, error) { + weights, ok := ic.getWeights(name) + if ok { + return ic.getWeightsArray(weights) + } + return nil, nil +} + +func (ic *classSettings) getNumber(in interface{}) (float32, error) { + return ic.base.GetNumber(in) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/ent/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/ent/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..00ed00ac67c8787f66101a09f703765c0e4a1c06 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/ent/class_settings_test.go @@ -0,0 +1,249 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "encoding/json" + "testing" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + type fields struct { + cfg moduletools.ClassConfig + } + tests := []struct { + name string + fields fields + wantErr bool + }{ + { + name: "should not pass with empty config", + wantErr: true, + }, + { + name: "should not pass with nil config", + fields: fields{ + cfg: nil, + }, + wantErr: true, + }, + { + name: "should not pass with nil imageFields", + fields: fields{ + cfg: newConfigBuilder(). 
+ addSetting("model", "model").addSetting("imageFields", nil).build(), + }, + wantErr: true, + }, + { + name: "should not pass with fault imageFields value", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []string{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty string in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{""}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with int value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{1.0}).build(), + }, + wantErr: true, + }, + { + name: "should pass with proper value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{"field"}).build(), + }, + }, + { + name: "should pass with proper value in imageFields and textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("imageFields", []interface{}{"imageField"}). + addSetting("textFields", []interface{}{"textField"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + addWeights([]interface{}{1, 2}, []interface{}{1, 2}). 
+ build(), + }, + }, + { + name: "should pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + }, + { + name: "should not pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, "aaaa"}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{json.Number("1"), json.Number("2")}, []interface{}{json.Number("3")}). + build(), + }, + }, + { + name: "should pass with proper dimensions setting in videoFields together with image fields", + fields: fields{ + cfg: newConfigBuilder(). 
+ addSetting("videoFields", []interface{}{"video1"}). + addSetting("imageFields", []interface{}{"image1"}). + build(), + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.fields.cfg) + if err := ic.Validate(); (err != nil) != tt.wantErr { + t.Errorf("classSettings.Validate() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +type builder struct { + fakeClassConfig *fakeClassConfig +} + +func newConfigBuilder() *builder { + return &builder{ + fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}}, + } +} + +func (b *builder) addSetting(name string, value interface{}) *builder { + b.fakeClassConfig.config[name] = value + return b +} + +func (b *builder) addWeights(textWeights, imageWeights []interface{}) *builder { + if textWeights != nil || imageWeights != nil { + weightSettings := map[string]interface{}{} + if textWeights != nil { + weightSettings["textFields"] = textWeights + } + if imageWeights != nil { + weightSettings["imageFields"] = imageWeights + } + b.fakeClassConfig.config["weights"] = weightSettings + } + return b +} + +func (b *builder) build() *fakeClassConfig { + return b.fakeClassConfig +} + +type fakeClassConfig struct { + config map[string]interface{} +} + +func (c fakeClassConfig) Class() map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Property(propName string) map[string]interface{} { + return c.config +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/module.go 
b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/module.go new file mode 100644 index 0000000000000000000000000000000000000000..52bb16f3b71b55f50b30a4fb1e6313ae6d4badba --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/module.go @@ -0,0 +1,137 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "context" + "os" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-nvidia/clients" + "github.com/weaviate/weaviate/modules/multi2vec-nvidia/ent" + "github.com/weaviate/weaviate/modules/multi2vec-nvidia/vectorizer" +) + +const Name = "multi2vec-nvidia" + +func New() *Module { + return &Module{} +} + +type Module struct { + vectorizer *vectorizer.Vectorizer + nearImageGraphqlProvider modulecapabilities.GraphQLArguments + nearImageSearcher modulecapabilities.Searcher[[]float32] + nearTextGraphqlProvider modulecapabilities.GraphQLArguments + nearTextSearcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + metaClient metaClient + logger logrus.FieldLogger +} + +type metaClient interface { + MetaInfo() (map[string]interface{}, error) +} + +func (m *Module) Name() string { + return Name +} + +func (m *Module) Type() modulecapabilities.ModuleType { + return modulecapabilities.Multi2Vec +} + +func (m *Module) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + if err := 
m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initNearImage(); err != nil { + return errors.Wrap(err, "init near image") + } + + return nil +} + +func (m *Module) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init near text") + } + + return nil +} + +func (m *Module) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("NVIDIA_APIKEY") + client := clients.New(apiKey, timeout, logger) + + m.vectorizer = vectorizer.New(client) + m.metaClient = client + + return nil +} + +func (m *Module) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg) +} + +func (m *Module) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + return batch.VectorizeBatch(ctx, objs, skipObject, cfg, m.logger, m.vectorizer.Object) +} + +func (m *Module) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + ichek := ent.NewClassSettings(cfg) + mediaProps, err := ichek.Properties() + return false, mediaProps, err +} + +func (m *Module) MetaInfo() (map[string]interface{}, error) { + return m.metaClient.MetaInfo() +} + +func (m *Module) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} 
+ +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.InputVectorizer[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/nearArguments.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/nearArguments.go new file mode 100644 index 0000000000000000000000000000000000000000..55e86ecd93bbb8b526a89387a3238344cde98c85 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/nearArguments.go @@ -0,0 +1,57 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearImage" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *Module) initNearImage() error { + m.nearImageSearcher = nearImage.NewSearcher(m.vectorizer) + m.nearImageGraphqlProvider = nearImage.New() + return nil +} + +func (m *Module) initNearText() error { + m.nearTextSearcher = nearText.NewSearcher(m.vectorizer) + m.nearTextGraphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *Module) Arguments() map[string]modulecapabilities.GraphQLArgument { + arguments := map[string]modulecapabilities.GraphQLArgument{} + for name, arg := range m.nearImageGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearTextGraphqlProvider.Arguments() { + arguments[name] = arg + } + return arguments +} + +func (m *Module) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearches := 
map[string]modulecapabilities.VectorForParams[[]float32]{} + for name, arg := range m.nearImageSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearTextSearcher.VectorSearches() { + vectorSearches[name] = arg + } + return vectorSearches +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..f42ead07c7bd08f956ea6871e146ac9c0c7fcb5c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/vectorizer/fakes_for_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" +) + +type builder struct { + fakeClassConfig *fakeClassConfig +} + +func newConfigBuilder() *builder { + return &builder{ + fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}}, + } +} + +func (b *builder) addSetting(name string, value interface{}) *builder { + b.fakeClassConfig.config[name] = value + return b +} + +func (b *builder) addWeights(textWeights, imageWeights []interface{}) *builder { + if textWeights != nil || imageWeights != nil { + weightSettings := map[string]interface{}{} + if textWeights != nil { + weightSettings["textFields"] = textWeights + } + if imageWeights != nil { + weightSettings["imageFields"] = imageWeights + } + b.fakeClassConfig.config["weights"] = weightSettings + } + return b +} + +func (b *builder) build() *fakeClassConfig { + return b.fakeClassConfig +} + +type fakeClassConfig struct { + config map[string]interface{} +} + +func (c fakeClassConfig) Class() map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Property(propName string) map[string]interface{} { + return c.config +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +type fakeClient struct{} + +func (c *fakeClient) Vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationCLIPResult[[]float32], error) { + result := 
&modulecomponents.VectorizationCLIPResult[[]float32]{ + TextVectors: [][]float32{{1.0, 2.0, 3.0, 4.0, 5.0}}, + ImageVectors: [][]float32{{10.0, 20.0, 30.0, 40.0, 50.0}}, + } + return result, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/vectorizer/texts.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/vectorizer/texts.go new file mode 100644 index 0000000000000000000000000000000000000000..9600d9caac59f5f7884587d90f2733431242c192 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/vectorizer/texts.go @@ -0,0 +1,33 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + res, err := v.client.Vectorize(ctx, inputs, nil, cfg) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + if len(inputs) != len(res.TextVectors) { + return nil, errors.New("inputs are not equal to vectors returned") + } + return libvectorizer.CombineVectors(res.TextVectors), nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/vectorizer/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/vectorizer/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..e9c5196bf766550dec8922a33dc311e9cfdbcb05 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/vectorizer/vectorizer.go @@ -0,0 +1,151 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ 
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-nvidia/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +type Vectorizer struct { + client Client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +type Client interface { + Vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig) (*modulecomponents.VectorizationCLIPResult[[]float32], error) +} + +type ClassSettings interface { + ImageField(property string) bool + ImageFieldsWeights() ([]float32, error) + TextField(property string) bool + TextFieldsWeights() ([]float32, error) +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, + cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) VectorizeImage(ctx context.Context, id, image string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.Vectorize(ctx, nil, []string{image}, cfg) + if err != nil { + return nil, err + } + if len(res.ImageVectors) != 1 { + return nil, errors.New("empty vector") + } + + return res.ImageVectors[0], nil +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, + cfg moduletools.ClassConfig, +) 
([]float32, error) { + ichek := ent.NewClassSettings(cfg) + + // vectorize image and text + texts := []string{} + images := []string{} + + if object.Properties != nil { + schemamap := object.Properties.(map[string]interface{}) + for _, propName := range moduletools.SortStringKeys(schemamap) { + switch val := schemamap[propName].(type) { + case string: + if ichek.ImageField(propName) { + images = append(images, val) + } + if ichek.TextField(propName) { + texts = append(texts, val) + } + case []string: + if ichek.TextField(propName) { + texts = append(texts, val...) + } + default: // properties that are not part of the object + + } + } + + } + + vectors := [][]float32{} + if len(texts) > 0 || len(images) > 0 { + res, err := v.client.Vectorize(ctx, texts, images, cfg) + if err != nil { + return nil, err + } + vectors = append(vectors, res.TextVectors...) + vectors = append(vectors, res.ImageVectors...) + } + weights, err := v.getWeights(ichek) + if err != nil { + return nil, err + } + + return libvectorizer.CombineVectorsWithWeights(vectors, weights), nil +} + +func (v *Vectorizer) getWeights(ichek ClassSettings) ([]float32, error) { + weights := []float32{} + textFieldsWeights, err := ichek.TextFieldsWeights() + if err != nil { + return nil, err + } + imageFieldsWeights, err := ichek.ImageFieldsWeights() + if err != nil { + return nil, err + } + + weights = append(weights, textFieldsWeights...) + weights = append(weights, imageFieldsWeights...) 
+ + normalizedWeights := v.normalizeWeights(weights) + + return normalizedWeights, nil +} + +func (v *Vectorizer) normalizeWeights(weights []float32) []float32 { + if len(weights) > 0 { + var denominator float32 + for i := range weights { + denominator += weights[i] + } + normalizer := 1 / denominator + normalized := make([]float32, len(weights)) + for i := range weights { + normalized[i] = weights[i] * normalizer + } + return normalized + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/vectorizer/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/vectorizer/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..304ff48b7de73a4aa12b8b3e5795e6296e5e0115 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-nvidia/vectorizer/vectorizer_test.go @@ -0,0 +1,194 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +const image = "iVBORw0KGgoAAAANSUhEUgAAAGAAAAA/CAYAAAAfQM0aAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpCRjQ5NEM3RDI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpCRjQ5NEM3RTI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOkJGNDk0QzdCMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOkJGNDk0QzdDMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+WeGRxAAAB2hJREFUeNrUXFtslUUQ3hJCoQVEKy0k1qQgrRg0vaAJaq1tvJSgaLy8mKDF2IvxBY2Bgm8+iIoxvhB72tTUmKgPigbFKCEtxeKD9hZjAi3GJrYJtqRai7TQB+pMz/zwU/5zzsxe2u4kXwiwZ+bb/Xb/s7v/zEmrra1VTFsFeBRQCtgEuBWwkv5vHPAn4DdAB+B7wBjXcUNDQ8o2dXV1SmDzyhUtLS3tBPyxC9CdrN1ihi/swKuA7YD0BG1uJhQDngdcAnwDeJ86Ole2kLii+J2AFsA+wF9RjRalmEUHaZY8m6RDUYZtn6HPHiRfLm2hck0D7AScAdRH8UokwD2AnwA7UoiUyhaRD/S12dHg+8B1OWA/4BTgqVQCPEJL8haLBNDXEfJt03ziipYH+BJwHFAYJcAWwCeAZQ6CLyPfWyz584nrbCuj74eHwgKsddih2R1ba+jHJ65R1k6PuWNhAd4DZM/BTiWbdhwm5hPXsA0AngY8COgNP4JwSTyu4zE/P18VFhZKP7aNYuouXxFX5Ic8Nc2Ea2D/AfYCNgIORZ0DdusOfnFxcXDw
UD09PZKP76alKDUR16KiIlVQUHDl7/39/Uozpg7Xac45YB0dGrQHHw07KVwJpRRbYiKuyCc8+MhXcyXocP2RnvMvJhr8QIBK08EPbGJiQuqq0mX7KD4GIohi4xVPTU0N6/BRamPwu7u7dZb3/RozkW3IB3lZEkGHayeI8FFVVdWaZAIUcD2Wl5fbHHy024XtC6QBkomA/XHIFb8X0Xamp6efASHqt27dGnkVkcNxVlFRoXJycmwOvuLGNmifVATsD/bLZezgKgKE2J+bm3sKHk3XXUWs4Mz87Oxs24OvOLEN26cUAfvFXAkrlKGBCDNXEbAajldXV1+5ijjP+KCrg855x+3nk2uy8SwDdIIIM1cRI6k+0NraqkZGRmzuKAIbFrYf0Q2UaPOA/Wpra3PBNfHhYHq6HbC5qanpGB7ETgPWc0TApTr7eyDolOaj6LRG+/W2Bn94eJg7+DpcowZ+AGb+642NjYfC3wEdXAdI1uK2Du2ksH2HrcHHfggGX4frNVcRMPh7BwcHN8ZiseuuIr4DvKXib29YX2bhmW+wEqYptsREXC2eWXS44oyfuYqYmpra19LSEnkaRgEG6Nj8gGRHESVCRkaG9Kg+IOyTiGtmZqatnZsOV/zMLnjcsF7KH5AIECVCX1+f6u3tlbg4oLmc2VyDy8HgPshg2yzmCo8aFsdAALzpw9dw23REwJkvHPwjSu92UcwVRcAnAd4LaQ6+CVe2AGivAe5WwhcdGp0aoVgmJuIqnBy2uSa18Buxs4AXAJMO401SjLOGfnziyhYg2GrtcNSxSfJ90pI/n7iyBUA7quKv/IYsxhmiZ/ZRy/x94soWAO1nwL0qnhVw2cD/ZfKBvjod9cEnrmwB0DBh9RUVfxHxhYrnUHLtEn2mlHyMOe6HT1wT7oISGSas4ntNzJmsVFczjnMBN1CbfwGD1BYPID8A/lFzbz5xZQsQnmWfExa6ecNVIsBKWuIlgA0qnjG2PLhsou0aZgF3qfil2fg89ssbrhwBNtB+GN/dLUnQ5kbCHYAnAFMAvGpsoY7OlS0krmOhxx7WLHwAeBLwVahN2uIUswgrPB5T8rRv7DxWqDwM+JaCjzue8b5wZe2C7gJ8quKVJqY599vJ1yZHffCJK0uA+wAfAtZYjIO+Gsi3TfOJK0sAfFP/jpKV+HBtKfkutOTPJ64sAVYD3qXgrmwpxVht6McnrmwBMAP4pjlYdRij3tCHT1xZAuDdermOA836gDKKqWNirob1ASZc2eeAl3QH36A+AGP+ohFWxNVSfYAuV9YKyKUTo/bgo2nUB5RQbImJuFqsD9DhyhbAuDgjMI36gFKX7S3XB5S6egSV2Bh8zYyDYjr4SGYi2yzmMIm5YnFGkFOLSQGNjY3X/BtaLBabWQF5XKcO6gOkZT950gAW6wPWuXoEZXEaOqoPyHLcPqkIwvqALFcCZHJmvqP6gEzH7VOKIKgPyHQlwIVUjRzWB1xw3H4+ubIFGE3VyGF9wKjj9ik3D4L6gFFXArCSTlEEzKe3LMIfwvYDNgcf+4P9csSVLUAXt7GD+oBuYfsuW4OvUR/Q7UoA/G2zaRvbOqEI0xRbYiKulusDTrgSYEg6sxKJIKwP6FLyjDYRV4v1ATpc2QKgNZtu6zTqA5o1ObM/h5eDyMvCtrlZObLgNhRv+jAHvkwqQjDzhYPfrvRvF0VcLdQHaHGNxWKrZv0d//hahcqr8Ccww1kRbwPuVMIXHRqd+ptimZiIq0F9gA2urEcQ2jkVf/tz0WG8ixTjnKEfn7iyBQi2WnuULLlV0qE9FrdzPnFlC4CGRQkvqyQ/MqRh6KtO2S948IkrWwC0XwHPAQ4r85z7w+TL1U8Y+8Q14S4oyjA9703AZ4AqFX8RvoTpN8i3/Bi/p+egHz5xZQsQGCasvqGuZhzj76DdpuIZx8FPuOAviWDG8e8qXl0yXxnHPnGdsf8FGAByGwC02iMZswAAAABJRU5ErkJggg==" + 
+func TestVectorizer(t *testing.T) { + t.Run("should vectorize image", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image"}).build() + + props := map[string]interface{}{ + "image": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) + + t.Run("should vectorize 2 image fields", func(t *testing.T) { + // given + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder().addSetting("imageFields", []interface{}{"image1", "image2"}).build() + + props := map[string]interface{}{ + "image1": image, + "image2": image, + } + object := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + // when + vector, _, err := vectorizer.Object(context.Background(), object, config) + + // then + require.Nil(t, err) + assert.NotNil(t, vector) + }) +} + +func TestVectorizerWithDiff(t *testing.T) { + type testCase struct { + name string + input *models.Object + } + + props := map[string]interface{}{ + "image": image, + "text": "text", + "description": "non-vectorizable", + } + + tests := []testCase{ + { + name: "noop comp", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + { + name: "one vectorizable prop changed (1)", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + { + name: "one vectorizable prop changed (2)", + input: &models.Object{ + ID: "some-uuid", + Properties: props, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder(). + addSetting("imageFields", []interface{}{"image"}). + addSetting("textFields", []interface{}{"text"}). 
+ build() + + vector, _, err := vectorizer.Object(context.Background(), test.input, config) + + require.Nil(t, err) + assert.Equal(t, []float32{5.5, 11, 16.5, 22, 27.5}, vector) + }) + } +} + +func TestVectorizerWithWeights(t *testing.T) { + client := &fakeClient{} + vectorizer := New(client) + config := newConfigBuilder(). + addSetting("imageFields", []interface{}{"image"}). + addSetting("textFields", []interface{}{"text"}). + addWeights([]interface{}{0.4}, []interface{}{0.6}). + build() + + props := map[string]interface{}{ + "image": image, + "text": "text", + "description": "non-vectorizable", + } + input := &models.Object{ + ID: "some-uuid", + Properties: props, + } + + vector, _, err := vectorizer.Object(context.Background(), input, config) + + require.Nil(t, err) + assert.Equal(t, []float32{3.2, 6.4, 9.6, 12.8, 16}, vector) + // vectors are defined in Vectorize within fakes_for_test.go + // result calculated with above weights as (textVectors[0][i]*0.4+imageVectors[0][i]*0.6) / 2 +} + +func TestVectorizer_normalizeWeights(t *testing.T) { + tests := []struct { + name string + weights []float32 + }{ + { + name: "normalize example 1", + weights: []float32{200, 100, 0.1}, + }, + { + name: "normalize example 2", + weights: []float32{300.22, 0.7, 17, 54}, + }, + { + name: "normalize example 3", + weights: []float32{300, 0.02, 17}, + }, + { + name: "normalize example 4", + weights: []float32{500, 0.02, 17.4, 180}, + }, + { + name: "normalize example 5", + weights: []float32{500, 0.02, 17.4, 2, 4, 5, .88}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + v := &Vectorizer{} + if got := v.normalizeWeights(tt.weights); !checkNormalization(got) { + t.Errorf("Vectorizer.normalizeWeights() = %v, want %v", got, 1.0) + } + }) + } +} + +func checkNormalization(weights []float32) bool { + var result float32 + for i := range weights { + result += weights[i] + } + return result == 1.0 +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..3f1b1480bd1b2488a4d40d054398e4b786a57f23 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "VoyageAI Multi Modal Module", + "documentationHref": "https://docs.voyageai.com/docs/multimodal-embeddings", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/clients/voyageai.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/clients/voyageai.go new file mode 100644 index 0000000000000000000000000000000000000000..904e7a37a1fcb3441d75fe3b23c37c13ae2d23ed --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/clients/voyageai.go @@ -0,0 +1,89 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "fmt" + "time" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-voyageai/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/voyageai" +) + +type voyageaiUrlBuilder struct { + origin string + pathMask string +} + +func newVoyageAIUrlBuilder() *voyageaiUrlBuilder { + return &voyageaiUrlBuilder{ + origin: ent.DefaultBaseURL, + pathMask: "/multimodalembeddings", + } +} + +func (c *voyageaiUrlBuilder) URL(baseURL string) string { + if baseURL != "" { + return fmt.Sprintf("%s%s", baseURL, c.pathMask) + } + return fmt.Sprintf("%s%s", c.origin, c.pathMask) +} + +type vectorizer struct { + client *voyageai.Client +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + client: voyageai.New(apiKey, timeout, newVoyageAIUrlBuilder(), logger), + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationCLIPResult[[]float32], error) { + settings := ent.NewClassSettings(cfg) + return v.client.VectorizeMultiModal(ctx, texts, images, voyageai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Truncate: settings.Truncate(), + InputType: voyageai.Document, + }) +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, + input []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationCLIPResult[[]float32], error) { + settings := ent.NewClassSettings(cfg) + return v.client.VectorizeMultiModal(ctx, input, nil, voyageai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Truncate: settings.Truncate(), + InputType: voyageai.Query, + }) +} + +func (v *vectorizer) VectorizeImageQuery(ctx context.Context, + images []string, cfg 
moduletools.ClassConfig, +) (*modulecomponents.VectorizationCLIPResult[[]float32], error) { + settings := ent.NewClassSettings(cfg) + return v.client.VectorizeMultiModal(ctx, nil, images, voyageai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Truncate: settings.Truncate(), + InputType: voyageai.Query, + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/clients/voyageai_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/clients/voyageai_test.go new file mode 100644 index 0000000000000000000000000000000000000000..02bd38b1f13797bc73c4c90a6fea915744bb9fdd --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/clients/voyageai_test.go @@ -0,0 +1,195 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/voyageai" +) + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{voyageai.New("apiKey", 0, &voyageaiUrlBuilder{origin: server.URL, pathMask: "/multimodalembeddings"}, nullLogger())} + expected := &modulecomponents.VectorizationCLIPResult[[]float32]{ + TextVectors: [][]float32{{0.1, 0.2, 0.3}}, + ImageVectors: [][]float32{{0.4, 0.5, 0.6}}, + } + res, err := c.Vectorize(context.Background(), + []string{"This is my text"}, []string{"base64image"}, + fakeClassConfig{classConfig: map[string]interface{}{"Model": "voyage-multimodal-3", "baseURL": server.URL}}, + ) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{voyageai.New("apiKey", 0, &voyageaiUrlBuilder{origin: server.URL, pathMask: "/multimodalembeddings"}, nullLogger())} + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, err := c.Vectorize(ctx, []string{"text"}, []string{"image"}, + fakeClassConfig{classConfig: map[string]interface{}{"Model": "voyage-multimodal-3"}}, + ) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := 
httptest.NewServer(&fakeHandler{ + t: t, + serverError: "nope, not gonna happen", + }) + defer server.Close() + c := &vectorizer{voyageai.New("apiKey", 0, &voyageaiUrlBuilder{origin: server.URL, pathMask: "/multimodalembeddings"}, nullLogger())} + _, err := c.Vectorize(context.Background(), []string{"text"}, []string{"image"}, + fakeClassConfig{classConfig: map[string]interface{}{"Model": "voyage-multimodal-3", "baseURL": server.URL}}, + ) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "nope, not gonna happen") + }) + + t.Run("when VoyageAI key is passed using header", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{voyageai.New("apiKey", 0, &voyageaiUrlBuilder{origin: server.URL, pathMask: "/multimodalembeddings"}, nullLogger())} + ctxWithValue := context.WithValue(context.Background(), + "X-Voyageai-Api-Key", []string{"some-key"}) + + expected := &modulecomponents.VectorizationCLIPResult[[]float32]{ + TextVectors: [][]float32{{0.1, 0.2, 0.3}}, + ImageVectors: [][]float32{{0.4, 0.5, 0.6}}, + } + res, err := c.Vectorize(ctxWithValue, []string{"text"}, []string{"image"}, + fakeClassConfig{classConfig: map[string]interface{}{"Model": "voyage-multimodal-3", "baseURL": server.URL}}, + ) + + require.Nil(t, err) + assert.Equal(t, expected, res) + }) +} + +type fakeHandler struct { + t *testing.T + serverError string +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.serverError != "" { + resp := map[string]interface{}{ + "detail": f.serverError, + } + outBytes, err := json.Marshal(resp) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var req map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &req)) + assert.NotNil(f.t, req) + + resp := 
map[string]interface{}{ + "data": []map[string]interface{}{ + {"embedding": []float32{0.1, 0.2, 0.3}}, + {"embedding": []float32{0.4, 0.5, 0.6}}, + }, + } + + outBytes, err := json.Marshal(resp) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/config.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..1f5833280ce78d778e2fbde26084a487f9a5b5c6 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/config.go @@ -0,0 +1,46 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. 
All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/multi2vec-voyageai/ent" +) + +func (m *Module) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": ent.DefaultVectorizeClassName, + "baseURL": ent.DefaultBaseURL, + "model": ent.DefaultVoyageAIModel, + "truncate": ent.DefaultTruncate, + } +} + +func (m *Module) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *Module) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + icheck := ent.NewClassSettings(cfg) + return icheck.Validate() +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..399679555cb80728635bed08ad019eaaca101e7b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/ent/class_settings.go @@ -0,0 +1,263 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + baseURLProperty = "baseURL" + modelProperty = "model" + truncateProperty = "truncate" +) + +const ( + DefaultBaseURL = "https://api.voyageai.com/v1" + DefaultVoyageAIModel = "voyage-multimodal-3" + DefaultTruncate = true + DefaultVectorizeClassName = false + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false +) + +type classSettings struct { + base *basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{ + cfg: cfg, + base: basesettings.NewBaseClassSettingsWithAltNames(cfg, false, "multi2vec-voyageai", nil, nil), + } +} + +// VoyageAI settings +func (cs *classSettings) Model() string { + return cs.base.GetPropertyAsString(modelProperty, DefaultVoyageAIModel) +} + +func (cs classSettings) Truncate() bool { + return cs.base.GetPropertyAsBool("truncateProperty", DefaultTruncate) +} + +func (cs *classSettings) BaseURL() string { + return cs.base.GetPropertyAsString(baseURLProperty, DefaultBaseURL) +} + +// CLIP module specific settings +func (ic *classSettings) ImageField(property string) bool { + return ic.field("imageFields", property) +} + +func (ic *classSettings) ImageFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("image") +} + +func (ic *classSettings) TextField(property string) bool { + return ic.field("textFields", property) +} + +func (ic *classSettings) TextFieldsWeights() ([]float32, error) { + return ic.getFieldsWeights("text") +} + +func (ic *classSettings) Properties() ([]string, error) { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return nil, errors.New("empty config") + } + props := make([]string, 
0) + + fields := []string{"textFields", "imageFields"} + + for _, field := range fields { + fields, ok := ic.base.GetSettings()[field] + if !ok { + continue + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return nil, errors.Errorf("%s must be an array", field) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return nil, errors.Errorf("%s must be a string", field) + } + props = append(props, v) + } + } + return props, nil +} + +func (ic *classSettings) field(name, property string) bool { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return false + } + + fields, ok := ic.base.GetSettings()[name] + if !ok { + return false + } + + fieldsArray, ok := fields.([]interface{}) + if !ok { + return false + } + + fieldNames := make([]string, len(fieldsArray)) + for i, value := range fieldsArray { + fieldNames[i] = value.(string) + } + + for i := range fieldNames { + if fieldNames[i] == property { + return true + } + } + + return false +} + +func (ic *classSettings) Validate() error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + var errorMessages []string + + imageFields, imageFieldsOk := ic.cfg.Class()["imageFields"] + textFields, textFieldsOk := ic.cfg.Class()["textFields"] + if !imageFieldsOk && !textFieldsOk { + errorMessages = append(errorMessages, "textFields or imageFields setting needs to be present") + } + + if imageFieldsOk { + imageFieldsCount, err := ic.validateFields("image", imageFields) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + err = ic.validateWeights("image", imageFieldsCount) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + } + + if textFieldsOk { + textFieldsCount, err := ic.validateFields("text", textFields) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + err = 
ic.validateWeights("text", textFieldsCount) + if err != nil { + errorMessages = append(errorMessages, err.Error()) + } + } + + if len(errorMessages) > 0 { + return fmt.Errorf("%s", strings.Join(errorMessages, ", ")) + } + + return nil +} + +func (ic *classSettings) validateFields(name string, fields interface{}) (int, error) { + fieldsArray, ok := fields.([]interface{}) + if !ok { + return 0, errors.Errorf("%sFields must be an array", name) + } + + if len(fieldsArray) == 0 { + return 0, errors.Errorf("must contain at least one %s field name in %sFields", name, name) + } + + for _, value := range fieldsArray { + v, ok := value.(string) + if !ok { + return 0, errors.Errorf("%sField must be a string", name) + } + if len(v) == 0 { + return 0, errors.Errorf("%sField values cannot be empty", name) + } + } + + return len(fieldsArray), nil +} + +func (ic *classSettings) validateWeights(name string, count int) error { + weights, ok := ic.getWeights(name) + if ok { + if len(weights) != count { + return errors.Errorf("weights.%sFields does not equal number of %sFields", name, name) + } + _, err := ic.getWeightsArray(weights) + if err != nil { + return err + } + } + + return nil +} + +func (ic *classSettings) getWeights(name string) ([]interface{}, bool) { + weights, ok := ic.base.GetSettings()["weights"] + if ok { + weightsObject, ok := weights.(map[string]interface{}) + if ok { + fieldWeights, ok := weightsObject[fmt.Sprintf("%sFields", name)] + if ok { + fieldWeightsArray, ok := fieldWeights.([]interface{}) + if ok { + return fieldWeightsArray, ok + } + } + } + } + + return nil, false +} + +func (ic *classSettings) getWeightsArray(weights []interface{}) ([]float32, error) { + weightsArray := make([]float32, len(weights)) + for i := range weights { + weight, err := ic.getNumber(weights[i]) + if err != nil { + return nil, err + } + weightsArray[i] = weight + } + return weightsArray, nil +} + +func (ic *classSettings) getFieldsWeights(name string) ([]float32, error) { + 
weights, ok := ic.getWeights(name) + if ok { + return ic.getWeightsArray(weights) + } + return nil, nil +} + +func (ic *classSettings) getNumber(in interface{}) (float32, error) { + return ic.base.GetNumber(in) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/ent/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/ent/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..00ed00ac67c8787f66101a09f703765c0e4a1c06 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/ent/class_settings_test.go @@ -0,0 +1,249 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "encoding/json" + "testing" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + type fields struct { + cfg moduletools.ClassConfig + } + tests := []struct { + name string + fields fields + wantErr bool + }{ + { + name: "should not pass with empty config", + wantErr: true, + }, + { + name: "should not pass with nil config", + fields: fields{ + cfg: nil, + }, + wantErr: true, + }, + { + name: "should not pass with nil imageFields", + fields: fields{ + cfg: newConfigBuilder(). 
+ addSetting("model", "model").addSetting("imageFields", nil).build(), + }, + wantErr: true, + }, + { + name: "should not pass with fault imageFields value", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []string{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with empty string in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{""}).build(), + }, + wantErr: true, + }, + { + name: "should not pass with int value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{1.0}).build(), + }, + wantErr: true, + }, + { + name: "should pass with proper value in imageFields", + fields: fields{ + cfg: newConfigBuilder().addSetting("imageFields", []interface{}{"field"}).build(), + }, + }, + { + name: "should pass with proper value in imageFields and textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("imageFields", []interface{}{"imageField"}). + addSetting("textFields", []interface{}{"textField"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1", "imageField2"}). + addWeights([]interface{}{1, 2}, []interface{}{1, 2}). 
+ build(), + }, + }, + { + name: "should pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + }, + { + name: "should pass with proper value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, 2}, []interface{}{1}). + build(), + }, + }, + { + name: "should not pass with proper value in 1 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should not pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{1, "aaaa"}, []interface{}{1}). + build(), + }, + wantErr: true, + }, + { + name: "should pass with not proper weight value in 2 imageFields and 2 textFields and weights", + fields: fields{ + cfg: newConfigBuilder(). + addSetting("textFields", []interface{}{"textField1", "textField2"}). + addSetting("imageFields", []interface{}{"imageField1"}). + addWeights([]interface{}{json.Number("1"), json.Number("2")}, []interface{}{json.Number("3")}). + build(), + }, + }, + { + name: "should pass with proper dimensions setting in videoFields together with image fields", + fields: fields{ + cfg: newConfigBuilder(). 
+ addSetting("videoFields", []interface{}{"video1"}). + addSetting("imageFields", []interface{}{"image1"}). + build(), + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.fields.cfg) + if err := ic.Validate(); (err != nil) != tt.wantErr { + t.Errorf("classSettings.Validate() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +type builder struct { + fakeClassConfig *fakeClassConfig +} + +func newConfigBuilder() *builder { + return &builder{ + fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}}, + } +} + +func (b *builder) addSetting(name string, value interface{}) *builder { + b.fakeClassConfig.config[name] = value + return b +} + +func (b *builder) addWeights(textWeights, imageWeights []interface{}) *builder { + if textWeights != nil || imageWeights != nil { + weightSettings := map[string]interface{}{} + if textWeights != nil { + weightSettings["textFields"] = textWeights + } + if imageWeights != nil { + weightSettings["imageFields"] = imageWeights + } + b.fakeClassConfig.config["weights"] = weightSettings + } + return b +} + +func (b *builder) build() *fakeClassConfig { + return b.fakeClassConfig +} + +type fakeClassConfig struct { + config map[string]interface{} +} + +func (c fakeClassConfig) Class() map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return c.config +} + +func (c fakeClassConfig) Property(propName string) map[string]interface{} { + return c.config +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/module.go 
b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..44c906b62a02bd17aa5be1d56bf10ed4cbf737c8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/module.go @@ -0,0 +1,137 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "context" + "os" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-voyageai/clients" + "github.com/weaviate/weaviate/modules/multi2vec-voyageai/ent" + "github.com/weaviate/weaviate/modules/multi2vec-voyageai/vectorizer" +) + +const Name = "multi2vec-voyageai" + +func New() *Module { + return &Module{} +} + +type Module struct { + vectorizer *vectorizer.Vectorizer + nearImageGraphqlProvider modulecapabilities.GraphQLArguments + nearImageSearcher modulecapabilities.Searcher[[]float32] + nearTextGraphqlProvider modulecapabilities.GraphQLArguments + nearTextSearcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + metaClient metaClient + logger logrus.FieldLogger +} + +type metaClient interface { + MetaInfo() (map[string]interface{}, error) +} + +func (m *Module) Name() string { + return Name +} + +func (m *Module) Type() modulecapabilities.ModuleType { + return modulecapabilities.Multi2Vec +} + +func (m *Module) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + if 
err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initNearImage(); err != nil { + return errors.Wrap(err, "init near image") + } + + return nil +} + +func (m *Module) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init near text") + } + + return nil +} + +func (m *Module) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("VOYAGEAI_APIKEY") + client := clients.New(apiKey, timeout, logger) + + m.vectorizer = vectorizer.New(client) + m.metaClient = client + + return nil +} + +func (m *Module) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg) +} + +func (m *Module) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + return batch.VectorizeBatch(ctx, objs, skipObject, cfg, m.logger, m.vectorizer.Object) +} + +func (m *Module) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + ichek := ent.NewClassSettings(cfg) + mediaProps, err := ichek.Properties() + return false, mediaProps, err +} + +func (m *Module) MetaInfo() (map[string]interface{}, error) { + return m.metaClient.MetaInfo() +} + +func (m *Module) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, 
[]string{input}, cfg) +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.InputVectorizer[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/nearArguments.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/nearArguments.go new file mode 100644 index 0000000000000000000000000000000000000000..55e86ecd93bbb8b526a89387a3238344cde98c85 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/nearArguments.go @@ -0,0 +1,57 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modclip + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearImage" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *Module) initNearImage() error { + m.nearImageSearcher = nearImage.NewSearcher(m.vectorizer) + m.nearImageGraphqlProvider = nearImage.New() + return nil +} + +func (m *Module) initNearText() error { + m.nearTextSearcher = nearText.NewSearcher(m.vectorizer) + m.nearTextGraphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *Module) Arguments() map[string]modulecapabilities.GraphQLArgument { + arguments := map[string]modulecapabilities.GraphQLArgument{} + for name, arg := range m.nearImageGraphqlProvider.Arguments() { + arguments[name] = arg + } + for name, arg := range m.nearTextGraphqlProvider.Arguments() { + arguments[name] = arg + } + return arguments +} + +func (m *Module) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearches 
:= map[string]modulecapabilities.VectorForParams[[]float32]{} + for name, arg := range m.nearImageSearcher.VectorSearches() { + vectorSearches[name] = arg + } + for name, arg := range m.nearTextSearcher.VectorSearches() { + vectorSearches[name] = arg + } + return vectorSearches +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d5b71c4bf0a725f3cba7a9736713eb2f992bd8f0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/vectorizer/fakes_for_test.go @@ -0,0 +1,116 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
// builder incrementally assembles a fakeClassConfig for tests.
type builder struct {
	fakeClassConfig *fakeClassConfig
}

// newConfigBuilder returns a builder around an empty config map.
func newConfigBuilder() *builder {
	return &builder{
		fakeClassConfig: &fakeClassConfig{config: map[string]interface{}{}},
	}
}

// addSetting stores an arbitrary class-level setting under the given name.
func (b *builder) addSetting(name string, value interface{}) *builder {
	b.fakeClassConfig.config[name] = value
	return b
}

// addWeights stores text/image field weights under the "weights" key.
// A nil slice is omitted from the weights map, and no "weights" entry is
// written at all when both slices are nil.
func (b *builder) addWeights(textWeights, imageWeights []interface{}) *builder {
	if textWeights != nil || imageWeights != nil {
		weightSettings := map[string]interface{}{}
		if textWeights != nil {
			weightSettings["textFields"] = textWeights
		}
		if imageWeights != nil {
			weightSettings["imageFields"] = imageWeights
		}
		b.fakeClassConfig.config["weights"] = weightSettings
	}
	return b
}

// build returns the assembled fake config.
func (b *builder) build() *fakeClassConfig {
	return b.fakeClassConfig
}

// fakeClassConfig is a minimal ClassConfig stub backed by a single map;
// class-, module- and property-scoped lookups all return the same map.
type fakeClassConfig struct {
	config map[string]interface{}
}

func (c fakeClassConfig) Class() map[string]interface{} {
	return c.config
}

func (c fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} {
	return c.config
}

func (c fakeClassConfig) Property(propName string) map[string]interface{} {
	return c.config
}

// Tenant returns an empty tenant name; tenancy is irrelevant in these tests.
func (f fakeClassConfig) Tenant() string {
	return ""
}

// TargetVector returns an empty target vector name.
func (f fakeClassConfig) TargetVector() string {
	return ""
}

func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType {
	return nil
}

func (f fakeClassConfig) Config() *config.Config {
	return nil
}

// fakeClient is a stub vectorizer client that returns fixed vectors
// regardless of input, so tests can assert exact combined values.
type fakeClient struct{}

// Vectorize returns one fixed text vector and one fixed image vector.
func (c *fakeClient) Vectorize(ctx context.Context,
	texts, images []string, cfg moduletools.ClassConfig,
) (*modulecomponents.VectorizationCLIPResult[[]float32], error) {
	result := &modulecomponents.VectorizationCLIPResult[[]float32]{
		TextVectors:  [][]float32{{1.0, 2.0, 3.0, 4.0, 5.0}},
		ImageVectors: [][]float32{{10.0, 20.0, 30.0, 40.0, 50.0}},
	}
	return result, nil
}

// VectorizeQuery returns a single fixed text vector.
func (c *fakeClient) VectorizeQuery(ctx context.Context,
	input []string, cfg moduletools.ClassConfig,
) (*modulecomponents.VectorizationCLIPResult[[]float32], error) {
	result := &modulecomponents.VectorizationCLIPResult[[]float32]{
		TextVectors: [][]float32{{1.0, 2.0, 3.0, 4.0, 5.0}},
	}
	return result, nil
}

// VectorizeImageQuery returns a single fixed image vector.
func (c *fakeClient) VectorizeImageQuery(ctx context.Context,
	images []string, cfg moduletools.ClassConfig,
) (*modulecomponents.VectorizationCLIPResult[[]float32], error) {
	result := &modulecomponents.VectorizationCLIPResult[[]float32]{
		ImageVectors: [][]float32{{10.0, 20.0, 30.0, 40.0, 50.0}},
	}
	return result, nil
}
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + res, err := v.client.VectorizeQuery(ctx, inputs, cfg) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + if len(inputs) != len(res.TextVectors) { + return nil, errors.New("inputs are not equal to vectors returned") + } + return libvectorizer.CombineVectors(res.TextVectors), nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/vectorizer/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/vectorizer/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..75c29a1ddf064be96217c7e27e54f810e34eaa94 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/vectorizer/vectorizer.go @@ -0,0 +1,158 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/multi2vec-voyageai/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +type Vectorizer struct { + client Client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +type Client interface { + Vectorize(ctx context.Context, + texts, images []string, cfg moduletools.ClassConfig, + ) (*modulecomponents.VectorizationCLIPResult[[]float32], error) + VectorizeQuery(ctx context.Context, + input []string, cfg moduletools.ClassConfig, + ) (*modulecomponents.VectorizationCLIPResult[[]float32], error) + VectorizeImageQuery(ctx context.Context, + images []string, cfg moduletools.ClassConfig, + ) (*modulecomponents.VectorizationCLIPResult[[]float32], error) +} + +type ClassSettings interface { + ImageField(property string) bool + ImageFieldsWeights() ([]float32, error) + TextField(property string) bool + TextFieldsWeights() ([]float32, error) +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, + cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) VectorizeImage(ctx context.Context, id, image string, cfg moduletools.ClassConfig) ([]float32, error) { + res, err := v.client.VectorizeImageQuery(ctx, []string{image}, cfg) + if err != nil { + return nil, err + } + if len(res.ImageVectors) != 1 { + return nil, errors.New("empty vector") + } + + return res.ImageVectors[0], nil +} + 
+func (v *Vectorizer) object(ctx context.Context, object *models.Object, + cfg moduletools.ClassConfig, +) ([]float32, error) { + ichek := ent.NewClassSettings(cfg) + + // vectorize image and text + texts := []string{} + images := []string{} + + if object.Properties != nil { + schemamap := object.Properties.(map[string]interface{}) + for _, propName := range moduletools.SortStringKeys(schemamap) { + switch val := schemamap[propName].(type) { + case string: + if ichek.ImageField(propName) { + images = append(images, val) + } + if ichek.TextField(propName) { + texts = append(texts, val) + } + case []string: + if ichek.TextField(propName) { + texts = append(texts, val...) + } + default: // properties that are not part of the object + + } + } + + } + + vectors := [][]float32{} + if len(texts) > 0 || len(images) > 0 { + res, err := v.client.Vectorize(ctx, texts, images, cfg) + if err != nil { + return nil, err + } + vectors = append(vectors, res.TextVectors...) + vectors = append(vectors, res.ImageVectors...) + } + weights, err := v.getWeights(ichek) + if err != nil { + return nil, err + } + + return libvectorizer.CombineVectorsWithWeights(vectors, weights), nil +} + +func (v *Vectorizer) getWeights(ichek ClassSettings) ([]float32, error) { + weights := []float32{} + textFieldsWeights, err := ichek.TextFieldsWeights() + if err != nil { + return nil, err + } + imageFieldsWeights, err := ichek.ImageFieldsWeights() + if err != nil { + return nil, err + } + + weights = append(weights, textFieldsWeights...) + weights = append(weights, imageFieldsWeights...) 
+ + normalizedWeights := v.normalizeWeights(weights) + + return normalizedWeights, nil +} + +func (v *Vectorizer) normalizeWeights(weights []float32) []float32 { + if len(weights) > 0 { + var denominator float32 + for i := range weights { + denominator += weights[i] + } + normalizer := 1 / denominator + normalized := make([]float32, len(weights)) + for i := range weights { + normalized[i] = weights[i] * normalizer + } + return normalized + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/vectorizer/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/vectorizer/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..304ff48b7de73a4aa12b8b3e5795e6296e5e0115 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/multi2vec-voyageai/vectorizer/vectorizer_test.go @@ -0,0 +1,194 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +const image = "iVBORw0KGgoAAAANSUhEUgAAAGAAAAA/CAYAAAAfQM0aAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpCRjQ5NEM3RDI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpCRjQ5NEM3RTI5QTkxMUUyOTc1NENCMzI4N0QwNDNCOSI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOkJGNDk0QzdCMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOkJGNDk0QzdDMjlBOTExRTI5NzU0Q0IzMjg3RDA0M0I5Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+WeGRxAAAB2hJREFUeNrUXFtslUUQ3hJCoQVEKy0k1qQgrRg0vaAJaq1tvJSgaLy8mKDF2IvxBY2Bgm8+iIoxvhB72tTUmKgPigbFKCEtxeKD9hZjAi3GJrYJtqRai7TQB+pMz/zwU/5zzsxe2u4kXwiwZ+bb/Xb/s7v/zEmrra1VTFsFeBRQCtgEuBWwkv5vHPAn4DdAB+B7wBjXcUNDQ8o2dXV1SmDzyhUtLS3tBPyxC9CdrN1ihi/swKuA7YD0BG1uJhQDngdcAnwDeJ86Ole2kLii+J2AFsA+wF9RjRalmEUHaZY8m6RDUYZtn6HPHiRfLm2hck0D7AScAdRH8UokwD2AnwA7UoiUyhaRD/S12dHg+8B1OWA/4BTgqVQCPEJL8haLBNDXEfJt03ziipYH+BJwHFAYJcAWwCeAZQ6CLyPfWyz584nrbCuj74eHwgKsddih2R1ba+jHJ65R1k6PuWNhAd4DZM/BTiWbdhwm5hPXsA0AngY8COgNP4JwSTyu4zE/P18VFhZKP7aNYuouXxFX5Ic8Nc2Ea2D/AfYCNgIORZ0DdusOfnFxcXDw
UD09PZKP76alKDUR16KiIlVQUHDl7/39/Uozpg7Xac45YB0dGrQHHw07KVwJpRRbYiKuyCc8+MhXcyXocP2RnvMvJhr8QIBK08EPbGJiQuqq0mX7KD4GIohi4xVPTU0N6/BRamPwu7u7dZb3/RozkW3IB3lZEkGHayeI8FFVVdWaZAIUcD2Wl5fbHHy024XtC6QBkomA/XHIFb8X0Xamp6efASHqt27dGnkVkcNxVlFRoXJycmwOvuLGNmifVATsD/bLZezgKgKE2J+bm3sKHk3XXUWs4Mz87Oxs24OvOLEN26cUAfvFXAkrlKGBCDNXEbAajldXV1+5ijjP+KCrg855x+3nk2uy8SwDdIIIM1cRI6k+0NraqkZGRmzuKAIbFrYf0Q2UaPOA/Wpra3PBNfHhYHq6HbC5qanpGB7ETgPWc0TApTr7eyDolOaj6LRG+/W2Bn94eJg7+DpcowZ+AGb+642NjYfC3wEdXAdI1uK2Du2ksH2HrcHHfggGX4frNVcRMPh7BwcHN8ZiseuuIr4DvKXib29YX2bhmW+wEqYptsREXC2eWXS44oyfuYqYmpra19LSEnkaRgEG6Nj8gGRHESVCRkaG9Kg+IOyTiGtmZqatnZsOV/zMLnjcsF7KH5AIECVCX1+f6u3tlbg4oLmc2VyDy8HgPshg2yzmCo8aFsdAALzpw9dw23REwJkvHPwjSu92UcwVRcAnAd4LaQ6+CVe2AGivAe5WwhcdGp0aoVgmJuIqnBy2uSa18Buxs4AXAJMO401SjLOGfnziyhYg2GrtcNSxSfJ90pI/n7iyBUA7quKv/IYsxhmiZ/ZRy/x94soWAO1nwL0qnhVw2cD/ZfKBvjod9cEnrmwB0DBh9RUVfxHxhYrnUHLtEn2mlHyMOe6HT1wT7oISGSas4ntNzJmsVFczjnMBN1CbfwGD1BYPID8A/lFzbz5xZQsQnmWfExa6ecNVIsBKWuIlgA0qnjG2PLhsou0aZgF3qfil2fg89ssbrhwBNtB+GN/dLUnQ5kbCHYAnAFMAvGpsoY7OlS0krmOhxx7WLHwAeBLwVahN2uIUswgrPB5T8rRv7DxWqDwM+JaCjzue8b5wZe2C7gJ8quKVJqY599vJ1yZHffCJK0uA+wAfAtZYjIO+Gsi3TfOJK0sAfFP/jpKV+HBtKfkutOTPJ64sAVYD3qXgrmwpxVht6McnrmwBMAP4pjlYdRij3tCHT1xZAuDdermOA836gDKKqWNirob1ASZc2eeAl3QH36A+AGP+ohFWxNVSfYAuV9YKyKUTo/bgo2nUB5RQbImJuFqsD9DhyhbAuDgjMI36gFKX7S3XB5S6egSV2Bh8zYyDYjr4SGYi2yzmMIm5YnFGkFOLSQGNjY3X/BtaLBabWQF5XKcO6gOkZT950gAW6wPWuXoEZXEaOqoPyHLcPqkIwvqALFcCZHJmvqP6gEzH7VOKIKgPyHQlwIVUjRzWB1xw3H4+ubIFGE3VyGF9wKjj9ik3D4L6gFFXArCSTlEEzKe3LMIfwvYDNgcf+4P9csSVLUAXt7GD+oBuYfsuW4OvUR/Q7UoA/G2zaRvbOqEI0xRbYiKulusDTrgSYEg6sxKJIKwP6FLyjDYRV4v1ATpc2QKgNZtu6zTqA5o1ObM/h5eDyMvCtrlZObLgNhRv+jAHvkwqQjDzhYPfrvRvF0VcLdQHaHGNxWKrZv0d//hahcqr8Ccww1kRbwPuVMIXHRqd+ptimZiIq0F9gA2urEcQ2jkVf/tz0WG8ixTjnKEfn7iyBQi2WnuULLlV0qE9FrdzPnFlC4CGRQkvqyQ/MqRh6KtO2S948IkrWwC0XwHPAQ4r85z7w+TL1U8Y+8Q14S4oyjA9703AZ4AqFX8RvoTpN8i3/Bi/p+egHz5xZQsQGCasvqGuZhzj76DdpuIZx8FPuOAviWDG8e8qXl0yXxnHPnGdsf8FGAByGwC02iMZswAAAABJRU5ErkJggg==" + 
// TestVectorizer exercises Object() end to end against the fake client,
// checking that image-only configurations produce a non-nil combined
// vector (exact values are pinned by the weighted tests below).
func TestVectorizer(t *testing.T) {
	t.Run("should vectorize image", func(t *testing.T) {
		// given: a class config with a single image field
		client := &fakeClient{}
		vectorizer := New(client)
		config := newConfigBuilder().addSetting("imageFields", []interface{}{"image"}).build()

		props := map[string]interface{}{
			"image": image,
		}
		object := &models.Object{
			ID:         "some-uuid",
			Properties: props,
		}

		// when
		vector, _, err := vectorizer.Object(context.Background(), object, config)

		// then
		require.Nil(t, err)
		assert.NotNil(t, vector)
	})

	t.Run("should vectorize 2 image fields", func(t *testing.T) {
		// given: a class config with two image fields sharing the same data
		client := &fakeClient{}
		vectorizer := New(client)
		config := newConfigBuilder().addSetting("imageFields", []interface{}{"image1", "image2"}).build()

		props := map[string]interface{}{
			"image1": image,
			"image2": image,
		}
		object := &models.Object{
			ID:         "some-uuid",
			Properties: props,
		}

		// when
		vector, _, err := vectorizer.Object(context.Background(), object, config)

		// then
		require.Nil(t, err)
		assert.NotNil(t, vector)
	})
}
// checkNormalization reports whether the given weights sum to 1. Because
// the weights are float32 values produced by multiplying with a computed
// normalizer, their sum is rarely bit-exact (e.g. 0.99999994), so the sum
// is compared against 1.0 with a small tolerance instead of the exact
// float equality used previously, which made the check flaky.
func checkNormalization(weights []float32) bool {
	var sum float64
	for i := range weights {
		sum += float64(weights[i])
	}
	diff := sum - 1.0
	if diff < 0 {
		diff = -diff
	}
	return diff < 1e-5
}
// Token represents a single named entity recognized by the NER module in a
// string property value of a search result.
//
// NOTE(review): every field carries `omitempty`, so a zero Certainty or a
// StartPosition of 0 (an entity at the very start of the text) is dropped
// from the JSON output — confirm API consumers expect this.
type Token struct {
	Property      string  `json:"property,omitempty"`      // name of the property the entity was found in
	Entity        string  `json:"entity,omitempty"`        // entity class label (e.g. "I-PER")
	Certainty     float64 `json:"certainty,omitempty"`     // model confidence for the match
	Word          string  `json:"word,omitempty"`          // the matched word
	StartPosition int     `json:"startPosition,omitempty"` // start position of the entity in the text
	EndPosition   int     `json:"endPosition,omitempty"`   // end position of the entity in the text
}
+// +// CONTACT: hello@weaviate.io +// + +package additional + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/tailor-inc/graphql" + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/search" +) + +type AdditionalProperty interface { + AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig) ([]search.Result, error) + ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} + AdditionalPropertyDefaultValue() interface{} + AdditionalFieldFn(classname string) *graphql.Field +} + +type GraphQLAdditionalArgumentsProvider struct { + tokensProvider AdditionalProperty +} + +func New(tokensProvider AdditionalProperty) *GraphQLAdditionalArgumentsProvider { + return &GraphQLAdditionalArgumentsProvider{tokensProvider} +} + +func (p *GraphQLAdditionalArgumentsProvider) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + additionalProperties := map[string]modulecapabilities.AdditionalProperty{} + additionalProperties["tokens"] = p.getTokens() + return additionalProperties +} + +func (p *GraphQLAdditionalArgumentsProvider) getTokens() modulecapabilities.AdditionalProperty { + return modulecapabilities.AdditionalProperty{ + GraphQLNames: []string{"tokens"}, + GraphQLFieldFunction: p.tokensProvider.AdditionalFieldFn, + GraphQLExtractFunction: p.tokensProvider.ExtractAdditionalFn, + SearchFunctions: modulecapabilities.AdditionalSearch{ + ExploreGet: p.tokensProvider.AdditionalPropertyFn, + ExploreList: p.tokensProvider.AdditionalPropertyFn, + }, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ner-transformers/additional/tokens/tokens.go 
b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/additional/tokens/tokens.go new file mode 100644 index 0000000000000000000000000000000000000000..5491dc2c205d899a6cf213ad0fa4dfa2c8e6ed62 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/additional/tokens/tokens.go @@ -0,0 +1,60 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package tokens + +import ( + "context" + "errors" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/tailor-inc/graphql" + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/modules/ner-transformers/ent" +) + +type nerClient interface { + GetTokens(ctx context.Context, property, text string) ([]ent.TokenResult, error) +} + +type TokenProvider struct { + ner nerClient +} + +func New(ner nerClient) *TokenProvider { + return &TokenProvider{ner} +} + +func (p *TokenProvider) AdditionalPropertyDefaultValue() interface{} { + return &Params{} +} + +func (p *TokenProvider) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return p.parseTokenArguments(param) +} + +func (p *TokenProvider) AdditionalFieldFn(classname string) *graphql.Field { + return p.additionalTokensField(classname) +} + +func (p *TokenProvider) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + if parameters, ok := params.(*Params); ok { + return p.findTokens(ctx, in, parameters) + } + return nil, errors.New("wrong parameters") +} diff --git 
// additionalTokensField builds the GraphQL field for the "tokens"
// additional property of the given class: a list of token objects (one per
// recognized entity, with field names mirroring the Token model) plus the
// user-facing filter arguments properties, certainty, distance and limit.
func (p *TokenProvider) additionalTokensField(classname string) *graphql.Field {
	return &graphql.Field{
		Args: graphql.FieldConfigArgument{
			"properties": &graphql.ArgumentConfig{
				Description:  "Properties which contains text",
				Type:         graphql.NewList(graphql.String),
				DefaultValue: nil,
			},
			"certainty": &graphql.ArgumentConfig{
				Description:  descriptions.Certainty,
				Type:         graphql.Float,
				DefaultValue: nil,
			},
			"distance": &graphql.ArgumentConfig{
				Description:  descriptions.Distance,
				Type:         graphql.Float,
				DefaultValue: nil,
			},
			"limit": &graphql.ArgumentConfig{
				Type:         graphql.Int,
				Description:  descriptions.Limit,
				DefaultValue: nil,
			},
		},
		// the object type name is prefixed with the class name so each
		// class gets its own distinct GraphQL type
		Type: graphql.NewList(graphql.NewObject(graphql.ObjectConfig{
			Name: fmt.Sprintf("%sAdditionalTokens", classname),
			Fields: graphql.Fields{
				"property":      &graphql.Field{Type: graphql.String},
				"entity":        &graphql.Field{Type: graphql.String},
				"certainty":     &graphql.Field{Type: graphql.Float},
				"distance":      &graphql.Field{Type: graphql.Float},
				"word":          &graphql.Field{Type: graphql.String},
				"startPosition": &graphql.Field{Type: graphql.Int},
				"endPosition":   &graphql.Field{Type: graphql.Int},
			},
		})),
	}
}
assert.NotNil(t, tokensObject.Fields()["entity"]) + assert.NotNil(t, tokensObject.Fields()["certainty"]) + assert.NotNil(t, tokensObject.Fields()["distance"]) + assert.NotNil(t, tokensObject.Fields()["word"]) + assert.NotNil(t, tokensObject.Fields()["startPosition"]) + assert.NotNil(t, tokensObject.Fields()["endPosition"]) + + assert.NotNil(t, tokens.Args) + assert.Equal(t, 4, len(tokens.Args)) + assert.NotNil(t, tokens.Args["certainty"]) + assert.NotNil(t, tokens.Args["distance"]) + assert.NotNil(t, tokens.Args["limit"]) + assert.NotNil(t, tokens.Args["properties"]) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ner-transformers/additional/tokens/tokens_params.go b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/additional/tokens/tokens_params.go new file mode 100644 index 0000000000000000000000000000000000000000..7394850704e25d33d27987536573774c9e5c6bad --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/additional/tokens/tokens_params.go @@ -0,0 +1,31 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
// Params holds the user-supplied arguments of the "tokens" additional
// property. Limit, Certainty and Distance are optional and therefore
// pointers: nil means "not provided".
type Params struct {
	Limit      *int
	Certainty  *float64
	Distance   *float64
	Properties []string
}

// GetCertainty returns the optional certainty threshold (nil if unset).
func (p Params) GetCertainty() *float64 {
	return p.Certainty
}

// GetLimit returns the optional result limit (nil if unset).
func (p Params) GetLimit() *int {
	return p.Limit
}

// GetProperties returns the property names to scan for entities.
func (p Params) GetProperties() []string {
	return p.Properties
}
// parseTokenArguments maps the raw GraphQL AST arguments of the "tokens"
// additional property onto a Params struct. Unknown argument names are
// silently ignored.
//
// NOTE(review): the strconv errors are discarded, so a malformed "limit"
// becomes 0 and a malformed "certainty"/"distance" becomes 0.0 —
// presumably the GraphQL layer has already validated the values; confirm.
// The unchecked type assertions on GetValue() would panic on unexpected
// AST kinds for the same reason.
func (p *TokenProvider) parseTokenArguments(args []*ast.Argument) *Params {
	out := &Params{}

	for _, arg := range args {
		switch arg.Name.Value {
		case "limit":
			// numeric scalars arrive as their string representation
			asInt, _ := strconv.Atoi(arg.Value.GetValue().(string))
			out.Limit = ptInt(asInt)
		case "certainty":
			asFloat, _ := strconv.ParseFloat(arg.Value.GetValue().(string), 64)
			out.Certainty = &asFloat
		case "distance":
			asFloat, _ := strconv.ParseFloat(arg.Value.GetValue().(string), 64)
			out.Distance = &asFloat
		case "properties":
			// properties is a list of string values
			inp := arg.Value.GetValue().([]ast.Value)
			out.Properties = make([]string, len(inp))

			for i, value := range inp {
				out.Properties[i] = value.(*ast.StringValue).Value
			}

		default:
			// ignore what we don't recognize
		}
	}

	return out
}

// ptInt returns a pointer to a copy of the given int.
func ptInt(in int) *int {
	return &in
}
+// +// CONTACT: hello@weaviate.io +// + +package tokens + +import ( + "reflect" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/tailor-inc/graphql/language/ast" +) + +func Test_parseTokenArguments(t *testing.T) { + type args struct { + args []*ast.Argument + } + tests := []struct { + name string + args args + want *Params + }{ + { + name: "Should create with no params", + args: args{}, + want: &Params{}, + }, + { + name: "Should create with all params (and distance)", + args: args{ + args: []*ast.Argument{ + createArg("distance", "0.4"), + createArg("limit", "1"), + createListArg("properties", []string{"prop1", "prop2"}), + }, + }, + want: &Params{ + Properties: []string{"prop1", "prop2"}, + Distance: ptFloat64(0.4), + Limit: ptInt(1), + }, + }, + { + name: "Should create with all params (and certainty)", + args: args{ + args: []*ast.Argument{ + createArg("certainty", "0.4"), + createArg("limit", "1"), + createListArg("properties", []string{"prop1", "prop2"}), + }, + }, + want: &Params{ + Properties: []string{"prop1", "prop2"}, + Certainty: ptFloat64(0.4), + Limit: ptInt(1), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + p := &TokenProvider{} + if got := p.parseTokenArguments(tt.args.args); !reflect.DeepEqual(got, tt.want) { + t.Errorf("parseTokenArguments() = %v, want %v", got, tt.want) + } + actual := p.parseTokenArguments(tt.args.args) + assert.Equal(t, tt.want, actual) + }) + } +} + +func createArg(name string, value string) *ast.Argument { + n := ast.Name{ + Value: name, + } + val := ast.StringValue{ + Kind: "Kind", + Value: value, + } + arg := ast.Argument{ + Name: ast.NewName(&n), + Kind: "Kind", + Value: ast.NewStringValue(&val), + } + a := ast.NewArgument(&arg) + return a +} + +func createListArg(name string, valuesIn []string) *ast.Argument { + n := ast.Name{ + Value: name, + } + + valuesAst := make([]ast.Value, len(valuesIn)) + for i, value := range valuesIn { + valuesAst[i] = &ast.StringValue{ + 
Kind:   "Kind",
+			Value: value,
+		}
+	}
+	vals := ast.ListValue{
+		Kind:   "Kind",
+		Values: valuesAst,
+	}
+	arg := ast.Argument{
+		Name:  ast.NewName(&n),
+		Kind:  "Kind",
+		Value: &vals,
+	}
+	a := ast.NewArgument(&arg)
+	return a
+}
+
+func ptFloat64(in float64) *float64 {
+	return &in
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/ner-transformers/additional/tokens/tokens_result.go b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/additional/tokens/tokens_result.go
new file mode 100644
index 0000000000000000000000000000000000000000..1d97eee60344b3bc4db4da86a422cf3950cc1838
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/additional/tokens/tokens_result.go
@@ -0,0 +1,117 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | || __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+// CONTACT: hello@weaviate.io
+//
+
+package tokens
+
+import (
+	"context"
+	"errors"
+	"fmt"
+
+	"github.com/weaviate/weaviate/entities/models"
+	"github.com/weaviate/weaviate/entities/search"
+	"github.com/weaviate/weaviate/modules/ner-transformers/ent"
+)
+
+// findTokens runs NER over the configured text properties of each search
+// result and stores the tokens under the "tokens" additional property.
+func (p *TokenProvider) findTokens(ctx context.Context,
+	in []search.Result, params *Params,
+) ([]search.Result, error) {
+	if len(in) > 0 { // empty result set falls through and is returned unchanged
+
+		if params == nil {
+			return nil, fmt.Errorf("no params provided")
+		}
+
+		properties := params.GetProperties()
+
+		// check if user parameter values are valid
+		if len(properties) == 0 {
+			return in, errors.New("no properties provided")
+		}
+
+		for i := range in { // for each result of the general GraphQL Query
+			ap := in[i].AdditionalProperties
+			if ap == nil {
+				ap = models.AdditionalProperties{}
+			}
+
+			// check if the schema of the GraphQL data object contains the properties and they are text or string values
+			textProperties := map[string]string{}
+			schema := in[i].Object().Properties.(map[string]interface{})
+			for property, value := range schema {
+				if p.containsProperty(property, properties) {
+					if valueString, ok := value.(string); ok && len(valueString) > 0 {
+						textProperties[property] = valueString
+					}
+				}
+			}
+
+			certainty := params.GetCertainty()
+			limit := params.GetLimit()
+			tokensList := []ent.TokenResult{}
+
+			// for each text property result, call the NER function and add to additional result
+			for property, value := range textProperties {
+
+				if limit != nil && len(tokensList) > *limit { // stop once enough tokens were collected; final slice is truncated below
+					break
+				}
+
+				tokens, err := p.ner.GetTokens(ctx, property, value)
+				if err != nil {
+					return in, err
+				}
+
+				tokens = cutOffByCertainty(tokens, certainty)
+
+				tokensList = append(tokensList, tokens...)
+			}
+
+			if limit != nil && len(tokensList) > *limit {
+				ap["tokens"] = tokensList[:*limit]
+			} else {
+				ap["tokens"] = tokensList
+			}
+
+			in[i].AdditionalProperties = ap
+		}
+	}
+	return in, nil
+}
+
+// cutOffByCertainty filters tokens in place, keeping only those whose
+// certainty is at least *certainty (no filtering when certainty is nil).
+func cutOffByCertainty(tokens []ent.TokenResult, certainty *float64) []ent.TokenResult {
+	minCertainty := 0.0
+	if certainty != nil {
+		minCertainty = *certainty
+	}
+	a := 0
+	for _, x := range tokens {
+		if x.Certainty >= minCertainty {
+			tokens[a] = x
+			a++
+		}
+	}
+	tokens = tokens[:a]
+
+	return tokens
+}
+
+// containsProperty reports whether property is in properties; an empty
+// properties list matches everything.
+func (p *TokenProvider) containsProperty(property string, properties []string) bool {
+	if len(properties) == 0 {
+		return true
+	}
+	for i := range properties {
+		if properties[i] == property {
+			return true
+		}
+	}
+	return false
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/ner.go b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/ner.go
new file mode 100644
index 0000000000000000000000000000000000000000..8743a825e1b824ddcc19524bd7923818a7a07967
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/ner.go
@@ -0,0 +1,116 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ 
_` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/ner-transformers/ent" +) + +type ner struct { + origin string + httpClient *http.Client + logger logrus.FieldLogger +} + +type nerInput struct { + Text string `json:"text"` +} + +type tokenResponse struct { + // Property string `json:"property"` + Entity string `json:"entity"` + Certainty float64 `json:"certainty"` + Distance float64 `json:"distance"` + Word string `json:"word"` + StartPosition int `json:"startPosition"` + EndPosition int `json:"endPosition"` +} + +type nerResponse struct { + Error string + nerInput + Tokens []tokenResponse `json:"tokens"` +} + +func New(origin string, timeout time.Duration, logger logrus.FieldLogger) *ner { + return &ner{ + origin: origin, + httpClient: &http.Client{Timeout: timeout}, + logger: logger, + } +} + +func (n *ner) GetTokens(ctx context.Context, property, + text string, +) ([]ent.TokenResult, error) { + body, err := json.Marshal(nerInput{ + Text: text, + }) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", n.url("/ner/"), + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + + res, err := n.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody nerResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if res.StatusCode > 399 { + return nil, errors.Errorf("fail with status %d: %s", res.StatusCode, resBody.Error) + } + + out := make([]ent.TokenResult, len(resBody.Tokens)) + + for i, elem := range resBody.Tokens { + out[i].Certainty = elem.Certainty + out[i].Distance = elem.Distance + out[i].Entity = elem.Entity + out[i].Word = elem.Word + out[i].StartPosition = elem.StartPosition + out[i].EndPosition = elem.EndPosition + out[i].Property = property + } + + // format resBody to nerResult + return out, nil +} + +func (n *ner) url(path string) string { + return fmt.Sprintf("%s%s", n.origin, path) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/ner_meta.go b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/ner_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..1c31cde28521ca10396845fbb265ef6b31d6301f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/ner_meta.go @@ -0,0 +1,45 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + + "github.com/pkg/errors" +) + +func (n *ner) MetaInfo() (map[string]interface{}, error) { + req, err := http.NewRequestWithContext(context.Background(), "GET", n.url("/meta"), nil) + if err != nil { + return nil, errors.Wrap(err, "create GET meta request") + } + + res, err := n.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send GET meta request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read meta response body") + } + + var resBody map[string]interface{} + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, "unmarshal meta response body") + } + return resBody, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/ner_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/ner_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..00e4d186df251492b8a1a93cf8b403aa7cff12b5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/ner_meta_test.go @@ -0,0 +1,156 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["model"] + assert.True(t, metaModel != nil) + model, modelOK := metaModel.(map[string]interface{}) + assert.True(t, modelOK) + assert.True(t, model["_name_or_path"] != nil) + assert.True(t, model["architectures"] != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "model": { + "_name_or_path": "dbmdz/bert-large-cased-finetuned-conll03-english", + "_num_labels": 9, + "add_cross_attention": false, + "architectures": [ + "BertForTokenClassification" + ], + "attention_probs_dropout_prob": 0.1, + "bad_words_ids": null, + "bos_token_id": null, + "chunk_size_feed_forward": 0, + "decoder_start_token_id": null, + "directionality": "bidi", + "diversity_penalty": 0, + "do_sample": false, + "early_stopping": false, + "encoder_no_repeat_ngram_size": 0, + "eos_token_id": null, + "finetuning_task": null, + "forced_bos_token_id": null, + "forced_eos_token_id": null, + "gradient_checkpointing": false, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 1024, + "id2label": { + "0": "O", + "1": "B-MISC", + "2": "I-MISC", + 
"3": "B-PER", + "4": "I-PER", + "5": "B-ORG", + "6": "I-ORG", + "7": "B-LOC", + "8": "I-LOC" + }, + "initializer_range": 0.02, + "intermediate_size": 4096, + "is_decoder": false, + "is_encoder_decoder": false, + "label2id": { + "B-LOC": 7, + "B-MISC": 1, + "B-ORG": 5, + "B-PER": 3, + "I-LOC": 8, + "I-MISC": 2, + "I-ORG": 6, + "I-PER": 4, + "O": 0 + }, + "layer_norm_eps": 1e-12, + "length_penalty": 1, + "max_length": 20, + "max_position_embeddings": 512, + "min_length": 0, + "model_type": "bert", + "no_repeat_ngram_size": 0, + "num_attention_heads": 16, + "num_beam_groups": 1, + "num_beams": 1, + "num_hidden_layers": 24, + "num_return_sequences": 1, + "output_attentions": false, + "output_hidden_states": false, + "output_scores": false, + "pad_token_id": 0, + "pooler_fc_size": 768, + "pooler_num_attention_heads": 12, + "pooler_num_fc_layers": 3, + "pooler_size_per_head": 128, + "pooler_type": "first_token_transform", + "position_embedding_type": "absolute", + "prefix": null, + "problem_type": null, + "pruned_heads": {}, + "remove_invalid_values": false, + "repetition_penalty": 1, + "return_dict": true, + "return_dict_in_generate": false, + "sep_token_id": null, + "task_specific_params": null, + "temperature": 1, + "tie_encoder_decoder": false, + "tie_word_embeddings": true, + "tokenizer_class": null, + "top_k": 50, + "top_p": 1, + "torchscript": false, + "transformers_version": "4.6.1", + "type_vocab_size": 2, + "use_bfloat16": false, + "use_cache": true, + "vocab_size": 28996 + } + }` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/ner_test.go b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/ner_test.go new file mode 100644 index 0000000000000000000000000000000000000000..0433e4fa33c475795c16d882561c95d989aca459 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/ner_test.go @@ -0,0 +1,132 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / 
|/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/modules/ner-transformers/ent" +) + +func TestGetAnswer(t *testing.T) { + t.Run("when the server has a successful answer (with distance)", func(t *testing.T) { + server := httptest.NewServer(&testNERHandler{ + t: t, + res: nerResponse{ + nerInput: nerInput{ + Text: "I work at Apple", + }, + Tokens: []tokenResponse{ + { + Entity: "I-ORG", + Distance: 0.3, + Word: "Apple", + StartPosition: 20, + EndPosition: 25, + }, + }, + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + res, err := c.GetTokens(context.Background(), "prop", + "I work at Apple") + + assert.Nil(t, err) + assert.Equal(t, []ent.TokenResult{ + { + Entity: "I-ORG", + Distance: 0.3, + Word: "Apple", + StartPosition: 20, + EndPosition: 25, + Property: "prop", + }, + }, res) + }) + + t.Run("when the server has a successful answer (with certainty)", func(t *testing.T) { + server := httptest.NewServer(&testNERHandler{ + t: t, + res: nerResponse{ + nerInput: nerInput{ + Text: "I work at Apple", + }, + Tokens: []tokenResponse{ + { + Entity: "I-ORG", + Certainty: 0.7, + Word: "Apple", + StartPosition: 20, + EndPosition: 25, + }, + }, + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + res, err := c.GetTokens(context.Background(), "prop", + "I work at Apple") + + assert.Nil(t, err) + assert.Equal(t, []ent.TokenResult{ + { + Entity: "I-ORG", + Certainty: 0.7, + Word: "Apple", + StartPosition: 20, + EndPosition: 25, + Property: "prop", + }, + }, res) + }) + + t.Run("when the server has a an error", func(t *testing.T) { + server := 
httptest.NewServer(&testNERHandler{ + t: t, + res: nerResponse{ + Error: "some error from the server", + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + _, err := c.GetTokens(context.Background(), "prop", + "I work at Apple") + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "some error from the server") + }) +} + +type testNERHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + res nerResponse +} + +func (f *testNERHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/ner/", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.res.Error != "" { + w.WriteHeader(500) + } + + jsonBytes, _ := json.Marshal(f.res) + w.Write(jsonBytes) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/startup.go b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/startup.go new file mode 100644 index 0000000000000000000000000000000000000000..a7b5de821a6c74677f67cd9007c2cc6c1dd69b38 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/startup.go @@ -0,0 +1,68 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "time" + + "github.com/pkg/errors" +) + +func (n *ner) WaitForStartup(initCtx context.Context, + interval time.Duration, +) error { + t := time.NewTicker(interval) + defer t.Stop() + expired := initCtx.Done() + var lastErr error + for { + select { + case <-t.C: + lastErr = n.checkReady(initCtx) + if lastErr == nil { + return nil + } + n.logger. + WithField("action", "ner_remote_wait_for_startup"). 
+ WithError(lastErr).Warnf("ner remote service not ready") + case <-expired: + return errors.Wrapf(lastErr, "init context expired before remote was ready") + } + } +} + +func (n *ner) checkReady(initCtx context.Context) error { + // spawn a new context (derived on the overall context) which is used to + // consider an individual request timed out + requestCtx, cancel := context.WithTimeout(initCtx, 500*time.Millisecond) + defer cancel() + + req, err := http.NewRequestWithContext(requestCtx, http.MethodGet, + n.url("/.well-known/ready"), nil) + if err != nil { + return errors.Wrap(err, "create check ready request") + } + + res, err := n.httpClient.Do(req) + if err != nil { + return errors.Wrap(err, "send check ready request") + } + + defer res.Body.Close() + if res.StatusCode > 299 { + return errors.Errorf("not ready: status %d", res.StatusCode) + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/startup_test.go b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/startup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ff65558b1e87f7f143c6c43afdaf422dd3829c34 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/clients/startup_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWaitForStartup(t *testing.T) { + t.Run("when the server is immediately ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + err := c.WaitForStartup(context.Background(), 50*time.Millisecond) + + assert.Nil(t, err) + }) + + t.Run("when the server is down", func(t *testing.T) { + c := New("http://nothing-running-at-this-url", 0, nullLogger()) + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 150*time.Millisecond) + + require.NotNil(t, err, nullLogger()) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is alive, but not ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(1 * time.Minute), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is initially not ready, but then becomes ready", + func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(100 * time.Millisecond), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.Nil(t, err) + }) +} + +type testReadyHandler 
struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testReadyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/.well-known/ready", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.WriteHeader(http.StatusNoContent) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ner-transformers/config.go b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/config.go new file mode 100644 index 0000000000000000000000000000000000000000..4a92ed2da68c046f7636a0c2197f087efb6007da --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/config.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modner + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *NERModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *NERModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *NERModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/ner-transformers/ent/ner_result.go b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/ent/ner_result.go new file mode 100644 index 0000000000000000000000000000000000000000..de6fd05baffd874adaf1990908ee1edf0e378342 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/ent/ner_result.go @@ -0,0 +1,26 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +type TokenResult struct { + Property string + Word string + Entity string + Certainty float64 + Distance float64 + StartPosition int + EndPosition int +} + +type NerResult struct { + Tokens []TokenResult +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ner-transformers/module.go b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/module.go new file mode 100644 index 0000000000000000000000000000000000000000..1d8237e7beb663a8655f63bfcdc813d1cacb6c71 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ner-transformers/module.go @@ -0,0 +1,104 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modner + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + entcfg "github.com/weaviate/weaviate/entities/config" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + neradditional "github.com/weaviate/weaviate/modules/ner-transformers/additional" + neradditionaltoken "github.com/weaviate/weaviate/modules/ner-transformers/additional/tokens" + "github.com/weaviate/weaviate/modules/ner-transformers/clients" + "github.com/weaviate/weaviate/modules/ner-transformers/ent" +) + +const Name = "ner-transformers" + +func New() *NERModule { + return &NERModule{} +} + +type NERModule struct { + ner nerClient + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +type nerClient interface { + GetTokens(ctx context.Context, property, text string) ([]ent.TokenResult, error) + MetaInfo() (map[string]interface{}, error) +} + +func (m *NERModule) Name() string { + return Name +} + +func (m *NERModule) Type() 
modulecapabilities.ModuleType { + return modulecapabilities.Text2TextNER +} + +func (m *NERModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init additional") + } + return nil +} + +func (m *NERModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + uri := os.Getenv("NER_INFERENCE_API") + if uri == "" { + return errors.Errorf("required variable NER_INFERENCE_API is not set") + } + + waitForStartup := true + if envWaitForStartup := os.Getenv("NER_WAIT_FOR_STARTUP"); envWaitForStartup != "" { + waitForStartup = entcfg.Enabled(envWaitForStartup) + } + + client := clients.New(uri, timeout, logger) + if waitForStartup { + if err := client.WaitForStartup(ctx, 1*time.Second); err != nil { + return errors.Wrap(err, "init remote ner module") + } + } + + m.ner = client + + tokenProvider := neradditionaltoken.New(m.ner) + m.additionalPropertiesProvider = neradditional.New(tokenProvider) + + return nil +} + +func (m *NERModule) MetaInfo() (map[string]interface{}, error) { + return m.ner.MetaInfo() +} + +func (m *NERModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.AdditionalProperties(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/offload-s3/module.go b/platform/dbops/binaries/weaviate-src/modules/offload-s3/module.go new file mode 100644 index 0000000000000000000000000000000000000000..e4d26c29ce8ba867c3c4f6a4637ec600f6e7b9d5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/offload-s3/module.go @@ -0,0 +1,411 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ 
___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modsloads3 + +import ( + "context" + "fmt" + "os" + "path/filepath" + "strconv" + "strings" + "sync/atomic" + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/sirupsen/logrus" + "github.com/urfave/cli/v2" + "github.com/weaviate/s5cmd/v2/command" + "github.com/weaviate/s5cmd/v2/log" + "github.com/weaviate/s5cmd/v2/log/stat" + "github.com/weaviate/s5cmd/v2/parallel" + entcfg "github.com/weaviate/weaviate/entities/config" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/monitoring" +) + +const ( + Name = "offload-s3" + s3Endpoint = "OFFLOAD_S3_ENDPOINT" + s3BucketAutoCreate = "OFFLOAD_S3_BUCKET_AUTO_CREATE" + s3Bucket = "OFFLOAD_S3_BUCKET" + concurrency = "OFFLOAD_S3_CONCURRENCY" + timeout = "OFFLOAD_TIMEOUT" +) + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(&Module{}) +) + +type Module struct { + Endpoint string + Bucket string + BucketExists atomic.Bool + Concurrency int + DataPath string + logger logrus.FieldLogger + timeout time.Duration + app *cli.App + + metrics *monitoring.TenantOffloadMetrics +} + +func New() *Module { + return &Module{ + Endpoint: "", + Bucket: "weaviate-offload", + Concurrency: 25, + DataPath: config.DefaultPersistenceDataPath, + timeout: 120 * time.Second, + // we use custom cli app to avoid some bugs in underlying dependencies + // specially with .After implementation. 
+ metrics: monitoring.NewTenantOffloadMetrics(monitoring.Config{ + MetricsNamespace: "weaviate", + }, prometheus.DefaultRegisterer), + app: &cli.App{ + Name: "weaviate-s5cmd", + Usage: "weaviate fast S3 and local filesystem execution tool", + EnableBashCompletion: true, + Commands: command.Commands(), + Flags: []cli.Flag{ + &cli.IntFlag{ + Name: "numworkers", + Value: 256, + Usage: "number of workers execute operation on each object", + }, + &cli.IntFlag{ + Name: "retry-count", + Aliases: []string{"r"}, + Value: 10, + Usage: "number of times that a request will be retried for failures", + }, + &cli.StringFlag{ + Name: "endpoint-url", + Usage: "override default S3 host for custom services", + EnvVars: []string{"OFFLOAD_S3_ENDPOINT"}, + }, + &cli.BoolFlag{ + Name: "no-verify-ssl", + Usage: "disable SSL certificate verification", + }, + }, + Before: func(c *cli.Context) error { + retryCount := c.Int("retry-count") + workerCount := c.Int("numworkers") + isStat := c.Bool("stat") + endpointURL := c.String("endpoint-url") + + log.Init("error", false) // print level error only + parallel.Init(workerCount) + + if retryCount < 0 { + err := fmt.Errorf("retry count cannot be a negative value") + return err + } + if c.Bool("no-sign-request") && c.String("profile") != "" { + err := fmt.Errorf(`"no-sign-request" and "profile" flags cannot be used together`) + return err + } + if c.Bool("no-sign-request") && c.String("credentials-file") != "" { + err := fmt.Errorf(`"no-sign-request" and "credentials-file" flags cannot be used together`) + return err + } + + if isStat { + stat.InitStat() + } + + if endpointURL != "" { + if !strings.HasPrefix(endpointURL, "http") { + err := fmt.Errorf(`bad value for --endpoint-url %v: scheme is missing. 
Must be of the form http:/// or https:///`, endpointURL) + return err + } + } + + return nil + }, + Action: func(c *cli.Context) error { + if c.Bool("install-completion") { + return nil + } + args := c.Args() + if args.Present() { + cli.ShowCommandHelp(c, args.First()) + return cli.Exit("", 1) + } + + return cli.ShowAppHelp(c) + }, + After: func(c *cli.Context) error { + if c.Bool("stat") && len(stat.Statistics()) > 0 { + log.Stat(stat.Statistics()) + } + return nil + }, + }, + } +} + +func (m *Module) Name() string { + return Name +} + +func (m *Module) Type() modulecapabilities.ModuleType { + return modulecapabilities.Offload +} + +func (m *Module) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + + if path := os.Getenv("PERSISTENCE_DATA_PATH"); path != "" { + m.DataPath = path + } + + if bucket := os.Getenv(s3Bucket); bucket != "" { + m.Bucket = bucket + } + + if endpoint := os.Getenv(s3Endpoint); endpoint != "" { + m.Endpoint = endpoint + } + + if eTimeout := os.Getenv(timeout); eTimeout != "" { + timeoutN, err := time.ParseDuration(fmt.Sprintf("%ss", eTimeout)) + if err != nil { + return err + } + m.timeout = time.Duration(timeoutN.Seconds()) * time.Second + } + + if concc := os.Getenv(concurrency); concc != "" { + conccN, err := strconv.Atoi(concc) + if err != nil { + return err + } + m.Concurrency = conccN + } + + if entcfg.Enabled(os.Getenv(s3BucketAutoCreate)) { + if err := m.create(ctx); err != nil && !strings.Contains(err.Error(), "BucketAlreadyOwnedByYou") { + return fmt.Errorf("can't create offload bucket: %s at endpoint %s %w", m.Bucket, m.Endpoint, err) + } + } + + m.logger.WithFields(logrus.Fields{ + concurrency: m.Concurrency, + timeout: m.timeout, + s3Endpoint: m.Endpoint, + s3Bucket: m.Bucket, + "PERSISTENCE_DATA_PATH": m.DataPath, + }).Info("offload module loaded") + return nil +} + +func (m *Module) VerifyBucket(ctx context.Context) error { + if m.BucketExists.Load() { + return nil + } 
+ + ctx, cancel := context.WithTimeout(ctx, m.timeout) + defer cancel() + cmd := []string{ + fmt.Sprintf("--endpoint-url=%s", m.Endpoint), + "ls", + fmt.Sprintf("s3://%s", m.Bucket), + } + if err := m.app.RunContext(ctx, cmd); err != nil { + return err + } + m.BucketExists.Store(true) + return nil +} + +func (m *Module) create(ctx context.Context) error { + ctx, cancel := context.WithTimeout(ctx, m.timeout) + defer cancel() + cmd := []string{ + fmt.Sprintf("--endpoint-url=%s", m.Endpoint), + "mb", + fmt.Sprintf("s3://%s", m.Bucket), + } + + return m.app.RunContext(ctx, cmd) +} + +// Upload uploads the content of a shard assigned to specific node to +// cloud provider (S3, Azure Blob storage, Google cloud storage) +// {cloud_provider}://{configured_bucket}/{className}/{shardName}/{nodeName}/{shard content} +func (m *Module) Upload(ctx context.Context, className, shardName, nodeName string) error { + start := time.Now() + + if err := validate(className, shardName, nodeName); err != nil { + return err + } + + ctx, cancel := context.WithTimeout(ctx, m.timeout) + defer cancel() + + localPath := fmt.Sprintf("%s/%s/%s", m.DataPath, strings.ToLower(className), shardName) + cmd := []string{ + fmt.Sprintf("--endpoint-url=%s", m.Endpoint), + "cp", + fmt.Sprintf("--concurrency=%s", fmt.Sprintf("%d", m.Concurrency)), + fmt.Sprintf("%s/*", localPath), + fmt.Sprintf("s3://%s/%s/%s/%s/", m.Bucket, strings.ToLower(className), shardName, nodeName), + } + + var err error + defer func() { + // Update few useful metrics + size, _ := dirSize(localPath) + m.metrics.FetchedBytes.Add(float64(size)) + status := "success" + if err != nil { + status = "failed" + } + m.metrics.OpsDuration.WithLabelValues("upload", status).Observe(time.Since(start).Seconds()) + }() + + err = m.app.RunContext(ctx, cmd) + return err +} + +// Download downloads the content of a shard to desired node from +// cloud provider (S3, Azure Blob storage, Google cloud storage) +// 
{dataPath}/{className}/{shardName}/{content} +func (m *Module) Download(ctx context.Context, className, shardName, nodeName string) error { + localPath := fmt.Sprintf("%s/%s/%s", m.DataPath, strings.ToLower(className), shardName) + return m.DownloadToPath(ctx, className, shardName, nodeName, localPath) +} + +func (m *Module) DownloadToPath(ctx context.Context, className, shardName, nodeName, localPath string) error { + start := time.Now() + + if err := validate(className, shardName, nodeName); err != nil { + return err + } + + ctx, cancel := context.WithTimeout(ctx, m.timeout) + defer cancel() + + cmd := []string{ + fmt.Sprintf("--endpoint-url=%s", m.Endpoint), + "cp", + fmt.Sprintf("--concurrency=%s", fmt.Sprintf("%d", m.Concurrency)), + fmt.Sprintf("s3://%s/%s/%s/%s/*", m.Bucket, strings.ToLower(className), shardName, nodeName), + fmt.Sprintf("%s/", localPath), + } + + var err error + + defer func() { + // Update few useful metrics + size, _ := dirSize(localPath) + m.metrics.FetchedBytes.Add(float64(size)) + status := "success" + if err != nil { + status = "failed" + } + m.metrics.OpsDuration.WithLabelValues("download", status).Observe(time.Since(start).Seconds()) + }() + + err = m.app.RunContext(ctx, cmd) + + return err +} + +// Delete deletes content of a shard assigned to specific node in +// cloud provider (S3, Azure Blob storage, Google cloud storage) +// Careful: if shardName and nodeName is passed empty it will delete all class frozen shards in cloud storage +// {cloud_provider}://{configured_bucket}/{className}/{shardName}/{nodeName}/{shard content} +func (m *Module) Delete(ctx context.Context, className, shardName, nodeName string) error { + start := time.Now() + + if className == "" { + return fmt.Errorf("can't pass empty class name") + } + + if shardName == "" && nodeName != "" { + return fmt.Errorf("can't pass empty shard name") + } + + if nodeName == "" && shardName != "" { + return fmt.Errorf("can't pass empty node name") + } + + ctx, cancel := 
context.WithTimeout(ctx, m.timeout) + defer cancel() + + cloudPath := fmt.Sprintf("s3://%s/%s/%s/%s/*", m.Bucket, strings.ToLower(className), shardName, nodeName) + + // update cloud path on deleting a class + if shardName == "" && nodeName == "" { + cloudPath = fmt.Sprintf("s3://%s/%s/*", m.Bucket, strings.ToLower(className)) + } + + cmd := []string{ + fmt.Sprintf("--endpoint-url=%s", m.Endpoint), + "rm", + cloudPath, + } + + var err error + defer func() { + // Update few useful metrics + status := "success" + if err != nil { + status = "failed" + } + m.metrics.OpsDuration.WithLabelValues("delete", status).Observe(time.Since(start).Seconds()) + }() + + err = m.app.RunContext(ctx, cmd) + if err != nil && !strings.Contains(err.Error(), "no object found") { + return err + } + return nil +} + +func dirSize(path string) (int64, error) { + var size int64 + err := filepath.Walk(path, func(_ string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.IsDir() { + size += info.Size() + } + return err + }) + return size, err +} + +func validate(className, shardName, nodeName string) error { + if className == "" { + return fmt.Errorf("can't pass empty class name") + } + + if shardName == "" { + return fmt.Errorf("can't pass empty tenant name") + } + + if nodeName == "" { + return fmt.Errorf("can't pass empty node name") + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer.go new file mode 100644 index 0000000000000000000000000000000000000000..aaa2814ab2b17991078ebe99aff4e7ddeed571df --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 
//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
//  CONTACT: hello@weaviate.io
//

// Package answer provides the "answer" additional property for the
// qna-openai module: it runs extractive question answering over search
// results via an OpenAI-backed client.
package answer

import (
	"context"
	"errors"

	"github.com/weaviate/weaviate/entities/models"

	"github.com/tailor-inc/graphql"
	"github.com/tailor-inc/graphql/language/ast"
	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/entities/search"
	"github.com/weaviate/weaviate/modules/qna-openai/ent"
)

// Params is the (currently empty) parameter object for the "answer"
// additional property.
type Params struct{}

// qnaClient answers a question given the concatenated text of a result.
type qnaClient interface {
	Answer(ctx context.Context, text, question string, cfg moduletools.ClassConfig) (*ent.AnswerResult, error)
}

// paramsHelper extracts the question and the restricted property list from
// the module's "ask" argument parameters.
type paramsHelper interface {
	GetQuestion(params interface{}) string
	GetProperties(params interface{}) []string
}

// AnswerProvider implements the "answer" additional property by combining a
// QnA client with an ask-argument params helper.
type AnswerProvider struct {
	qna qnaClient
	paramsHelper
}

// New builds an AnswerProvider from its two collaborators.
func New(qna qnaClient, paramsHelper paramsHelper) *AnswerProvider {
	return &AnswerProvider{qna, paramsHelper}
}

// AdditionalPropertyDefaultValue returns the default (empty) Params value.
func (p *AnswerProvider) AdditionalPropertyDefaultValue() interface{} {
	return &Params{}
}

// ExtractAdditionalFn ignores the GraphQL arguments: the "answer" property
// takes no arguments of its own (the question comes from the "ask" argument).
func (p *AnswerProvider) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} {
	return &Params{}
}

// AdditionalFieldFn returns the GraphQL field definition for _additional.answer.
func (p *AnswerProvider) AdditionalFieldFn(classname string) *graphql.Field {
	return p.additionalAnswerField(classname)
}

// AdditionalPropertyFn resolves the "answer" property for a result set; it
// rejects any params value that is not *Params.
func (p *AnswerProvider) AdditionalPropertyFn(ctx context.Context,
	in []search.Result, params interface{}, limit *int,
	argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig,
) ([]search.Result, error) {
	if parameters, ok := params.(*Params); ok {
		return p.findAnswer(ctx, in, parameters, limit, argumentModuleParams, cfg)
	}
	return nil, errors.New("wrong parameters")
}
/dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer_graphql_field.go @@ -0,0 +1,33 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package answer + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func (p *AnswerProvider) additionalAnswerField(classname string) *graphql.Field { + return &graphql.Field{ + Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalAnswer", classname), + Fields: graphql.Fields{ + "result": &graphql.Field{Type: graphql.String}, + "startPosition": &graphql.Field{Type: graphql.Int}, + "endPosition": &graphql.Field{Type: graphql.Int}, + "property": &graphql.Field{Type: graphql.String}, + "hasAnswer": &graphql.Field{Type: graphql.Boolean}, + }, + }), + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer_graphql_field_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer_graphql_field_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c1ac51408c4958e636803e4adffb1b26da8646a5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer_graphql_field_test.go @@ -0,0 +1,54 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package answer + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/tailor-inc/graphql" +) + +func TestAnswerField(t *testing.T) { + t.Run("should generate answer argument properly", func(t *testing.T) { + // given + answerProvider := &AnswerProvider{} + classname := "Class" + + // when + answer := answerProvider.additionalAnswerField(classname) + + // then + // the built graphQL field needs to support this structure: + // Type: { + // answer: { + // result: "answer", + // startPosition: 1 + // endPosition: 2 + // distance: 0.2 + // property: "propName" + // hasAnswer: true + // } + // } + assert.NotNil(t, answer) + assert.Equal(t, "ClassAdditionalAnswer", answer.Type.Name()) + assert.NotNil(t, answer.Type) + answerObject, answerObjectOK := answer.Type.(*graphql.Object) + assert.True(t, answerObjectOK) + assert.Equal(t, 5, len(answerObject.Fields())) + assert.NotNil(t, answerObject.Fields()["result"]) + assert.NotNil(t, answerObject.Fields()["startPosition"]) + assert.NotNil(t, answerObject.Fields()["endPosition"]) + assert.NotNil(t, answerObject.Fields()["property"]) + assert.NotNil(t, answerObject.Fields()["hasAnswer"]) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer_result.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer_result.go new file mode 100644 index 0000000000000000000000000000000000000000..80602d48def68917ea1dd22e8b7a33e0470b9f99 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer_result.go @@ -0,0 +1,107 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package answer + +import ( + "context" + "errors" + "strings" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/search" + qnamodels "github.com/weaviate/weaviate/modules/qna-openai/additional/models" +) + +func (p *AnswerProvider) findAnswer(ctx context.Context, in []search.Result, params *Params, limit *int, argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig) ([]search.Result, error) { + if len(in) == 0 { + return in, nil + } + question := p.paramsHelper.GetQuestion(argumentModuleParams["ask"]) + if question == "" { + return in, errors.New("empty question") + } + properties := p.paramsHelper.GetProperties(argumentModuleParams["ask"]) + + for i := range in { + textProperties := map[string]string{} + schema := in[i].Object().Properties.(map[string]interface{}) + for property, value := range schema { + if p.containsProperty(property, properties) { + if valueString, ok := value.(string); ok && len(valueString) > 0 { + textProperties[property] = valueString + } + } + } + + var texts []string + for _, value := range textProperties { + texts = append(texts, value) + } + text := strings.Join(texts, " ") + if len(text) == 0 { + return in, errors.New("empty content") + } + + answer, err := p.qna.Answer(ctx, text, question, cfg) + if err != nil { + return in, err + } + + ap := in[i].AdditionalProperties + if ap == nil { + ap = models.AdditionalProperties{} + } + propertyName, startPos, endPos := p.findProperty(answer.Answer, textProperties) + ap["answer"] = &qnamodels.Answer{ + Result: answer.Answer, + Property: propertyName, + StartPosition: startPos, + EndPosition: endPos, + HasAnswer: answer.Answer != nil, + } + + in[i].AdditionalProperties = ap + } + + return in, nil +} + +func (p *AnswerProvider) containsProperty(property string, properties []string) bool { + if len(properties) == 0 { + return true + } + for i := 
range properties { + if properties[i] == property { + return true + } + } + return false +} + +func (p *AnswerProvider) findProperty(answer *string, textProperties map[string]string) (*string, int, int) { + if answer == nil { + return nil, 0, 0 + } + lowercaseAnswer := strings.ToLower(*answer) + if len(lowercaseAnswer) > 0 { + for property, value := range textProperties { + lowercaseValue := strings.ToLower(strings.ReplaceAll(value, "\n", " ")) + if strings.Contains(lowercaseValue, lowercaseAnswer) { + startIndex := strings.Index(lowercaseValue, lowercaseAnswer) + return &property, startIndex, startIndex + len(lowercaseAnswer) + } + } + } + propertyNotFound := "" + return &propertyNotFound, 0, 0 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e9ac77809497558fbc20a10278d906fddddda3ff --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/additional/answer/answer_test.go @@ -0,0 +1,187 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
//  CONTACT: hello@weaviate.io
//

package answer

import (
	"context"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/entities/search"
	qnamodels "github.com/weaviate/weaviate/modules/qna-openai/additional/models"
	"github.com/weaviate/weaviate/modules/qna-openai/ent"
)

// TestAdditionalAnswerProvider exercises AdditionalPropertyFn end to end
// against stubbed QnA client and params helper.
// NOTE(review): the tests populate search.Result.Schema and findAnswer reads
// in[i].Object().Properties — presumably Object() is derived from Schema;
// confirm against the search package.
func TestAdditionalAnswerProvider(t *testing.T) {
	t.Run("should fail with empty content", func(t *testing.T) {
		// given: a result with no properties at all
		qnaClient := &fakeQnAClient{}
		fakeHelper := &fakeParamsHelper{}
		answerProvider := New(qnaClient, fakeHelper)
		in := []search.Result{
			{
				ID: "some-uuid",
			},
		}
		fakeParams := &Params{}
		limit := 1
		argumentModuleParams := map[string]interface{}{}

		// when
		out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil)

		// then
		require.NotNil(t, err)
		require.NotEmpty(t, out)
		assert.Error(t, err, "empty content")
	})

	t.Run("should fail with empty question", func(t *testing.T) {
		// given: content present but no "ask" argument, so no question
		qnaClient := &fakeQnAClient{}
		fakeHelper := &fakeParamsHelper{}
		answerProvider := New(qnaClient, fakeHelper)
		in := []search.Result{
			{
				ID: "some-uuid",
				Schema: map[string]interface{}{
					"content": "content",
				},
			},
		}
		fakeParams := &Params{}
		limit := 1
		argumentModuleParams := map[string]interface{}{}

		// when
		out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil)

		// then
		// NOTE(review): the trailing string here is testify's msgAndArgs
		// (a label), not an asserted error message — it says "empty content"
		// although this case produces "empty question".
		require.NotNil(t, err)
		require.NotEmpty(t, out)
		assert.Error(t, err, "empty content")
	})

	t.Run("should answer", func(t *testing.T) {
		// given: content and a question; the fake client echoes "answer"
		qnaClient := &fakeQnAClient{}
		fakeHelper := &fakeParamsHelper{}
		answerProvider := New(qnaClient, fakeHelper)
		in := []search.Result{
			{
				ID: "some-uuid",
				Schema: map[string]interface{}{
					"content": "content",
				},
			},
		}
		fakeParams := &Params{}
		limit := 1
		argumentModuleParams := map[string]interface{}{
			"ask": map[string]interface{}{
				"question": "question",
			},
		}

		// when
		out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil)

		// then: the result gains an "answer" additional property
		require.Nil(t, err)
		require.NotEmpty(t, out)
		assert.Equal(t, 1, len(in))
		answer, answerOK := in[0].AdditionalProperties["answer"]
		assert.True(t, answerOK)
		assert.NotNil(t, answer)
		answerAdditional, answerAdditionalOK := answer.(*qnamodels.Answer)
		assert.True(t, answerAdditionalOK)
		assert.Equal(t, "answer", *answerAdditional.Result)
	})

	t.Run("should answer with property", func(t *testing.T) {
		// given: two properties; only "content" contains the word "answer",
		// so the provider should attribute the answer to it with the
		// correct character span (13..19 in "content with answer")
		qnaClient := &fakeQnAClient{}
		fakeHelper := &fakeParamsHelper{}
		answerProvider := New(qnaClient, fakeHelper)
		in := []search.Result{
			{
				ID: "some-uuid",
				Schema: map[string]interface{}{
					"content":  "content with answer",
					"content2": "this one is just a title",
				},
			},
		}
		fakeParams := &Params{}
		limit := 1
		argumentModuleParams := map[string]interface{}{
			"ask": map[string]interface{}{
				"question":   "question",
				"properties": []string{"content", "content2"},
			},
		}

		// when
		out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil)

		// then
		require.Nil(t, err)
		require.NotEmpty(t, out)
		assert.Equal(t, 1, len(in))
		answer, answerOK := in[0].AdditionalProperties["answer"]
		assert.True(t, answerOK)
		assert.NotNil(t, answer)
		answerAdditional, answerAdditionalOK := answer.(*qnamodels.Answer)
		assert.True(t, answerAdditionalOK)
		assert.Equal(t, "answer", *answerAdditional.Result)
		assert.Equal(t, "content", *answerAdditional.Property)
		assert.Equal(t, 13, answerAdditional.StartPosition)
		assert.Equal(t, 19, answerAdditional.EndPosition)
		assert.Equal(t, true, answerAdditional.HasAnswer)
	})
}

// fakeQnAClient always answers "answer", regardless of text and question.
type fakeQnAClient struct{}

func (c *fakeQnAClient) Answer(ctx context.Context, text, question string, cfg moduletools.ClassConfig) (*ent.AnswerResult, error) {
	return c.getAnswer(question, "answer"), nil
}

func (c *fakeQnAClient) getAnswer(question, answer string) *ent.AnswerResult {
	return &ent.AnswerResult{
		Text:     question,
		Question: question,
		Answer:   &answer,
	}
}

// fakeParamsHelper reads question/properties straight out of a plain map.
type fakeParamsHelper struct{}

func (h *fakeParamsHelper) GetQuestion(params interface{}) string {
	if fakeParamsMap, ok := params.(map[string]interface{}); ok {
		if question, ok := fakeParamsMap["question"].(string); ok {
			return question
		}
	}
	return ""
}

func (h *fakeParamsHelper) GetProperties(params interface{}) []string {
	if fakeParamsMap, ok := params.(map[string]interface{}); ok {
		if properties, ok := fakeParamsMap["properties"].([]string); ok {
			return properties
		}
	}
	return nil
}
//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
//  CONTACT: hello@weaviate.io
//

package models

// Answer used in qna module to represent
// the answer to a given question.
//
// Result and Property are pointers so that "no answer" can be expressed as
// null rather than an empty string. StartPosition/EndPosition are character
// offsets of Result within Property's text.
//
// NOTE(review): `omitempty` on StartPosition, EndPosition and HasAnswer
// drops legitimate zero/false values from JSON output — confirm consumers
// only read this via GraphQL, where struct tags are not used.
type Answer struct {
	Result        *string `json:"result,omitempty"`
	Property      *string `json:"property,omitempty"`
	StartPosition int     `json:"startPosition,omitempty"`
	EndPosition   int     `json:"endPosition,omitempty"`
	HasAnswer     bool    `json:"hasAnswer,omitempty"`
}
//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
//  CONTACT: hello@weaviate.io
//

// Package additional registers the qna-openai module's additional
// properties (currently only "answer") with the GraphQL layer.
package additional

import (
	"context"

	"github.com/weaviate/weaviate/entities/models"

	"github.com/tailor-inc/graphql"
	"github.com/tailor-inc/graphql/language/ast"
	"github.com/weaviate/weaviate/entities/modulecapabilities"
	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/entities/search"
)

// AdditionalProperty is the contract an additional-property implementation
// (such as answer.AnswerProvider) must satisfy: resolve the property over a
// result set, extract/default its GraphQL parameters, and describe its field.
type AdditionalProperty interface {
	AdditionalPropertyFn(ctx context.Context,
		in []search.Result, params interface{}, limit *int,
		argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig) ([]search.Result, error)
	ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{}
	AdditionalPropertyDefaultValue() interface{}
	AdditionalFieldFn(classname string) *graphql.Field
}

// GraphQLAdditionalArgumentsProvider exposes the "answer" additional
// property to the module system.
type GraphQLAdditionalArgumentsProvider struct {
	answerProvider AdditionalProperty
}

// New wraps an AdditionalProperty implementation in a provider.
func New(answerProvider AdditionalProperty) *GraphQLAdditionalArgumentsProvider {
	return &GraphQLAdditionalArgumentsProvider{answerProvider}
}

// AdditionalProperties returns the module's additional properties keyed by
// their GraphQL name.
func (p *GraphQLAdditionalArgumentsProvider) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty {
	additionalProperties := map[string]modulecapabilities.AdditionalProperty{}
	additionalProperties["answer"] = p.getAnswer()
	return additionalProperties
}

// getAnswer assembles the capability descriptor for the "answer" property;
// the same resolver serves both Get and Explore queries.
func (p *GraphQLAdditionalArgumentsProvider) getAnswer() modulecapabilities.AdditionalProperty {
	return modulecapabilities.AdditionalProperty{
		GraphQLNames:           []string{"answer"},
		GraphQLFieldFunction:   p.answerProvider.AdditionalFieldFn,
		GraphQLExtractFunction: p.answerProvider.ExtractAdditionalFn,
		SearchFunctions: modulecapabilities.AdditionalSearch{
			ExploreGet:  p.answerProvider.AdditionalPropertyFn,
			ExploreList: p.answerProvider.AdditionalPropertyFn,
		},
	}
}
//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
//  CONTACT: hello@weaviate.io
//

package modqnaopenai

import (
	"github.com/weaviate/weaviate/entities/modulecapabilities"
	"github.com/weaviate/weaviate/modules/qna-openai/ask"
)

// initAskSearcher wires the "ask" vector searcher using the module's
// near-text dependencies.
// NOTE(review): m.searcher / m.nearTextDependencies are declared elsewhere
// in this package — confirm their types against the QnAModule definition.
func (m *QnAModule) initAskSearcher() error {
	m.searcher = ask.NewSearcher(m.nearTextDependencies)
	return nil
}

// initAskProvider wires the GraphQL "ask" argument provider, optionally
// backed by a text transformer (e.g. autocorrect).
func (m *QnAModule) initAskProvider() error {
	m.graphqlProvider = ask.New(m.askTextTransformer)
	return nil
}

// Arguments exposes the module's GraphQL arguments (the "ask" argument).
func (m *QnAModule) Arguments() map[string]modulecapabilities.GraphQLArgument {
	return m.graphqlProvider.Arguments()
}

// VectorSearches exposes the vector searches backing the "ask" argument.
func (m *QnAModule) VectorSearches() map[string]map[string]modulecapabilities.VectorForParams[[]float32] {
	return m.searcher.VectorSearches()
}

// Compile-time capability checks.
var (
	_ = modulecapabilities.GraphQLArguments(New())
	_ = modulecapabilities.DependencySearcher[[]float32](New())
)
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "fmt" + + "github.com/tailor-inc/graphql" + "github.com/weaviate/weaviate/adapters/handlers/graphql/descriptions" +) + +func (g *GraphQLArgumentsProvider) getAskArgumentFn(classname string) *graphql.ArgumentConfig { + return g.askArgument("GetObjects", classname) +} + +func (g *GraphQLArgumentsProvider) exploreAskArgumentFn() *graphql.ArgumentConfig { + return g.askArgument("Explore", "") +} + +func (g *GraphQLArgumentsProvider) aggregateAskArgumentFn(classname string) *graphql.ArgumentConfig { + return g.askArgument("Aggregate", classname) +} + +func (g *GraphQLArgumentsProvider) askArgument(prefix, className string) *graphql.ArgumentConfig { + prefixName := fmt.Sprintf("QnATransformers%s%s", prefix, className) + return &graphql.ArgumentConfig{ + Type: graphql.NewInputObject( + graphql.InputObjectConfig{ + Name: fmt.Sprintf("%sAskInpObj", prefixName), + Fields: g.askFields(prefixName), + Description: descriptions.GetWhereInpObj, + }, + ), + } +} + +func (g *GraphQLArgumentsProvider) askFields(prefix string) graphql.InputObjectConfigFieldMap { + askFields := graphql.InputObjectConfigFieldMap{ + "question": &graphql.InputObjectFieldConfig{ + Description: "Question to be answered", + Type: graphql.NewNonNull(graphql.String), + }, + "properties": &graphql.InputObjectFieldConfig{ + Description: "Properties which contains text", + Type: graphql.NewList(graphql.String), + }, + } + if g.askTransformer != nil { + askFields["autocorrect"] = &graphql.InputObjectFieldConfig{ + Description: "Autocorrect input text values", + Type: graphql.Boolean, + } + } + return askFields +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/graphql_argument_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/graphql_argument_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d433a72ac86f9a76e51749de6a8bdc79eb344f40 --- /dev/null +++ 
//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
//  CONTACT: hello@weaviate.io
//

package ask

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/tailor-inc/graphql"
)

// TestAskGraphQLArgument verifies the shape of the "ask" input object built
// without a text transformer (so no "autocorrect" field is expected).
func TestAskGraphQLArgument(t *testing.T) {
	t.Run("should generate ask argument properly", func(t *testing.T) {
		// given
		prefix := "Prefix"
		classname := "Class"
		// when
		ask := New(nil).askArgument(prefix, classname)

		// then
		// the built graphQL field needs to support this structure:
		// ask {
		//   question: "question?",
		//   properties: ["prop1", "prop2"]
		// }
		assert.NotNil(t, ask)
		assert.Equal(t, "QnATransformersPrefixClassAskInpObj", ask.Type.Name())
		askFields, ok := ask.Type.(*graphql.InputObject)
		assert.True(t, ok)
		assert.NotNil(t, askFields)
		// exactly question + properties: autocorrect only appears when a
		// transformer is configured
		assert.Equal(t, 2, len(askFields.Fields()))
		fields := askFields.Fields()
		question := fields["question"]
		questionNonNull, questionNonNullOK := question.Type.(*graphql.NonNull)
		assert.True(t, questionNonNullOK)
		assert.Equal(t, "String", questionNonNull.OfType.Name())
		assert.NotNil(t, question)
		properties := fields["properties"]
		propertiesList, propertiesListOK := properties.Type.(*graphql.List)
		assert.True(t, propertiesListOK)
		assert.Equal(t, "String", propertiesList.OfType.Name())
	})
}
-0,0 +1,40 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" +) + +type GraphQLArgumentsProvider struct { + askTransformer modulecapabilities.TextTransform +} + +func New(askTransformer modulecapabilities.TextTransform) *GraphQLArgumentsProvider { + return &GraphQLArgumentsProvider{askTransformer} +} + +func (g *GraphQLArgumentsProvider) Arguments() map[string]modulecapabilities.GraphQLArgument { + arguments := map[string]modulecapabilities.GraphQLArgument{} + arguments["ask"] = g.getAsk() + return arguments +} + +func (g *GraphQLArgumentsProvider) getAsk() modulecapabilities.GraphQLArgument { + return modulecapabilities.GraphQLArgument{ + GetArgumentsFunction: g.getAskArgumentFn, + AggregateArgumentsFunction: g.aggregateAskArgumentFn, + ExploreArgumentsFunction: g.exploreAskArgumentFn, + ExtractFunction: g.extractAskFn, + ValidateFunction: g.validateAskFn, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/grapqhl_extract.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/grapqhl_extract.go new file mode 100644 index 0000000000000000000000000000000000000000..5ea71522757feb69667392cb44b8681b8371bcb2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/grapqhl_extract.go @@ -0,0 +1,33 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import "github.com/weaviate/weaviate/entities/dto" + +func (g *GraphQLArgumentsProvider) extractAskFn(source map[string]interface{}) (interface{}, *dto.TargetCombination, error) { + var args AskParams + + question, ok := source["question"].(string) + if ok { + args.Question = question + } + + properties, ok := source["properties"].([]interface{}) + if ok { + args.Properties = make([]string, len(properties)) + for i, value := range properties { + args.Properties[i] = value.(string) + } + } + + return &args, nil, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/grapqhl_extract_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/grapqhl_extract_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d544ba0b0ed824a3cd0083145112385ad0f24ad4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/grapqhl_extract_test.go @@ -0,0 +1,70 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "reflect" + "testing" +) + +func Test_extractAskFn(t *testing.T) { + type args struct { + source map[string]interface{} + } + tests := []struct { + name string + args args + want interface{} + }{ + { + name: "should parse properly with only question", + args: args{ + source: map[string]interface{}{ + "question": "some question", + }, + }, + want: &AskParams{ + Question: "some question", + }, + }, + { + name: "should parse properly without params", + args: args{ + source: map[string]interface{}{}, + }, + want: &AskParams{}, + }, + { + name: "should parse properly with question, and properties", + args: args{ + source: map[string]interface{}{ + "question": "some question", + "properties": []interface{}{"prop1", "prop2"}, + }, + }, + want: &AskParams{ + Question: "some question", + Properties: []string{"prop1", "prop2"}, + }, + }, + } + t.Run("should extract without text transformer", func(t *testing.T) { + provider := New(nil) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got, _, _ := provider.extractAskFn(tt.args.source); !reflect.DeepEqual(got, tt.want) { + t.Errorf("extractAskFn() = %v, want %v", got, tt.want) + } + }) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/param.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/param.go new file mode 100644 index 0000000000000000000000000000000000000000..b49c0eb74806bcc1f1c329ad1de6b04d827ccd63 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/param.go @@ -0,0 +1,55 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "github.com/pkg/errors" +) + +type AskParams struct { + Question string + Certainty float64 + Distance float64 + WithDistance bool + Properties []string + Autocorrect bool +} + +func (n AskParams) GetCertainty() float64 { + return n.Certainty +} + +func (n AskParams) GetDistance() float64 { + return n.Distance +} + +func (n AskParams) SimilarityMetricProvided() bool { + return n.Certainty != 0 || n.WithDistance +} + +func (g *GraphQLArgumentsProvider) validateAskFn(param interface{}) error { + ask, ok := param.(*AskParams) + if !ok { + return errors.New("'ask' invalid parameter") + } + + if len(ask.Question) == 0 { + return errors.Errorf("'ask.question' needs to be defined") + } + + if ask.Certainty != 0 && ask.WithDistance { + return errors.Errorf( + "ask cannot provide both distance and certainty") + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/param_helper.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/param_helper.go new file mode 100644 index 0000000000000000000000000000000000000000..7b02a66d743f51f182dcd56e5d36e2678ca164b8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/param_helper.go @@ -0,0 +1,46 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +type ParamsHelper struct{} + +func NewParamsHelper() *ParamsHelper { + return &ParamsHelper{} +} + +func (p *ParamsHelper) GetQuestion(params interface{}) string { + if parameters, ok := params.(*AskParams); ok { + return parameters.Question + } + return "" +} + +func (p *ParamsHelper) GetProperties(params interface{}) []string { + if parameters, ok := params.(*AskParams); ok { + return parameters.Properties + } + return nil +} + +func (p *ParamsHelper) GetCertainty(params interface{}) float64 { + if parameters, ok := params.(*AskParams); ok { + return parameters.Certainty + } + return 0 +} + +func (p *ParamsHelper) GetDistance(params interface{}) float64 { + if parameters, ok := params.(*AskParams); ok { + return parameters.Distance + } + return 0 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/param_helper_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/param_helper_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1bc752c7cb61d7006c77a7a6780c3c62a7f6d196 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/param_helper_test.go @@ -0,0 +1,151 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "reflect" + "testing" +) + +func TestParamsHelper_GetQuestion(t *testing.T) { + type args struct { + params interface{} + } + tests := []struct { + name string + args args + want string + }{ + { + name: "should get question with certainty", + args: args{ + params: &AskParams{ + Question: "question", + Certainty: 0.8, + }, + }, + want: "question", + }, + { + name: "should get question with distance", + args: args{ + params: &AskParams{ + Question: "question", + Distance: 0.8, + }, + }, + want: "question", + }, + { + name: "should get empty string when empty params", + args: args{ + params: &AskParams{}, + }, + want: "", + }, + { + name: "should get empty string when nil params", + args: args{ + params: nil, + }, + want: "", + }, + { + name: "should get empty string when passed a struct, not a pointer to struct", + args: args{ + params: AskParams{}, + }, + want: "", + }, + { + name: "should get empty string when passed a struct with question, not a pointer to struct", + args: args{ + params: AskParams{ + Question: "question?", + }, + }, + want: "", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + p := &ParamsHelper{} + if got := p.GetQuestion(tt.args.params); got != tt.want { + t.Errorf("ParamsHelper.GetQuestion() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestParamsHelper_GetProperties(t *testing.T) { + type args struct { + params interface{} + } + tests := []struct { + name string + p *ParamsHelper + args args + want []string + }{ + { + name: "should get properties with distance", + args: args{ + params: &AskParams{ + Question: "question", + Properties: []string{"prop1", "prop2"}, + Distance: 0.8, + }, + }, + want: []string{"prop1", "prop2"}, + }, + { + name: "should get properties with certainty", + args: args{ + params: &AskParams{ + Question: "question", + Properties: []string{"prop1", "prop2"}, + Certainty: 0.8, + }, + }, + want: []string{"prop1", 
"prop2"}, + }, + { + name: "should get nil properties with empty pointer to AskParams", + args: args{ + params: &AskParams{}, + }, + want: nil, + }, + { + name: "should get nil properties with empty AskParams", + args: args{ + params: AskParams{}, + }, + want: nil, + }, + { + name: "should get nil properties with nil params", + args: args{ + params: nil, + }, + want: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + p := &ParamsHelper{} + if got := p.GetProperties(tt.args.params); !reflect.DeepEqual(got, tt.want) { + t.Errorf("ParamsHelper.GetProperties() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/param_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/param_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9e763e7ef52e4fb563c3ba4ee2b3788265f8fe31 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/param_test.go @@ -0,0 +1,77 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import "testing" + +func Test_validateAskFn(t *testing.T) { + type args struct { + param interface{} + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "should validate", + args: args{ + param: &AskParams{ + Question: "question", + }, + }, + }, + { + name: "should not validate when empty question", + args: args{ + param: &AskParams{ + Question: "", + }, + }, + wantErr: true, + }, + { + name: "should not validate when empty params", + args: args{ + param: &AskParams{}, + }, + wantErr: true, + }, + { + name: "should not validate when distance and certainty are present", + args: args{ + param: &AskParams{ + Distance: 0.1, + Certainty: 0.1, + }, + }, + wantErr: true, + }, + { + name: "should not validate when param passed is struct, not a pointer to struct", + args: args{ + param: AskParams{ + Question: "question", + }, + }, + wantErr: true, + }, + } + provider := New(nil) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := provider.validateAskFn(tt.args.param); (err != nil) != tt.wantErr { + t.Errorf("validateAskFn() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/searcher.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/searcher.go new file mode 100644 index 0000000000000000000000000000000000000000..dc25f3625b894b7465caae12a9139b819b17fbfc --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ask/searcher.go @@ -0,0 +1,72 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "context" + + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/generictypes" +) + +type vectorFromAskParam struct { + nearTextDep modulecapabilities.Dependency[[]float32] +} + +func (s *vectorFromAskParam) vectorForAskParamFn(ctx context.Context, params interface{}, + className string, + findVectorFn modulecapabilities.FindVectorFn[[]float32], + cfg moduletools.ClassConfig, +) ([]float32, error) { + return s.vectorFromAskParam(ctx, params.(*AskParams), className, findVectorFn, cfg) +} + +func (s *vectorFromAskParam) vectorFromAskParam(ctx context.Context, + params *AskParams, className string, + findVectorFn modulecapabilities.FindVectorFn[[]float32], + cfg moduletools.ClassConfig, +) ([]float32, error) { + arg := s.nearTextDep.GraphQLArgument() + + rawNearTextParam := map[string]interface{}{} + rawNearTextParam["concepts"] = []interface{}{params.Question} + + nearTextParam, _, _ := arg.ExtractFunction(rawNearTextParam) + vectorSearchFn := s.nearTextDep.VectorSearch() + + return vectorSearchFn.VectorForParams(ctx, nearTextParam, className, findVectorFn, cfg) +} + +type Searcher struct { + // nearText modules dependencies + nearTextDeps []modulecapabilities.Dependency[[]float32] +} + +func NewSearcher(nearTextDeps []modulecapabilities.Dependency[[]float32]) *Searcher { + return &Searcher{nearTextDeps} +} + +func (s *Searcher) VectorSearches() map[string]map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearchers := map[string]map[string]modulecapabilities.VectorForParams[[]float32]{} + for _, nearTextDep := range s.nearTextDeps { + vectorSearchers[nearTextDep.ModuleName()] = s.vectorSearches(nearTextDep) + } + return vectorSearchers +} + +func (s *Searcher) vectorSearches(nearTextDep modulecapabilities.Dependency[[]float32]) 
map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearches := map[string]modulecapabilities.VectorForParams[[]float32]{} + vectorFromAsk := &vectorFromAskParam{nearTextDep} + vectorSearches["ask"] = generictypes.VectorForParams(vectorFromAsk.vectorForAskParamFn) + return vectorSearches +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/clients/qna.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/clients/qna.go new file mode 100644 index 0000000000000000000000000000000000000000..445b0d48844328a80a03241e5e660783fd038bfc --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/clients/qna.go @@ -0,0 +1,300 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "strings" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/qna-openai/config" + "github.com/weaviate/weaviate/modules/qna-openai/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/monitoring" +) + +func buildUrl(baseURL, resourceName, deploymentID string, isAzure bool) (string, error) { + ///X update with base url + if isAzure { + host := "https://" + resourceName + ".openai.azure.com" + path := "openai/deployments/" + deploymentID + "/completions" + queryParam := "api-version=2022-12-01" + return fmt.Sprintf("%s/%s?%s", host, path, queryParam), nil + } + host := baseURL + path := "/v1/completions" + return url.JoinPath(host, path) +} + +type qna struct { + openAIApiKey string + openAIOrganization string + azureApiKey string + 
buildUrlFn func(baseURL, resourceName, deploymentID string, isAzure bool) (string, error) + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(openAIApiKey, openAIOrganization, azureApiKey string, timeout time.Duration, logger logrus.FieldLogger) *qna { + return &qna{ + openAIApiKey: openAIApiKey, + openAIOrganization: openAIOrganization, + azureApiKey: azureApiKey, + httpClient: &http.Client{Timeout: timeout}, + buildUrlFn: buildUrl, + logger: logger, + } +} + +func (v *qna) Answer(ctx context.Context, text, question string, cfg moduletools.ClassConfig) (*ent.AnswerResult, error) { + metrics := monitoring.GetMetrics() + startTime := time.Now() + metrics.ModuleExternalRequests.WithLabelValues("qna", "openai").Inc() + + prompt := v.generatePrompt(text, question) + + settings := config.NewClassSettings(cfg) + + body, err := json.Marshal(answersInput{ + Prompt: prompt, + Model: settings.Model(), + MaxTokens: settings.MaxTokens(), + Temperature: settings.Temperature(), + Stop: []string{"\n"}, + FrequencyPenalty: settings.FrequencyPenalty(), + PresencePenalty: settings.PresencePenalty(), + TopP: settings.TopP(), + }) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + oaiUrl, err := v.buildOpenAIUrl(ctx, settings.BaseURL(), settings.ResourceName(), settings.DeploymentID(), settings.IsAzure()) + if err != nil { + return nil, errors.Wrap(err, "join OpenAI API host and path") + } + + defer func() { + monitoring.GetMetrics().ModuleExternalRequestDuration.WithLabelValues("qna", oaiUrl).Observe(time.Since(startTime).Seconds()) + }() + + v.logger.WithField("URL", oaiUrl).Info("using OpenAI") + + req, err := http.NewRequestWithContext(ctx, "POST", oaiUrl, + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + apiKey, err := v.getApiKey(ctx, settings.IsAzure()) + if err != nil { + return nil, errors.Wrapf(err, "OpenAI API Key") + } + req.Header.Add(v.getApiKeyHeaderAndValue(apiKey, 
settings.IsAzure())) + if openAIOrganization := v.getOpenAIOrganization(ctx); openAIOrganization != "" { + req.Header.Add("OpenAI-Organization", openAIOrganization) + } + req.Header.Add("Content-Type", "application/json") + + res, err := v.httpClient.Do(req) + if res != nil { + vrst := monitoring.GetMetrics().ModuleExternalResponseStatus + vrst.WithLabelValues("qna", oaiUrl, fmt.Sprintf("%v", res.StatusCode)).Inc() + } + if err != nil { + // res is nil on transport errors, so res.StatusCode must not be dereferenced here; + // the response-status metric was already recorded above when res != nil. + monitoring.GetMetrics().ModuleExternalError.WithLabelValues("qna", "openai", "OpenAI API", "transport error").Inc() + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + requestID := res.Header.Get("x-request-id") + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody answersResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + monitoring.GetMetrics().ModuleExternalResponseSize.WithLabelValues("qna", oaiUrl).Observe(float64(len(bodyBytes))) + // NOTE: the response-status metric was already incremented right after httpClient.Do + // (in the res != nil guard), so it is not incremented a second time here to avoid double-counting. + + if res.StatusCode != 200 || resBody.Error != nil { + return nil, v.getError(res.StatusCode, requestID, resBody.Error, settings.IsAzure()) + } + + if len(resBody.Choices) > 0 && resBody.Choices[0].Text != "" { + return &ent.AnswerResult{ + Text: text, + Question: question, + Answer: &resBody.Choices[0].Text, + }, nil + } + return &ent.AnswerResult{ + Text: text, + Question: question, + Answer: nil, + }, nil +} + +func (v *qna) buildOpenAIUrl(ctx context.Context, baseURL, resourceName, deploymentID string, isAzure bool) (string, error) { + passedBaseURL := baseURL + + if headerBaseURL := modulecomponents.GetValueFromContext(ctx, "X-Openai-Baseurl"); headerBaseURL != "" { + passedBaseURL = headerBaseURL + } + + if headerDeploymentID := modulecomponents.GetValueFromContext(ctx, "X-Azure-Deployment-Id"); headerDeploymentID != "" { + deploymentID = headerDeploymentID + } + + if headerResourceName := modulecomponents.GetValueFromContext(ctx, "X-Azure-Resource-Name"); headerResourceName != "" { + resourceName = headerResourceName + } + + return v.buildUrlFn(passedBaseURL, resourceName, deploymentID, isAzure) +} + +func (v *qna) getError(statusCode int, requestID string, resBodyError *openAIApiError, isAzure bool) error { + endpoint := "OpenAI API" + if isAzure { + endpoint = "Azure OpenAI API" + } + errorMsg := fmt.Sprintf("connection to: %s failed with status: %d", endpoint, statusCode) + if requestID != "" { + errorMsg = fmt.Sprintf("%s request-id: %s", errorMsg, requestID) + } + if resBodyError != nil { + errorMsg = fmt.Sprintf("%s error: %v", errorMsg, resBodyError.Message) + } + monitoring.GetMetrics().ModuleExternalError.WithLabelValues("qna", "openai", endpoint, 
fmt.Sprintf("%v", statusCode)).Inc() + return errors.New(errorMsg) +} + +func (v *qna) getApiKeyHeaderAndValue(apiKey string, isAzure bool) (string, string) { + if isAzure { + return "api-key", apiKey + } + return "Authorization", fmt.Sprintf("Bearer %s", apiKey) +} + +func (v *qna) generatePrompt(text string, question string) string { + return fmt.Sprintf(`'Please answer the question according to the above context. + +=== +Context: %v +=== +Q: %v +A:`, strings.ReplaceAll(text, "\n", " "), question) +} + +func (v *qna) getApiKey(ctx context.Context, isAzure bool) (string, error) { + var apiKey, envVarValue, envVar string + + if isAzure { + apiKey = "X-Azure-Api-Key" + envVar = "AZURE_APIKEY" + envVarValue = v.azureApiKey + } else { + apiKey = "X-Openai-Api-Key" + envVar = "OPENAI_APIKEY" + envVarValue = v.openAIApiKey + } + + return v.getApiKeyFromContext(ctx, apiKey, envVarValue, envVar) +} + +func (v *qna) getApiKeyFromContext(ctx context.Context, apiKey, envVarValue, envVar string) (string, error) { + if apiKeyValue := modulecomponents.GetValueFromContext(ctx, apiKey); apiKeyValue != "" { + return apiKeyValue, nil + } + if envVarValue != "" { + return envVarValue, nil + } + return "", fmt.Errorf("no api key found neither in request header: %s nor in environment variable under %s", apiKey, envVar) +} + +func (v *qna) getOpenAIOrganization(ctx context.Context) string { + if value := modulecomponents.GetValueFromContext(ctx, "X-Openai-Organization"); value != "" { + return value + } + return v.openAIOrganization +} + +type answersInput struct { + Prompt string `json:"prompt"` + Model string `json:"model"` + MaxTokens float64 `json:"max_tokens"` + Temperature float64 `json:"temperature"` + Stop []string `json:"stop"` + FrequencyPenalty float64 `json:"frequency_penalty"` + PresencePenalty float64 `json:"presence_penalty"` + TopP float64 `json:"top_p"` +} + +type answersResponse struct { + Choices []choice + Error *openAIApiError `json:"error,omitempty"` +} + +type 
choice struct { + FinishReason string + Index float32 + Logprobs string + Text string +} + +type openAIApiError struct { + Message string `json:"message"` + Type string `json:"type"` + Param string `json:"param"` + Code openAICode `json:"code"` +} + +type openAICode string + +func (c *openAICode) String() string { + if c == nil { + return "" + } + return string(*c) +} + +func (c *openAICode) UnmarshalJSON(data []byte) (err error) { + if number, err := strconv.Atoi(string(data)); err == nil { + str := strconv.Itoa(number) + *c = openAICode(str) + return nil + } + var str string + err = json.Unmarshal(data, &str) + if err != nil { + return err + } + *c = openAICode(str) + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/clients/qna_meta.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/clients/qna_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..08e633161bc7475284a7150f122a25c3944939f2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/clients/qna_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *qna) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "OpenAI Question & Answering Module", + "documentationHref": "https://platform.openai.com/docs/api-reference/completions", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/clients/qna_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/clients/qna_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..448194631422c812c86b04f363f1f8a90037dd44 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/clients/qna_meta_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New("", "", "", 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["name"] + assert.True(t, metaModel != nil) + documentationHref := meta["documentationHref"] + assert.True(t, documentationHref != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + 
w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "hostname": "http://127.0.0.1:8080", + "modules": { + "qna-openai": { + "documentationHref": "https://platform.openai.com/docs/api-reference/completions", + "name": "OpenAI Question & Answering Module" + } + }, + "version": "1.16.0" +}` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/clients/qna_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/clients/qna_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9c89e4b59ad648b0e757be6940778da436088332 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/clients/qna_test.go @@ -0,0 +1,233 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/modules/qna-openai/ent" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func TestGetAnswer(t *testing.T) { + t.Run("when the server has a successful answer ", func(t *testing.T) { + handler := &testAnswerHandler{ + t: t, + answer: answersResponse{ + Choices: []choice{{ + FinishReason: "test", + Index: 0, + Logprobs: "", + Text: "John", + }}, + Error: nil, + }, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("openAIApiKey", "", "", 0, nullLogger()) + c.buildUrlFn = func(baseURL, resourceName, deploymentID string, isAzure bool) (string, error) { + return buildUrl(server.URL, resourceName, deploymentID, 
isAzure) + } + + expected := ent.AnswerResult{ + Text: "My name is John", + Question: "What is my name?", + Answer: ptString("John"), + } + + res, err := c.Answer(context.Background(), "My name is John", "What is my name?", nil) + + assert.Nil(t, err) + assert.Equal(t, expected, *res) + }) + + t.Run("when the server has a an error", func(t *testing.T) { + server := httptest.NewServer(&testAnswerHandler{ + t: t, + answer: answersResponse{ + Error: &openAIApiError{ + Message: "some error from the server", + }, + }, + }) + defer server.Close() + + c := New("openAIApiKey", "", "", 0, nullLogger()) + c.buildUrlFn = func(baseURL, resourceName, deploymentID string, isAzure bool) (string, error) { + return buildUrl(server.URL, resourceName, deploymentID, isAzure) + } + + _, err := c.Answer(context.Background(), "My name is John", "What is my name?", nil) + + require.NotNil(t, err) + assert.Error(t, err, "connection to OpenAI failed with status: 500 error: some error from the server") + }) + + t.Run("when the server has a an error and request id header", func(t *testing.T) { + server := httptest.NewServer(&testAnswerHandler{ + t: t, + answer: answersResponse{ + Error: &openAIApiError{ + Message: "some error from the server", + }, + }, + headerRequestID: "some-request-id", + }) + defer server.Close() + + c := New("openAIApiKey", "", "", 0, nullLogger()) + c.buildUrlFn = func(baseURL, resourceName, deploymentID string, isAzure bool) (string, error) { + return buildUrl(server.URL, resourceName, deploymentID, isAzure) + } + + _, err := c.Answer(context.Background(), "My name is John", "What is my name?", nil) + + require.NotNil(t, err) + assert.Error(t, err, "connection to OpenAI failed with status: 500 request-id: some-request-id error: some error from the server") + }) + + t.Run("when X-OpenAI-BaseURL header is passed", func(t *testing.T) { + c := New("openAIApiKey", "", "", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Openai-Baseurl", 
[]string{"http://base-url-passed-in-header.com"}) + + buildURL, err := c.buildOpenAIUrl(ctxWithValue, "http://default-url.com", "", "", false) + require.NoError(t, err) + assert.Equal(t, "http://base-url-passed-in-header.com/v1/completions", buildURL) + + buildURL, err = c.buildOpenAIUrl(context.TODO(), "http://default-url.com", "", "", false) + require.NoError(t, err) + assert.Equal(t, "http://default-url.com/v1/completions", buildURL) + }) + + t.Run("when X-Azure-DeploymentId is passed", func(t *testing.T) { + c := New("", "", "", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Azure-Deployment-Id", []string{"headerDeploymentId"}) + ctxWithValue = context.WithValue(ctxWithValue, + "X-Azure-Resource-Name", []string{"headerResourceName"}) + + buildURL, err := c.buildOpenAIUrl(ctxWithValue, "", "", "", true) + require.NoError(t, err) + assert.Equal(t, "https://headerResourceName.openai.azure.com/openai/deployments/headerDeploymentId/completions?api-version=2022-12-01", buildURL) + }) +} + +type testAnswerHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + answer answersResponse + headerRequestID string +} + +func (f *testAnswerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/v1/completions", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.answer.Error != nil && f.answer.Error.Message != "" { + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + if f.headerRequestID != "" { + w.Header().Add("x-request-id", f.headerRequestID) + } + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + outBytes, err := json.Marshal(f.answer) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func TestOpenAIApiErrorDecode(t 
*testing.T) { + t.Run("getModelStringQuery", func(t *testing.T) { + type args struct { + response []byte + } + tests := []struct { + name string + args args + want string + }{ + { + name: "Error code: missing property", + args: args{ + response: []byte(`{"message": "failed", "type": "error", "param": "arg..."}`), + }, + want: "", + }, + { + name: "Error code: as int", + args: args{ + response: []byte(`{"message": "failed", "type": "error", "param": "arg...", "code": 500}`), + }, + want: "500", + }, + { + name: "Error code as string number", + args: args{ + response: []byte(`{"message": "failed", "type": "error", "param": "arg...", "code": "500"}`), + }, + want: "500", + }, + { + name: "Error code as string text", + args: args{ + response: []byte(`{"message": "failed", "type": "error", "param": "arg...", "code": "invalid_api_key"}`), + }, + want: "invalid_api_key", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var got *openAIApiError + err := json.Unmarshal(tt.args.response, &got) + require.NoError(t, err) + + if got.Code.String() != tt.want { + t.Errorf("OpenAIerror.code = %v, want %v", got.Code, tt.want) + } + }) + } + }) +} + +func ptString(in string) *string { + return &in +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/config.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..7bde9fe06be3e7e1ad46c3190b4e49ae7c65c2d1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/config.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modqnaopenai + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/qna-openai/config" +) + +func (m *QnAModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *QnAModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *QnAModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := config.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..14af910fdbe7720671285635e215ddfa70e68f9a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/config/class_settings.go @@ -0,0 +1,221 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "encoding/json" + "fmt" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" +) + +const ( + modelProperty = "model" + temperatureProperty = "temperature" + maxTokensProperty = "maxTokens" + frequencyPenaltyProperty = "frequencyPenalty" + presencePenaltyProperty = "presencePenalty" + topPProperty = "topP" + baseURLProperty = "baseURL" +) + +var ( + DefaultOpenAIModel = "text-ada-001" + DefaultOpenAITemperature float64 = 0.0 + DefaultOpenAIMaxTokens float64 = 16 + DefaultOpenAIFrequencyPenalty float64 = 0.0 + DefaultOpenAIPresencePenalty float64 = 0.0 + DefaultOpenAITopP float64 = 1.0 + DefaultOpenAIBaseURL = "https://api.openai.com" +) + +var maxTokensForModel = map[string]float64{ + "text-ada-001": 2048, + "text-babbage-001": 2048, + "text-curie-001": 2048, + "text-davinci-002": 4000, + "text-davinci-003": 4000, + "gpt-3.5-turbo-instruct": 4000, +} + +var availableOpenAIModels = []string{ + "text-ada-001", + "text-babbage-001", + "text-curie-001", + "text-davinci-002", + "text-davinci-003", + "gpt-3.5-turbo-instruct", +} + +type classSettings struct { + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + + model := ic.getStringProperty(modelProperty, DefaultOpenAIModel) + if model == nil || !ic.validateOpenAISetting(*model, availableOpenAIModels) { + return errors.Errorf("wrong OpenAI model name, available model names are: %v", availableOpenAIModels) + } + + temperature := ic.getFloatProperty(temperatureProperty, &DefaultOpenAITemperature) + if temperature == nil || (*temperature < 0 || *temperature > 1) { + return 
errors.Errorf("Wrong temperature configuration, values are between 0.0 and 1.0") + } + + maxTokens := ic.getFloatProperty(maxTokensProperty, &DefaultOpenAIMaxTokens) + if maxTokens == nil || (*maxTokens < 0 || *maxTokens > getMaxTokensForModel(*model)) { + return errors.Errorf("Wrong maxTokens configuration, values are should have a minimal value of 1 and max is dependant on the model used") + } + + frequencyPenalty := ic.getFloatProperty(frequencyPenaltyProperty, &DefaultOpenAIFrequencyPenalty) + if frequencyPenalty == nil || (*frequencyPenalty < 0 || *frequencyPenalty > 1) { + return errors.Errorf("Wrong frequencyPenalty configuration, values are between 0.0 and 1.0") + } + + presencePenalty := ic.getFloatProperty(presencePenaltyProperty, &DefaultOpenAIPresencePenalty) + if presencePenalty == nil || (*presencePenalty < 0 || *presencePenalty > 1) { + return errors.Errorf("Wrong presencePenalty configuration, values are between 0.0 and 1.0") + } + + topP := ic.getFloatProperty(topPProperty, &DefaultOpenAITopP) + if topP == nil || (*topP < 0 || *topP > 5) { + return errors.Errorf("Wrong topP configuration, values are should have a minimal value of 1 and max of 5") + } + + err := ic.validateAzureConfig(ic.ResourceName(), ic.DeploymentID()) + if err != nil { + return err + } + + return nil +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) *string { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return &defaultValue + } + + model, ok := ic.cfg.ClassByModuleName("qna-openai")[name] + if ok { + asString, ok := model.(string) + if ok { + return &asString + } + var empty string + return &empty + } + return &defaultValue +} + +func (ic *classSettings) getFloatProperty(name string, defaultValue *float64) *float64 { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return defaultValue + } + + val, ok := ic.cfg.ClassByModuleName("qna-openai")[name] 
+ if ok { + asFloat, ok := val.(float64) + if ok { + return &asFloat + } + asNumber, ok := val.(json.Number) + if ok { + asFloat, _ := asNumber.Float64() + return &asFloat + } + asInt, ok := val.(int) + if ok { + asFloat := float64(asInt) + return &asFloat + } + wrongVal := float64(-1.0) + return &wrongVal + } + + if defaultValue != nil { + return defaultValue + } + return nil +} + +func getMaxTokensForModel(model string) float64 { + return maxTokensForModel[model] +} + +func (ic *classSettings) validateOpenAISetting(value string, availableValues []string) bool { + for i := range availableValues { + if value == availableValues[i] { + return true + } + } + return false +} + +func (ic *classSettings) Model() string { + return *ic.getStringProperty(modelProperty, DefaultOpenAIModel) +} + +func (ic *classSettings) MaxTokens() float64 { + return *ic.getFloatProperty(maxTokensProperty, &DefaultOpenAIMaxTokens) +} + +func (ic *classSettings) BaseURL() string { + return *ic.getStringProperty(baseURLProperty, DefaultOpenAIBaseURL) +} + +func (ic *classSettings) Temperature() float64 { + return *ic.getFloatProperty(temperatureProperty, &DefaultOpenAITemperature) +} + +func (ic *classSettings) FrequencyPenalty() float64 { + return *ic.getFloatProperty(frequencyPenaltyProperty, &DefaultOpenAIFrequencyPenalty) +} + +func (ic *classSettings) PresencePenalty() float64 { + return *ic.getFloatProperty(presencePenaltyProperty, &DefaultOpenAIPresencePenalty) +} + +func (ic *classSettings) TopP() float64 { + return *ic.getFloatProperty(topPProperty, &DefaultOpenAITopP) +} + +func (ic *classSettings) ResourceName() string { + return *ic.getStringProperty("resourceName", "") +} + +func (ic *classSettings) DeploymentID() string { + return *ic.getStringProperty("deploymentId", "") +} + +func (ic *classSettings) IsAzure() bool { + return ic.ResourceName() != "" && ic.DeploymentID() != "" +} + +func (ic *classSettings) validateAzureConfig(resourceName string, deploymentId string) error { + 
if (resourceName == "" && deploymentId != "") || (resourceName != "" && deploymentId == "") { + return fmt.Errorf("both resourceName and deploymentId must be provided") + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..17d347cb883322f5b80c1c3f135ec657508ad1b8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/config/class_settings_test.go @@ -0,0 +1,229 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantModel string + wantMaxTokens float64 + wantTemperature float64 + wantTopP float64 + wantFrequencyPenalty float64 + wantPresencePenalty float64 + wantResourceName string + wantDeploymentID string + wantIsAzure bool + wantErr error + wantBaseURL string + }{ + { + name: "Happy flow", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantModel: "text-ada-001", + wantMaxTokens: 16, + wantTemperature: 0.0, + wantTopP: 1, + wantFrequencyPenalty: 0.0, + wantPresencePenalty: 0.0, + wantErr: nil, + wantBaseURL: "https://api.openai.com", + }, + { + name: "Everything non default configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "text-babbage-001", + 
"maxTokens": 100, + "temperature": 0.5, + "topP": 3, + "frequencyPenalty": 0.1, + "presencePenalty": 0.9, + "baseURL": "https://openai.proxy.dev", + }, + }, + wantModel: "text-babbage-001", + wantMaxTokens: 100, + wantTemperature: 0.5, + wantTopP: 3, + wantFrequencyPenalty: 0.1, + wantPresencePenalty: 0.9, + wantBaseURL: "https://openai.proxy.dev", + wantErr: nil, + }, + { + name: "Azure OpenAI config", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "resourceName": "weaviate", + "deploymentId": "text-ada-001", + }, + }, + wantModel: "text-ada-001", + wantResourceName: "weaviate", + wantDeploymentID: "text-ada-001", + wantIsAzure: true, + wantMaxTokens: 16, + wantTemperature: 0.0, + wantTopP: 1, + wantFrequencyPenalty: 0.0, + wantPresencePenalty: 0.0, + wantErr: nil, + wantBaseURL: "https://api.openai.com", + }, + { + name: "Wrong model data type configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": true, + }, + }, + wantErr: errors.Errorf("wrong OpenAI model name, available model names are: %v", availableOpenAIModels), + }, + { + name: "Wrong model data type configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "this-is-a-non-existing-model", + }, + }, + wantErr: errors.Errorf("wrong OpenAI model name, available model names are: %v", availableOpenAIModels), + }, + { + name: "Wrong maxTokens configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "maxTokens": true, + }, + }, + wantErr: errors.Errorf("Wrong maxTokens configuration, values are should have a minimal value of 1 and max is dependant on the model used"), + }, + { + name: "Wrong temperature configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "temperature": true, + }, + }, + wantErr: errors.Errorf("Wrong temperature configuration, values are between 0.0 and 1.0"), + }, + { + name: "Wrong frequencyPenalty configured", + cfg: fakeClassConfig{ + classConfig: 
map[string]interface{}{ + "frequencyPenalty": true, + }, + }, + wantErr: errors.Errorf("Wrong frequencyPenalty configuration, values are between 0.0 and 1.0"), + }, + { + name: "Wrong presencePenalty configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "presencePenalty": true, + }, + }, + wantErr: errors.Errorf("Wrong presencePenalty configuration, values are between 0.0 and 1.0"), + }, + { + name: "Wrong topP configured", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "topP": true, + }, + }, + wantErr: errors.Errorf("Wrong topP configuration, values are should have a minimal value of 1 and max of 5"), + }, + { + name: "Wrong Azure OpenAI config - empty deploymentId", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "resourceName": "resource-name", + }, + }, + wantErr: errors.Errorf("both resourceName and deploymentId must be provided"), + }, + { + name: "Wrong Azure OpenAI config - empty resourceName", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "deploymentId": "ada", + }, + }, + wantErr: errors.Errorf("both resourceName and deploymentId must be provided"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.EqualError(t, tt.wantErr, ic.Validate(nil).Error()) + } else { + assert.Equal(t, tt.wantModel, ic.Model()) + assert.Equal(t, tt.wantMaxTokens, ic.MaxTokens()) + assert.Equal(t, tt.wantTemperature, ic.Temperature()) + assert.Equal(t, tt.wantTopP, ic.TopP()) + assert.Equal(t, tt.wantFrequencyPenalty, ic.FrequencyPenalty()) + assert.Equal(t, tt.wantPresencePenalty, ic.PresencePenalty()) + assert.Equal(t, tt.wantResourceName, ic.ResourceName()) + assert.Equal(t, tt.wantDeploymentID, ic.DeploymentID()) + assert.Equal(t, tt.wantIsAzure, ic.IsAzure()) + assert.Equal(t, tt.wantBaseURL, ic.BaseURL()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f 
fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/dependency/dependency.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/dependency/dependency.go new file mode 100644 index 0000000000000000000000000000000000000000..23e9092f6412086a9cd5dc8c7e2ff2516a623a5d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/dependency/dependency.go @@ -0,0 +1,42 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package dependency + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +type NearTextDependecy struct { + moduleName string + argument modulecapabilities.GraphQLArgument + searcher modulecapabilities.VectorForParams[[]float32] +} + +func New(moduleName string, argument modulecapabilities.GraphQLArgument, + searcher modulecapabilities.VectorForParams[[]float32], +) *NearTextDependecy { + return &NearTextDependecy{moduleName, argument, searcher} +} + +func (d *NearTextDependecy) Argument() string { + return "nearText" +} + +func (d *NearTextDependecy) ModuleName() string { + return d.moduleName +} + +func (d *NearTextDependecy) GraphQLArgument() modulecapabilities.GraphQLArgument { + return d.argument +} + +func (d *NearTextDependecy) VectorSearch() modulecapabilities.VectorForParams[[]float32] { + return d.searcher +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/ent/vectorization_result.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ent/vectorization_result.go new file mode 100644 index 0000000000000000000000000000000000000000..6d62a2a74e75fbf870de87610cc082ac7840e3ca --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/ent/vectorization_result.go @@ -0,0 +1,18 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +type AnswerResult struct { + Text string + Question string + Answer *string +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-openai/module.go b/platform/dbops/binaries/weaviate-src/modules/qna-openai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..d689dd2c29760b4895e0614d8af794ec109651bd --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-openai/module.go @@ -0,0 +1,162 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modqnaopenai + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + qnaadditional "github.com/weaviate/weaviate/modules/qna-openai/additional" + qnaadditionalanswer "github.com/weaviate/weaviate/modules/qna-openai/additional/answer" + qnaask "github.com/weaviate/weaviate/modules/qna-openai/ask" + "github.com/weaviate/weaviate/modules/qna-openai/clients" + qnaadependency "github.com/weaviate/weaviate/modules/qna-openai/dependency" + "github.com/weaviate/weaviate/modules/qna-openai/ent" +) + +const Name = "qna-openai" + +func New() *QnAModule { + return &QnAModule{} +} + +type QnAModule struct { + qna qnaClient + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.DependencySearcher[[]float32] + additionalPropertiesProvider modulecapabilities.AdditionalProperties + nearTextDependencies []modulecapabilities.Dependency[[]float32] + askTextTransformer modulecapabilities.TextTransform +} + +type qnaClient interface { + Answer(ctx context.Context, text, question string, cfg moduletools.ClassConfig) 
(*ent.AnswerResult, error) + MetaInfo() (map[string]interface{}, error) +} + +func (m *QnAModule) Name() string { + return Name +} + +func (m *QnAModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextQnA +} + +func (m *QnAModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init q/a") + } + + return nil +} + +func (m *QnAModule) InitExtension(modules []modulecapabilities.Module) error { + var textTransformer modulecapabilities.TextTransform + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + textTransformer = arg.TextTransformers()["ask"] + } + } + } + + m.askTextTransformer = textTransformer + + if err := m.initAskProvider(); err != nil { + return errors.Wrap(err, "init ask provider") + } + + return nil +} + +func (m *QnAModule) InitDependency(modules []modulecapabilities.Module) error { + nearTextDependencies := []modulecapabilities.Dependency[[]float32]{} + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + var argument modulecapabilities.GraphQLArgument + var searcher modulecapabilities.VectorForParams[[]float32] + if arg, ok := module.(modulecapabilities.GraphQLArguments); ok { + if arg != nil && arg.Arguments() != nil { + if nearTextArg, ok := arg.Arguments()["nearText"]; ok { + argument = nearTextArg + } + } + } + if arg, ok := module.(modulecapabilities.Searcher[[]float32]); ok { + if arg != nil && arg.VectorSearches() != nil { + if nearTextSearcher, ok := arg.VectorSearches()["nearText"]; ok { + searcher = nearTextSearcher + } + } + } + + if argument.ExtractFunction != nil && searcher != nil { + nearTextDependency := qnaadependency.New(module.Name(), argument, searcher) + 
nearTextDependencies = append(nearTextDependencies, nearTextDependency) + } + } + if len(nearTextDependencies) == 0 { + return errors.New("nearText dependecy not present") + } + + m.nearTextDependencies = nearTextDependencies + + if err := m.initAskSearcher(); err != nil { + return errors.Wrap(err, "init ask searcher") + } + + return nil +} + +func (m *QnAModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + openAIApiKey := os.Getenv("OPENAI_APIKEY") + openAIOrganization := os.Getenv("OPENAI_ORGANIZATION") + azureApiKey := os.Getenv("AZURE_APIKEY") + + client := clients.New(openAIApiKey, openAIOrganization, azureApiKey, timeout, logger) + + m.qna = client + + answerProvider := qnaadditionalanswer.New(m.qna, qnaask.NewParamsHelper()) + m.additionalPropertiesProvider = qnaadditional.New(answerProvider) + + return nil +} + +func (m *QnAModule) MetaInfo() (map[string]interface{}, error) { + return m.qna.MetaInfo() +} + +func (m *QnAModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.AdditionalProperties(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer.go new file mode 100644 index 0000000000000000000000000000000000000000..9e4dab6c5ef78ea693266ec9e1e478377003700f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer.go @@ -0,0 +1,79 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate 
B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package answer + +import ( + "context" + "errors" + + "github.com/weaviate/weaviate/entities/schema" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/tailor-inc/graphql" + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/modules/qna-transformers/ent" +) + +type Params struct { + properties []string +} + +func (n Params) GetPropertiesToExtract() []string { + return n.properties +} + +type qnaClient interface { + Answer(ctx context.Context, + text, question string) (*ent.AnswerResult, error) +} + +type paramsHelper interface { + GetQuestion(params interface{}) string + GetProperties(params interface{}) []string + GetCertainty(params interface{}) float64 + GetDistance(params interface{}) float64 + GetRerank(params interface{}) bool +} + +type AnswerProvider struct { + qna qnaClient + paramsHelper +} + +func New(qna qnaClient, paramsHelper paramsHelper) *AnswerProvider { + return &AnswerProvider{qna, paramsHelper} +} + +func (p *AnswerProvider) AdditionalPropertyDefaultValue() interface{} { + return &Params{} +} + +func (p *AnswerProvider) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return &Params{properties: schema.GetPropertyNamesFromClass(class, false)} +} + +func (p *AnswerProvider) AdditionalFieldFn(classname string) *graphql.Field { + return p.additionalAnswerField(classname) +} + +func (p *AnswerProvider) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + if parameters, ok := params.(*Params); ok { + return p.findAnswer(ctx, in, parameters, limit, argumentModuleParams) + } + return nil, errors.New("wrong parameters") +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer_graphql_field.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer_graphql_field.go new file mode 100644 index 0000000000000000000000000000000000000000..d3da016ffd2f8bfee688093a3f584e4ae08d2dff --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer_graphql_field.go @@ -0,0 +1,35 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package answer + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func (p *AnswerProvider) additionalAnswerField(classname string) *graphql.Field { + return &graphql.Field{ + Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalAnswer", classname), + Fields: graphql.Fields{ + "result": &graphql.Field{Type: graphql.String}, + "startPosition": &graphql.Field{Type: graphql.Int}, + "endPosition": &graphql.Field{Type: graphql.Int}, + "property": &graphql.Field{Type: graphql.String}, + "certainty": &graphql.Field{Type: graphql.Float}, + "distance": &graphql.Field{Type: graphql.Float}, + "hasAnswer": &graphql.Field{Type: graphql.Boolean}, + }, + }), + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer_graphql_field_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer_graphql_field_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ae4ef586b6ecb8ef740057341a861037726ca90b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer_graphql_field_test.go @@ -0,0 +1,56 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ 
\/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package answer + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/tailor-inc/graphql" +) + +func TestAnswerField(t *testing.T) { + t.Run("should generate answer argument properly", func(t *testing.T) { + // given + answerProvider := &AnswerProvider{} + classname := "Class" + + // when + answer := answerProvider.additionalAnswerField(classname) + + // then + // the built graphQL field needs to support this structure: + // Type: { + // answer: { + // result: "answer", + // startPosition: 1 + // endPosition: 2 + // distance: 0.2 + // property: "propName" + // hasAnswer: true + // } + // } + assert.NotNil(t, answer) + assert.Equal(t, "ClassAdditionalAnswer", answer.Type.Name()) + assert.NotNil(t, answer.Type) + answerObject, answerObjectOK := answer.Type.(*graphql.Object) + assert.True(t, answerObjectOK) + assert.Equal(t, 7, len(answerObject.Fields())) + assert.NotNil(t, answerObject.Fields()["result"]) + assert.NotNil(t, answerObject.Fields()["startPosition"]) + assert.NotNil(t, answerObject.Fields()["endPosition"]) + assert.NotNil(t, answerObject.Fields()["property"]) + assert.NotNil(t, answerObject.Fields()["certainty"]) + assert.NotNil(t, answerObject.Fields()["distance"]) + assert.NotNil(t, answerObject.Fields()["hasAnswer"]) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer_result.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer_result.go new file mode 100644 index 0000000000000000000000000000000000000000..5877d00107571742641f2a6dd2cccabec45059f7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer_result.go @@ -0,0 +1,159 @@ +// _ _ +// __ _____ __ ___ ___ __ 
_| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package answer + +import ( + "context" + "errors" + "sort" + "strings" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/search" + qnamodels "github.com/weaviate/weaviate/modules/qna-transformers/additional/models" + "github.com/weaviate/weaviate/modules/qna-transformers/ent" +) + +func (p *AnswerProvider) findAnswer(ctx context.Context, + in []search.Result, params *Params, limit *int, + argumentModuleParams map[string]interface{}, +) ([]search.Result, error) { + if len(in) > 0 { + question := p.paramsHelper.GetQuestion(argumentModuleParams["ask"]) + if question == "" { + return in, errors.New("empty question") + } + properties := p.paramsHelper.GetProperties(argumentModuleParams["ask"]) + + for i := range in { + textProperties := map[string]string{} + schema := in[i].Object().Properties.(map[string]interface{}) + for property, value := range schema { + if p.containsProperty(property, properties) { + if valueString, ok := value.(string); ok && len(valueString) > 0 { + textProperties[property] = valueString + } + } + } + + texts := []string{} + for _, value := range textProperties { + texts = append(texts, value) + } + text := strings.Join(texts, " ") + if len(text) == 0 { + return in, errors.New("empty content") + } + + answer, err := p.qna.Answer(ctx, text, question) + if err != nil { + return in, err + } + + ap := in[i].AdditionalProperties + if ap == nil { + ap = models.AdditionalProperties{} + } + + if answerMeetsSimilarityThreshold(argumentModuleParams["ask"], p.paramsHelper, answer) { + propertyName, startPos, endPos := p.findProperty(answer.Answer, textProperties) + ap["answer"] = &qnamodels.Answer{ + Result: answer.Answer, + Property: propertyName, + StartPosition: 
startPos, + EndPosition: endPos, + Certainty: answer.Certainty, + Distance: answer.Distance, + HasAnswer: answer.Answer != nil, + } + } else { + ap["answer"] = &qnamodels.Answer{ + HasAnswer: false, + } + } + + in[i].AdditionalProperties = ap + } + } + + rerank := p.paramsHelper.GetRerank(argumentModuleParams["ask"]) + if rerank { + return p.rerank(in), nil + } + return in, nil +} + +func answerMeetsSimilarityThreshold(params interface{}, helper paramsHelper, ans *ent.AnswerResult) bool { + certainty := helper.GetCertainty(params) + if certainty > 0 && ans.Certainty != nil && *ans.Certainty < certainty { + return false + } + + distance := helper.GetDistance(params) + if distance > 0 && ans.Distance != nil && *ans.Distance > distance { + return false + } + + return true +} + +func (p *AnswerProvider) rerank(in []search.Result) []search.Result { + if len(in) > 0 { + sort.SliceStable(in, func(i, j int) bool { + return p.getAnswerCertainty(in[i]) > p.getAnswerCertainty(in[j]) + }) + } + return in +} + +func (p *AnswerProvider) getAnswerCertainty(result search.Result) float64 { + answerObj, ok := result.AdditionalProperties["answer"] + if ok { + answer, ok := answerObj.(*qnamodels.Answer) + if ok { + if answer.HasAnswer { + return *answer.Certainty + } + } + } + return 0 +} + +func (p *AnswerProvider) containsProperty(property string, properties []string) bool { + if len(properties) == 0 { + return true + } + for i := range properties { + if properties[i] == property { + return true + } + } + return false +} + +func (p *AnswerProvider) findProperty(answer *string, textProperties map[string]string) (*string, int, int) { + if answer == nil { + return nil, 0, 0 + } + lowercaseAnswer := strings.ToLower(*answer) + if len(lowercaseAnswer) > 0 { + for property, value := range textProperties { + lowercaseValue := strings.ToLower(strings.ReplaceAll(value, "\n", " ")) + if strings.Contains(lowercaseValue, lowercaseAnswer) { + startIndex := strings.Index(lowercaseValue, 
lowercaseAnswer) + return &property, startIndex, startIndex + len(lowercaseAnswer) + } + } + } + propertyNotFound := "" + return &propertyNotFound, 0, 0 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..cb19bbc124bd3d5536e76bea85baa86972ce39dd --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/answer/answer_test.go @@ -0,0 +1,577 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package answer + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/search" + qnamodels "github.com/weaviate/weaviate/modules/qna-transformers/additional/models" + "github.com/weaviate/weaviate/modules/qna-transformers/ent" +) + +func TestAdditionalAnswerProvider(t *testing.T) { + t.Run("should fail with empty content", func(t *testing.T) { + // given + qnaClient := &fakeQnAClient{} + fakeHelper := &fakeParamsHelper{} + answerProvider := New(qnaClient, fakeHelper) + in := []search.Result{ + { + ID: "some-uuid", + }, + } + fakeParams := &Params{} + limit := 1 + argumentModuleParams := map[string]interface{}{} + + // when + out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + + // then + require.NotNil(t, err) + require.NotEmpty(t, out) + assert.Error(t, err, "empty content") + }) + + t.Run("should fail with empty question", func(t *testing.T) { + // given + qnaClient 
:= &fakeQnAClient{} + fakeHelper := &fakeParamsHelper{} + answerProvider := New(qnaClient, fakeHelper) + in := []search.Result{ + { + ID: "some-uuid", + Schema: map[string]interface{}{ + "content": "content", + }, + }, + } + fakeParams := &Params{} + limit := 1 + argumentModuleParams := map[string]interface{}{} + + // when + out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + + // then + require.NotNil(t, err) + require.NotEmpty(t, out) + assert.Error(t, err, "empty content") + }) + + t.Run("should answer", func(t *testing.T) { + // given + qnaClient := &fakeQnAClient{} + fakeHelper := &fakeParamsHelper{} + answerProvider := New(qnaClient, fakeHelper) + in := []search.Result{ + { + ID: "some-uuid", + Schema: map[string]interface{}{ + "content": "content", + }, + }, + } + fakeParams := &Params{} + limit := 1 + argumentModuleParams := map[string]interface{}{ + "ask": map[string]interface{}{ + "question": "question", + }, + } + + // when + out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + + // then + require.Nil(t, err) + require.NotEmpty(t, out) + assert.Equal(t, 1, len(in)) + answer, answerOK := in[0].AdditionalProperties["answer"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK := answer.(*qnamodels.Answer) + assert.True(t, answerAdditionalOK) + assert.Equal(t, "answer", *answerAdditional.Result) + }) + + t.Run("should answer with property", func(t *testing.T) { + // given + qnaClient := &fakeQnAClient{} + fakeHelper := &fakeParamsHelper{} + answerProvider := New(qnaClient, fakeHelper) + in := []search.Result{ + { + ID: "some-uuid", + Schema: map[string]interface{}{ + "content": "content with answer", + "content2": "this one is just a title", + }, + }, + } + fakeParams := &Params{} + limit := 1 + argumentModuleParams := map[string]interface{}{ + "ask": map[string]interface{}{ + 
"question": "question", + "properties": []string{"content", "content2"}, + }, + } + + // when + out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + + // then + require.Nil(t, err) + require.NotEmpty(t, out) + assert.Equal(t, 1, len(in)) + answer, answerOK := in[0].AdditionalProperties["answer"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK := answer.(*qnamodels.Answer) + assert.True(t, answerAdditionalOK) + assert.Equal(t, "answer", *answerAdditional.Result) + assert.Equal(t, "content", *answerAdditional.Property) + assert.Equal(t, 0.8, *answerAdditional.Certainty) + assert.InDelta(t, 0.4, *answerAdditional.Distance, 1e-9) + assert.Equal(t, 13, answerAdditional.StartPosition) + assert.Equal(t, 19, answerAdditional.EndPosition) + assert.Equal(t, true, answerAdditional.HasAnswer) + }) + + t.Run("should answer with similarity set above ask distance", func(t *testing.T) { + // given + qnaClient := &fakeQnAClient{} + fakeHelper := &fakeParamsHelper{} + answerProvider := New(qnaClient, fakeHelper) + in := []search.Result{ + { + ID: "some-uuid", + Schema: map[string]interface{}{ + "content": "content with answer", + "content2": "this one is just a title", + }, + }, + } + fakeParams := &Params{} + limit := 1 + argumentModuleParams := map[string]interface{}{ + "ask": map[string]interface{}{ + "question": "question", + "properties": []string{"content", "content2"}, + "distance": float64(0.4), + }, + } + + // when + out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + + // then + require.Nil(t, err) + require.NotEmpty(t, out) + assert.Equal(t, 1, len(out)) + answer, answerOK := out[0].AdditionalProperties["answer"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK := answer.(*qnamodels.Answer) + assert.True(t, answerAdditionalOK) + assert.Equal(t, 
"answer", *answerAdditional.Result) + assert.Equal(t, "content", *answerAdditional.Property) + assert.Equal(t, 0.8, *answerAdditional.Certainty) + assert.Equal(t, *additional.CertaintyToDistPtr(ptFloat(0.8)), *answerAdditional.Distance) + assert.Equal(t, 13, answerAdditional.StartPosition) + assert.Equal(t, 19, answerAdditional.EndPosition) + assert.Equal(t, true, answerAdditional.HasAnswer) + }) + + t.Run("should answer with similarity set above ask certainty", func(t *testing.T) { + // given + qnaClient := &fakeQnAClient{} + fakeHelper := &fakeParamsHelper{} + answerProvider := New(qnaClient, fakeHelper) + in := []search.Result{ + { + ID: "some-uuid", + Schema: map[string]interface{}{ + "content": "content with answer", + "content2": "this one is just a title", + }, + }, + } + fakeParams := &Params{} + limit := 1 + argumentModuleParams := map[string]interface{}{ + "ask": map[string]interface{}{ + "question": "question", + "properties": []string{"content", "content2"}, + "certainty": float64(0.8), + }, + } + + // when + out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + + // then + require.Nil(t, err) + require.NotEmpty(t, out) + assert.Equal(t, 1, len(out)) + answer, answerOK := out[0].AdditionalProperties["answer"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK := answer.(*qnamodels.Answer) + assert.True(t, answerAdditionalOK) + assert.Equal(t, "answer", *answerAdditional.Result) + assert.Equal(t, "content", *answerAdditional.Property) + assert.Equal(t, 0.8, *answerAdditional.Certainty) + assert.Equal(t, *additional.CertaintyToDistPtr(ptFloat(0.8)), *answerAdditional.Distance) + assert.Equal(t, 13, answerAdditional.StartPosition) + assert.Equal(t, 19, answerAdditional.EndPosition) + assert.Equal(t, true, answerAdditional.HasAnswer) + }) + + t.Run("should not answer with distance set below ask distance", func(t *testing.T) { + // given + qnaClient 
:= &fakeQnAClient{} + fakeHelper := &fakeParamsHelper{} + answerProvider := New(qnaClient, fakeHelper) + in := []search.Result{ + { + ID: "some-uuid", + Schema: map[string]interface{}{ + "content": "content with answer", + "content2": "this one is just a title", + }, + }, + } + fakeParams := &Params{} + limit := 1 + argumentModuleParams := map[string]interface{}{ + "ask": map[string]interface{}{ + "question": "question", + "properties": []string{"content", "content2"}, + "distance": float64(0.19), + }, + } + + // when + out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + + // then + require.Nil(t, err) + require.NotEmpty(t, out) + assert.Equal(t, 1, len(in)) + answer, answerOK := in[0].AdditionalProperties["answer"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK := answer.(*qnamodels.Answer) + assert.True(t, answerAdditionalOK) + assert.True(t, answerAdditional.Result == nil) + assert.True(t, answerAdditional.Property == nil) + assert.True(t, answerAdditional.Certainty == nil) + assert.True(t, answerAdditional.Distance == nil) + assert.Equal(t, 0, answerAdditional.StartPosition) + assert.Equal(t, 0, answerAdditional.EndPosition) + assert.Equal(t, false, answerAdditional.HasAnswer) + }) + + t.Run("should not answer with certainty set below ask certainty", func(t *testing.T) { + // given + qnaClient := &fakeQnAClient{} + fakeHelper := &fakeParamsHelper{} + answerProvider := New(qnaClient, fakeHelper) + in := []search.Result{ + { + ID: "some-uuid", + Schema: map[string]interface{}{ + "content": "content with answer", + "content2": "this one is just a title", + }, + }, + } + fakeParams := &Params{} + limit := 1 + argumentModuleParams := map[string]interface{}{ + "ask": map[string]interface{}{ + "question": "question", + "properties": []string{"content", "content2"}, + "certainty": float64(0.81), + }, + } + + // when + out, err := 
answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + + // then + require.Nil(t, err) + require.NotEmpty(t, out) + assert.Equal(t, 1, len(in)) + answer, answerOK := in[0].AdditionalProperties["answer"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK := answer.(*qnamodels.Answer) + assert.True(t, answerAdditionalOK) + assert.True(t, answerAdditional.Result == nil) + assert.True(t, answerAdditional.Property == nil) + assert.True(t, answerAdditional.Certainty == nil) + assert.True(t, answerAdditional.Distance == nil) + assert.Equal(t, 0, answerAdditional.StartPosition) + assert.Equal(t, 0, answerAdditional.EndPosition) + assert.Equal(t, false, answerAdditional.HasAnswer) + }) + + t.Run("should answer with certainty set above ask certainty and the results should be reranked", func(t *testing.T) { + // given + qnaClient := &fakeQnAClient{} + fakeHelper := &fakeParamsHelper{} + answerProvider := New(qnaClient, fakeHelper) + in := []search.Result{ + { + ID: "uuid1", + Schema: map[string]interface{}{ + "content": "rerank 0.5", + }, + }, + { + ID: "uuid2", + Schema: map[string]interface{}{ + "content": "rerank 0.2", + }, + }, + { + ID: "uuid3", + Schema: map[string]interface{}{ + "content": "rerank 0.9", + }, + }, + } + fakeParams := &Params{} + limit := 1 + argumentModuleParams := map[string]interface{}{ + "ask": map[string]interface{}{ + "question": "question", + "properties": []string{"content"}, + "rerank": true, + }, + } + + // when + out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + + // then + require.Nil(t, err) + require.NotEmpty(t, out) + assert.Equal(t, 3, len(in)) + answer, answerOK := in[0].AdditionalProperties["answer"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK := answer.(*qnamodels.Answer) + assert.True(t, answerAdditionalOK) + 
assert.Equal(t, "rerank 0.9", *answerAdditional.Result) + assert.Equal(t, "content", *answerAdditional.Property) + assert.Equal(t, 0.9, *answerAdditional.Certainty) + assert.Equal(t, *additional.CertaintyToDistPtr(ptFloat(0.9)), *answerAdditional.Distance) + assert.Equal(t, 0, answerAdditional.StartPosition) + assert.Equal(t, 10, answerAdditional.EndPosition) + assert.Equal(t, true, answerAdditional.HasAnswer) + + answer, answerOK = in[1].AdditionalProperties["answer"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK = answer.(*qnamodels.Answer) + assert.True(t, answerAdditionalOK) + assert.Equal(t, "rerank 0.5", *answerAdditional.Result) + assert.Equal(t, "content", *answerAdditional.Property) + assert.Equal(t, 0.5, *answerAdditional.Certainty) + assert.Equal(t, *additional.CertaintyToDistPtr(ptFloat(0.5)), *answerAdditional.Distance) + assert.Equal(t, 0, answerAdditional.StartPosition) + assert.Equal(t, 10, answerAdditional.EndPosition) + assert.Equal(t, true, answerAdditional.HasAnswer) + + answer, answerOK = in[2].AdditionalProperties["answer"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK = answer.(*qnamodels.Answer) + assert.True(t, answerAdditionalOK) + assert.Equal(t, "rerank 0.2", *answerAdditional.Result) + assert.Equal(t, "content", *answerAdditional.Property) + assert.Equal(t, 0.2, *answerAdditional.Certainty) + assert.Equal(t, *additional.CertaintyToDistPtr(ptFloat(0.2)), *answerAdditional.Distance) + assert.Equal(t, 0, answerAdditional.StartPosition) + assert.Equal(t, 10, answerAdditional.EndPosition) + assert.Equal(t, true, answerAdditional.HasAnswer) + }) + + t.Run("should answer with certainty set above ask certainty and the results should not be reranked", func(t *testing.T) { + // given + qnaClient := &fakeQnAClient{} + fakeHelper := &fakeParamsHelper{} + answerProvider := New(qnaClient, fakeHelper) + in := []search.Result{ + { + ID: "uuid1", + Schema: 
map[string]interface{}{ + "content": "rerank 0.5", + }, + }, + { + ID: "uuid2", + Schema: map[string]interface{}{ + "content": "rerank 0.2", + }, + }, + { + ID: "uuid3", + Schema: map[string]interface{}{ + "content": "rerank 0.9", + }, + }, + } + fakeParams := &Params{} + limit := 1 + argumentModuleParams := map[string]interface{}{ + "ask": map[string]interface{}{ + "question": "question", + "properties": []string{"content"}, + "rerank": false, + }, + } + + // when + out, err := answerProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + + // then + require.Nil(t, err) + require.NotEmpty(t, out) + assert.Equal(t, 3, len(in)) + answer, answerOK := in[0].AdditionalProperties["answer"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK := answer.(*qnamodels.Answer) + assert.True(t, answerAdditionalOK) + assert.Equal(t, "rerank 0.5", *answerAdditional.Result) + assert.Equal(t, "content", *answerAdditional.Property) + assert.Equal(t, 0.5, *answerAdditional.Certainty) + assert.Equal(t, *additional.CertaintyToDistPtr(ptFloat(0.5)), *answerAdditional.Distance) + assert.Equal(t, 0, answerAdditional.StartPosition) + assert.Equal(t, 10, answerAdditional.EndPosition) + assert.Equal(t, true, answerAdditional.HasAnswer) + + answer, answerOK = in[1].AdditionalProperties["answer"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK = answer.(*qnamodels.Answer) + assert.True(t, answerAdditionalOK) + assert.Equal(t, "rerank 0.2", *answerAdditional.Result) + assert.Equal(t, "content", *answerAdditional.Property) + assert.Equal(t, 0.2, *answerAdditional.Certainty) + assert.Equal(t, *additional.CertaintyToDistPtr(ptFloat(0.2)), *answerAdditional.Distance) + assert.Equal(t, 0, answerAdditional.StartPosition) + assert.Equal(t, 10, answerAdditional.EndPosition) + assert.Equal(t, true, answerAdditional.HasAnswer) + + answer, answerOK = 
in[2].AdditionalProperties["answer"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK = answer.(*qnamodels.Answer) + assert.True(t, answerAdditionalOK) + assert.Equal(t, "rerank 0.9", *answerAdditional.Result) + assert.Equal(t, "content", *answerAdditional.Property) + assert.Equal(t, 0.9, *answerAdditional.Certainty) + assert.Equal(t, *additional.CertaintyToDistPtr(ptFloat(0.9)), *answerAdditional.Distance) + assert.Equal(t, 0, answerAdditional.StartPosition) + assert.Equal(t, 10, answerAdditional.EndPosition) + assert.Equal(t, true, answerAdditional.HasAnswer) + }) +} + +type fakeQnAClient struct{} + +func (c *fakeQnAClient) Answer(ctx context.Context, + text, question string, +) (*ent.AnswerResult, error) { + if text == "rerank 0.9" { + return c.getAnswer(question, "rerank 0.9", 0.9), nil + } + if text == "rerank 0.5" { + return c.getAnswer(question, "rerank 0.5", 0.5), nil + } + if text == "rerank 0.2" { + return c.getAnswer(question, "rerank 0.2", 0.2), nil + } + return c.getAnswer(question, "answer", 0.8), nil +} + +func (c *fakeQnAClient) getAnswer(question, answer string, certainty float64) *ent.AnswerResult { + return &ent.AnswerResult{ + Text: question, + Question: question, + Answer: &answer, + Certainty: &certainty, + Distance: additional.CertaintyToDistPtr(&certainty), + } +} + +type fakeParamsHelper struct{} + +func (h *fakeParamsHelper) GetQuestion(params interface{}) string { + if fakeParamsMap, ok := params.(map[string]interface{}); ok { + if question, ok := fakeParamsMap["question"].(string); ok { + return question + } + } + return "" +} + +func (h *fakeParamsHelper) GetProperties(params interface{}) []string { + if fakeParamsMap, ok := params.(map[string]interface{}); ok { + if properties, ok := fakeParamsMap["properties"].([]string); ok { + return properties + } + } + return nil +} + +func (h *fakeParamsHelper) GetCertainty(params interface{}) float64 { + if fakeParamsMap, ok := 
params.(map[string]interface{}); ok { + if certainty, ok := fakeParamsMap["certainty"].(float64); ok { + return certainty + } + } + return 0 +} + +func (h *fakeParamsHelper) GetDistance(params interface{}) float64 { + if fakeParamsMap, ok := params.(map[string]interface{}); ok { + if distance, ok := fakeParamsMap["distance"].(float64); ok { + return distance + } + } + return 0 +} + +func (h *fakeParamsHelper) GetRerank(params interface{}) bool { + if fakeParamsMap, ok := params.(map[string]interface{}); ok { + if rerank, ok := fakeParamsMap["rerank"].(bool); ok { + return rerank + } + } + return false +} + +func ptFloat(f float64) *float64 { + return &f +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/models/models.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/models/models.go new file mode 100644 index 0000000000000000000000000000000000000000..20f79305edd0b034572eb27bdb197bb47e59e627 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/models/models.go @@ -0,0 +1,24 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package models + +// Answer used in qna module to represent +// the answer to a given question +type Answer struct { + Result *string `json:"result,omitempty"` + Property *string `json:"property,omitempty"` + StartPosition int `json:"startPosition,omitempty"` + EndPosition int `json:"endPosition,omitempty"` + Certainty *float64 `json:"certainty,omitempty"` + Distance *float64 `json:"distance,omitempty"` + HasAnswer bool `json:"hasAnswer,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/provider.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..ed115442e664633b9235c4a0ffd8be00d993cfc2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/additional/provider.go @@ -0,0 +1,59 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package additional + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/tailor-inc/graphql" + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/search" +) + +type AdditionalProperty interface { + AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig) ([]search.Result, error) + ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} + AdditionalPropertyDefaultValue() interface{} + AdditionalFieldFn(classname string) *graphql.Field +} + +type GraphQLAdditionalArgumentsProvider struct { + answerProvider AdditionalProperty +} + +func New(answerProvider AdditionalProperty) *GraphQLAdditionalArgumentsProvider { + return &GraphQLAdditionalArgumentsProvider{answerProvider} +} + +func (p *GraphQLAdditionalArgumentsProvider) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + additionalProperties := map[string]modulecapabilities.AdditionalProperty{} + additionalProperties["answer"] = p.getAnswer() + return additionalProperties +} + +func (p *GraphQLAdditionalArgumentsProvider) getAnswer() modulecapabilities.AdditionalProperty { + return modulecapabilities.AdditionalProperty{ + GraphQLNames: []string{"answer"}, + GraphQLFieldFunction: p.answerProvider.AdditionalFieldFn, + GraphQLExtractFunction: p.answerProvider.ExtractAdditionalFn, + SearchFunctions: modulecapabilities.AdditionalSearch{ + ExploreGet: p.answerProvider.AdditionalPropertyFn, + ExploreList: p.answerProvider.AdditionalPropertyFn, + }, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask.go new file mode 100644 index 
0000000000000000000000000000000000000000..88b0742690d5eef77ad74b749c4905ba0e98925d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask.go @@ -0,0 +1,40 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modqna + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/modules/qna-transformers/ask" +) + +func (m *QnAModule) initAskSearcher() error { + m.searcher = ask.NewSearcher(m.nearTextDependencies) + return nil +} + +func (m *QnAModule) initAskProvider() error { + m.graphqlProvider = ask.New(m.askTextTransformer) + return nil +} + +func (m *QnAModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *QnAModule) VectorSearches() map[string]map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.DependencySearcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a92cd4a246b7df2999c90a9481902c47d1cfb56a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/fakes_for_test.go @@ -0,0 +1,21 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +type fakeTransformer struct{} + +func (t *fakeTransformer) Transform(in []string) ([]string, error) { + if len(in) == 1 && in[0] == "transform this" { + return []string{"transformed text"}, nil + } + return in, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/graphql_argument.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/graphql_argument.go new file mode 100644 index 0000000000000000000000000000000000000000..175400b9c500e81b24dd546d8fccf901766438f2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/graphql_argument.go @@ -0,0 +1,76 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "fmt" + + "github.com/tailor-inc/graphql" + "github.com/weaviate/weaviate/adapters/handlers/graphql/descriptions" +) + +func (g *GraphQLArgumentsProvider) getAskArgumentFn(classname string) *graphql.ArgumentConfig { + return g.askArgument("GetObjects", classname) +} + +func (g *GraphQLArgumentsProvider) exploreAskArgumentFn() *graphql.ArgumentConfig { + return g.askArgument("Explore", "") +} + +func (g *GraphQLArgumentsProvider) aggregateAskArgumentFn(classname string) *graphql.ArgumentConfig { + return g.askArgument("Aggregate", classname) +} + +func (g *GraphQLArgumentsProvider) askArgument(prefix, className string) *graphql.ArgumentConfig { + prefixName := fmt.Sprintf("QnATransformers%s%s", prefix, className) + return &graphql.ArgumentConfig{ + Type: graphql.NewInputObject( + graphql.InputObjectConfig{ + Name: fmt.Sprintf("%sAskInpObj", prefixName), + Fields: g.askFields(prefixName), + Description: descriptions.GetWhereInpObj, + }, + ), + } +} + +func (g 
*GraphQLArgumentsProvider) askFields(prefix string) graphql.InputObjectConfigFieldMap { + askFields := graphql.InputObjectConfigFieldMap{ + "question": &graphql.InputObjectFieldConfig{ + Description: "Question to be answered", + Type: graphql.NewNonNull(graphql.String), + }, + "certainty": &graphql.InputObjectFieldConfig{ + Description: descriptions.Certainty, + Type: graphql.Float, + }, + "distance": &graphql.InputObjectFieldConfig{ + Description: descriptions.Distance, + Type: graphql.Float, + }, + "properties": &graphql.InputObjectFieldConfig{ + Description: "Properties which contains text", + Type: graphql.NewList(graphql.String), + }, + "rerank": &graphql.InputObjectFieldConfig{ + Description: "Arranges the results by certainty", + Type: graphql.Boolean, + }, + } + if g.askTransformer != nil { + askFields["autocorrect"] = &graphql.InputObjectFieldConfig{ + Description: "Autocorrect input text values", + Type: graphql.Boolean, + } + } + return askFields +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/graphql_argument_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/graphql_argument_test.go new file mode 100644 index 0000000000000000000000000000000000000000..fbc8f070c1772c6eb0d196c3be8124fa88b5acea --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/graphql_argument_test.go @@ -0,0 +1,97 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/tailor-inc/graphql" +) + +func TestAskGraphQLArgument(t *testing.T) { + t.Run("should generate ask argument properly", func(t *testing.T) { + // given + prefix := "Prefix" + classname := "Class" + // when + ask := New(nil).askArgument(prefix, classname) + + // then + // the built graphQL field needs to support this structure: + // ask { + // question: "question?", + // distance: 0.9 + // properties: ["prop1", "prop2"] + // rerank: true + // } + assert.NotNil(t, ask) + assert.Equal(t, "QnATransformersPrefixClassAskInpObj", ask.Type.Name()) + askFields, ok := ask.Type.(*graphql.InputObject) + assert.True(t, ok) + assert.NotNil(t, askFields) + assert.Equal(t, 5, len(askFields.Fields())) + fields := askFields.Fields() + question := fields["question"] + questionNonNull, questionNonNullOK := question.Type.(*graphql.NonNull) + assert.True(t, questionNonNullOK) + assert.Equal(t, "String", questionNonNull.OfType.Name()) + assert.NotNil(t, question) + assert.NotNil(t, fields["certainty"]) + assert.NotNil(t, fields["distance"]) + properties := fields["properties"] + propertiesList, propertiesListOK := properties.Type.(*graphql.List) + assert.True(t, propertiesListOK) + assert.Equal(t, "String", propertiesList.OfType.Name()) + assert.NotNil(t, fields["rerank"]) + }) +} + +func TestAskGraphQLArgumentWithAutocorrect(t *testing.T) { + t.Run("should generate ask argument properly with autocorrect", func(t *testing.T) { + // given + prefix := "Prefix" + classname := "Class" + // when + ask := New(&fakeTransformer{}).askArgument(prefix, classname) + + // then + // the built graphQL field needs to support this structure: + // ask { + // question: "question?", + // distance: 0.9 + // properties: ["prop1", "prop2"] + // autocorrect: true + // rerank: true + // } + assert.NotNil(t, ask) + assert.Equal(t, "QnATransformersPrefixClassAskInpObj", 
ask.Type.Name()) + askFields, ok := ask.Type.(*graphql.InputObject) + assert.True(t, ok) + assert.NotNil(t, askFields) + assert.Equal(t, 6, len(askFields.Fields())) + fields := askFields.Fields() + question := fields["question"] + questionNonNull, questionNonNullOK := question.Type.(*graphql.NonNull) + assert.True(t, questionNonNullOK) + assert.Equal(t, "String", questionNonNull.OfType.Name()) + assert.NotNil(t, question) + assert.NotNil(t, fields["certainty"]) + assert.NotNil(t, fields["distance"]) + properties := fields["properties"] + propertiesList, propertiesListOK := properties.Type.(*graphql.List) + assert.True(t, propertiesListOK) + assert.Equal(t, "String", propertiesList.OfType.Name()) + assert.NotNil(t, fields["autocorrect"]) + assert.NotNil(t, fields["rerank"]) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/graphql_provider.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/graphql_provider.go new file mode 100644 index 0000000000000000000000000000000000000000..04dff2a0021acee150e109aaca01ed243d4980da --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/graphql_provider.go @@ -0,0 +1,40 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" +) + +type GraphQLArgumentsProvider struct { + askTransformer modulecapabilities.TextTransform +} + +func New(askTransformer modulecapabilities.TextTransform) *GraphQLArgumentsProvider { + return &GraphQLArgumentsProvider{askTransformer} +} + +func (g *GraphQLArgumentsProvider) Arguments() map[string]modulecapabilities.GraphQLArgument { + arguments := map[string]modulecapabilities.GraphQLArgument{} + arguments["ask"] = g.getAsk() + return arguments +} + +func (g *GraphQLArgumentsProvider) getAsk() modulecapabilities.GraphQLArgument { + return modulecapabilities.GraphQLArgument{ + GetArgumentsFunction: g.getAskArgumentFn, + AggregateArgumentsFunction: g.aggregateAskArgumentFn, + ExploreArgumentsFunction: g.exploreAskArgumentFn, + ExtractFunction: g.extractAskFn, + ValidateFunction: g.validateAskFn, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/grapqhl_extract.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/grapqhl_extract.go new file mode 100644 index 0000000000000000000000000000000000000000..bb4f0f0c44859f47f8cef0e596cb0b85603e950c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/grapqhl_extract.go @@ -0,0 +1,64 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import "github.com/weaviate/weaviate/entities/dto" + +func (g *GraphQLArgumentsProvider) extractAskFn(source map[string]interface{}) (interface{}, *dto.TargetCombination, error) { + var args AskParams + + question, ok := source["question"].(string) + if ok { + args.Question = question + } + + // autocorrect is an optional arg, so it could be nil + autocorrect, ok := source["autocorrect"] + if ok { + args.Autocorrect = autocorrect.(bool) + } + + // if there's text transformer present and autocorrect set to true + // perform text transformation operation + if args.Autocorrect && g.askTransformer != nil { + if transformedValues, err := g.askTransformer.Transform([]string{args.Question}); err == nil && len(transformedValues) == 1 { + args.Question = transformedValues[0] + } + } + + certainty, ok := source["certainty"] + if ok { + args.Certainty = certainty.(float64) + } + + distance, ok := source["distance"] + if ok { + args.Distance = distance.(float64) + args.WithDistance = true + } + + properties, ok := source["properties"].([]interface{}) + if ok { + args.Properties = make([]string, len(properties)) + for i, value := range properties { + args.Properties[i] = value.(string) + } + } + + // rerank is an optional arg, so it could be nil + rerank, ok := source["rerank"] + if ok { + args.Rerank = rerank.(bool) + } + + return &args, nil, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/grapqhl_extract_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/grapqhl_extract_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ebfe6e9c265e817fcf3fa624f7190fba10c5a892 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/grapqhl_extract_test.go @@ -0,0 +1,283 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ 
\___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "reflect" + "testing" +) + +func Test_extractAskFn(t *testing.T) { + type args struct { + source map[string]interface{} + } + tests := []struct { + name string + args args + want interface{} + }{ + { + name: "should parse properly with only question", + args: args{ + source: map[string]interface{}{ + "question": "some question", + }, + }, + want: &AskParams{ + Question: "some question", + }, + }, + { + name: "should parse properly with question and distance", + args: args{ + source: map[string]interface{}{ + "question": "some question", + "distance": 0.8, + }, + }, + want: &AskParams{ + Question: "some question", + Distance: 0.8, + WithDistance: true, + }, + }, + { + name: "should parse properly with question and certainty", + args: args{ + source: map[string]interface{}{ + "question": "some question", + "certainty": 0.8, + }, + }, + want: &AskParams{ + Question: "some question", + Certainty: 0.8, + }, + }, + { + name: "should parse properly without params", + args: args{ + source: map[string]interface{}{}, + }, + want: &AskParams{}, + }, + { + name: "should parse properly with question, distance, and properties", + args: args{ + source: map[string]interface{}{ + "question": "some question", + "distance": 0.8, + "properties": []interface{}{"prop1", "prop2"}, + }, + }, + want: &AskParams{ + Question: "some question", + Distance: 0.8, + WithDistance: true, + Properties: []string{"prop1", "prop2"}, + }, + }, + { + name: "should parse properly with question and certainty and properties", + args: args{ + source: map[string]interface{}{ + "question": "some question", + "certainty": 0.8, + "properties": []interface{}{"prop1", "prop2"}, + }, + }, + want: &AskParams{ + Question: "some question", + Certainty: 0.8, + Properties: []string{"prop1", "prop2"}, + }, + }, + { + name: "should parse properly with 
question, distance, properties, and rerank", + args: args{ + source: map[string]interface{}{ + "question": "some question", + "distance": 0.8, + "properties": []interface{}{"prop1", "prop2"}, + "rerank": true, + }, + }, + want: &AskParams{ + Question: "some question", + Distance: 0.8, + WithDistance: true, + Properties: []string{"prop1", "prop2"}, + Rerank: true, + }, + }, + { + name: "should parse properly with question and certainty and properties and rerank", + args: args{ + source: map[string]interface{}{ + "question": "some question", + "certainty": 0.8, + "properties": []interface{}{"prop1", "prop2"}, + "rerank": true, + }, + }, + want: &AskParams{ + Question: "some question", + Certainty: 0.8, + Properties: []string{"prop1", "prop2"}, + Rerank: true, + }, + }, + } + + testsWithAutocorrect := []struct { + name string + args args + want interface{} + }{ + { + name: "should parse properly with only question and autocorrect", + args: args{ + source: map[string]interface{}{ + "question": "some question", + "autocorrect": true, + }, + }, + want: &AskParams{ + Question: "some question", + Autocorrect: true, + }, + }, + { + name: "should parse properly and transform text in question", + args: args{ + source: map[string]interface{}{ + "question": "transform this", + "autocorrect": true, + }, + }, + want: &AskParams{ + Question: "transformed text", + Autocorrect: true, + }, + }, + { + name: "should parse properly and not transform text in question", + args: args{ + source: map[string]interface{}{ + "question": "transform this", + "autocorrect": false, + }, + }, + want: &AskParams{ + Question: "transform this", + Autocorrect: false, + }, + }, + { + name: "should parse properly with question, distance, properties, and autocorrect", + args: args{ + source: map[string]interface{}{ + "question": "transform this", + "distance": 0.8, + "properties": []interface{}{"prop1", "prop2"}, + "autocorrect": true, + }, + }, + want: &AskParams{ + Question: "transformed text", + 
Distance: 0.8, + WithDistance: true, + Properties: []string{"prop1", "prop2"}, + Autocorrect: true, + }, + }, + { + name: "should parse properly with question and certainty and properties and autocorrect", + args: args{ + source: map[string]interface{}{ + "question": "transform this", + "certainty": 0.8, + "properties": []interface{}{"prop1", "prop2"}, + "autocorrect": true, + }, + }, + want: &AskParams{ + Question: "transformed text", + Certainty: 0.8, + Properties: []string{"prop1", "prop2"}, + Autocorrect: true, + }, + }, + { + name: "should parse properly with question, distance, properties, autocorrect, and rerank", + args: args{ + source: map[string]interface{}{ + "question": "transform this", + "distance": 0.8, + "properties": []interface{}{"prop1", "prop2"}, + "autocorrect": true, + "rerank": true, + }, + }, + want: &AskParams{ + Question: "transformed text", + Distance: 0.8, + WithDistance: true, + Properties: []string{"prop1", "prop2"}, + Autocorrect: true, + Rerank: true, + }, + }, + { + name: "should parse properly with question and certainty and properties and autocorrect and rerank", + args: args{ + source: map[string]interface{}{ + "question": "transform this", + "certainty": 0.8, + "properties": []interface{}{"prop1", "prop2"}, + "autocorrect": true, + "rerank": true, + }, + }, + want: &AskParams{ + Question: "transformed text", + Certainty: 0.8, + Properties: []string{"prop1", "prop2"}, + Autocorrect: true, + Rerank: true, + }, + }, + } + + testsWithAutocorrect = append(testsWithAutocorrect, tests...) 
+ + t.Run("should extract without text transformer", func(t *testing.T) { + provider := New(nil) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got, _, _ := provider.extractAskFn(tt.args.source); !reflect.DeepEqual(got, tt.want) { + t.Errorf("extractAskFn() = %v, want %v", got, tt.want) + } + }) + } + }) + t.Run("should extract with text transformer", func(t *testing.T) { + provider := New(&fakeTransformer{}) + for _, tt := range testsWithAutocorrect { + t.Run(tt.name, func(t *testing.T) { + if got, _, _ := provider.extractAskFn(tt.args.source); !reflect.DeepEqual(got, tt.want) { + t.Errorf("extractAskFn() = %v, want %v", got, tt.want) + } + }) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/param.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/param.go new file mode 100644 index 0000000000000000000000000000000000000000..14364c75271de70116cfcf6eaed78107699e7ab8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/param.go @@ -0,0 +1,56 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "github.com/pkg/errors" +) + +type AskParams struct { + Question string + Certainty float64 + Distance float64 + WithDistance bool + Properties []string + Autocorrect bool + Rerank bool +} + +func (n AskParams) GetCertainty() float64 { + return n.Certainty +} + +func (n AskParams) GetDistance() float64 { + return n.Distance +} + +func (n AskParams) SimilarityMetricProvided() bool { + return n.Certainty != 0 || n.WithDistance +} + +func (g *GraphQLArgumentsProvider) validateAskFn(param interface{}) error { + ask, ok := param.(*AskParams) + if !ok { + return errors.New("'ask' invalid parameter") + } + + if len(ask.Question) == 0 { + return errors.Errorf("'ask.question' needs to be defined") + } + + if ask.Certainty != 0 && ask.WithDistance { + return errors.Errorf( + "nearText cannot provide both distance and certainty") + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/param_helper.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/param_helper.go new file mode 100644 index 0000000000000000000000000000000000000000..8c58c8dc23abbb45ac79e8bc9e6b05458d6571c2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/param_helper.go @@ -0,0 +1,53 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +type ParamsHelper struct{} + +func NewParamsHelper() *ParamsHelper { + return &ParamsHelper{} +} + +func (p *ParamsHelper) GetQuestion(params interface{}) string { + if parameters, ok := params.(*AskParams); ok { + return parameters.Question + } + return "" +} + +func (p *ParamsHelper) GetProperties(params interface{}) []string { + if parameters, ok := params.(*AskParams); ok { + return parameters.Properties + } + return nil +} + +func (p *ParamsHelper) GetCertainty(params interface{}) float64 { + if parameters, ok := params.(*AskParams); ok { + return parameters.Certainty + } + return 0 +} + +func (p *ParamsHelper) GetDistance(params interface{}) float64 { + if parameters, ok := params.(*AskParams); ok { + return parameters.Distance + } + return 0 +} + +func (p *ParamsHelper) GetRerank(params interface{}) bool { + if parameters, ok := params.(*AskParams); ok { + return parameters.Rerank + } + return false +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/param_helper_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/param_helper_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1bc752c7cb61d7006c77a7a6780c3c62a7f6d196 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/param_helper_test.go @@ -0,0 +1,151 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "reflect" + "testing" +) + +func TestParamsHelper_GetQuestion(t *testing.T) { + type args struct { + params interface{} + } + tests := []struct { + name string + args args + want string + }{ + { + name: "should get question with certainty", + args: args{ + params: &AskParams{ + Question: "question", + Certainty: 0.8, + }, + }, + want: "question", + }, + { + name: "should get question with distance", + args: args{ + params: &AskParams{ + Question: "question", + Distance: 0.8, + }, + }, + want: "question", + }, + { + name: "should get empty string when empty params", + args: args{ + params: &AskParams{}, + }, + want: "", + }, + { + name: "should get empty string when nil params", + args: args{ + params: nil, + }, + want: "", + }, + { + name: "should get empty string when passed a struct, not a pointer to struct", + args: args{ + params: AskParams{}, + }, + want: "", + }, + { + name: "should get empty string when passed a struct with question, not a pointer to struct", + args: args{ + params: AskParams{ + Question: "question?", + }, + }, + want: "", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + p := &ParamsHelper{} + if got := p.GetQuestion(tt.args.params); got != tt.want { + t.Errorf("ParamsHelper.GetQuestion() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestParamsHelper_GetProperties(t *testing.T) { + type args struct { + params interface{} + } + tests := []struct { + name string + p *ParamsHelper + args args + want []string + }{ + { + name: "should get properties with distance", + args: args{ + params: &AskParams{ + Question: "question", + Properties: []string{"prop1", "prop2"}, + Distance: 0.8, + }, + }, + want: []string{"prop1", "prop2"}, + }, + { + name: "should get properties with certainty", + args: args{ + params: &AskParams{ + Question: "question", + Properties: []string{"prop1", "prop2"}, + Certainty: 0.8, + }, + }, + want: []string{"prop1", 
"prop2"}, + }, + { + name: "should get nil properties with empty pointer to AskParams", + args: args{ + params: &AskParams{}, + }, + want: nil, + }, + { + name: "should get nil properties with empty AskParams", + args: args{ + params: AskParams{}, + }, + want: nil, + }, + { + name: "should get nil properties with nil params", + args: args{ + params: nil, + }, + want: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + p := &ParamsHelper{} + if got := p.GetProperties(tt.args.params); !reflect.DeepEqual(got, tt.want) { + t.Errorf("ParamsHelper.GetProperties() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/param_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/param_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9e763e7ef52e4fb563c3ba4ee2b3788265f8fe31 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/param_test.go @@ -0,0 +1,77 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import "testing" + +func Test_validateAskFn(t *testing.T) { + type args struct { + param interface{} + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "should validate", + args: args{ + param: &AskParams{ + Question: "question", + }, + }, + }, + { + name: "should not validate when empty question", + args: args{ + param: &AskParams{ + Question: "", + }, + }, + wantErr: true, + }, + { + name: "should not validate when empty params", + args: args{ + param: &AskParams{}, + }, + wantErr: true, + }, + { + name: "should not validate when distance and certainty are present", + args: args{ + param: &AskParams{ + Distance: 0.1, + Certainty: 0.1, + }, + }, + wantErr: true, + }, + { + name: "should not validate when param passed is struct, not a pointer to struct", + args: args{ + param: AskParams{ + Question: "question", + }, + }, + wantErr: true, + }, + } + provider := New(nil) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := provider.validateAskFn(tt.args.param); (err != nil) != tt.wantErr { + t.Errorf("validateAskFn() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/searcher.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/searcher.go new file mode 100644 index 0000000000000000000000000000000000000000..dc25f3625b894b7465caae12a9139b819b17fbfc --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ask/searcher.go @@ -0,0 +1,72 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ask + +import ( + "context" + + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/generictypes" +) + +type vectorFromAskParam struct { + nearTextDep modulecapabilities.Dependency[[]float32] +} + +func (s *vectorFromAskParam) vectorForAskParamFn(ctx context.Context, params interface{}, + className string, + findVectorFn modulecapabilities.FindVectorFn[[]float32], + cfg moduletools.ClassConfig, +) ([]float32, error) { + return s.vectorFromAskParam(ctx, params.(*AskParams), className, findVectorFn, cfg) +} + +func (s *vectorFromAskParam) vectorFromAskParam(ctx context.Context, + params *AskParams, className string, + findVectorFn modulecapabilities.FindVectorFn[[]float32], + cfg moduletools.ClassConfig, +) ([]float32, error) { + arg := s.nearTextDep.GraphQLArgument() + + rawNearTextParam := map[string]interface{}{} + rawNearTextParam["concepts"] = []interface{}{params.Question} + + nearTextParam, _, _ := arg.ExtractFunction(rawNearTextParam) + vectorSearchFn := s.nearTextDep.VectorSearch() + + return vectorSearchFn.VectorForParams(ctx, nearTextParam, className, findVectorFn, cfg) +} + +type Searcher struct { + // nearText modules dependencies + nearTextDeps []modulecapabilities.Dependency[[]float32] +} + +func NewSearcher(nearTextDeps []modulecapabilities.Dependency[[]float32]) *Searcher { + return &Searcher{nearTextDeps} +} + +func (s *Searcher) VectorSearches() map[string]map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearchers := map[string]map[string]modulecapabilities.VectorForParams[[]float32]{} + for _, nearTextDep := range s.nearTextDeps { + vectorSearchers[nearTextDep.ModuleName()] = s.vectorSearches(nearTextDep) + } + return vectorSearchers +} + +func (s *Searcher) vectorSearches(nearTextDep modulecapabilities.Dependency[[]float32]) 
map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearches := map[string]modulecapabilities.VectorForParams[[]float32]{} + vectorFromAsk := &vectorFromAskParam{nearTextDep} + vectorSearches["ask"] = generictypes.VectorForParams(vectorFromAsk.vectorForAskParamFn) + return vectorSearches +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/qna.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/qna.go new file mode 100644 index 0000000000000000000000000000000000000000..79fc3c2874d220c4db4106126b74a727748470d2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/qna.go @@ -0,0 +1,105 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/modules/qna-transformers/ent" +) + +type qna struct { + origin string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(origin string, timeout time.Duration, logger logrus.FieldLogger) *qna { + return &qna{ + origin: origin, + httpClient: &http.Client{Timeout: timeout}, + logger: logger, + } +} + +func (q *qna) Answer(ctx context.Context, + text, question string, +) (*ent.AnswerResult, error) { + body, err := json.Marshal(answersInput{ + Text: text, + Question: question, + }) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", q.url("/answers/"), + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + + res, err 
:= q.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody answersResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + + if res.StatusCode > 399 { + return nil, errors.Errorf("fail with status %d: %s", res.StatusCode, + resBody.Error) + } + + return &ent.AnswerResult{ + Text: resBody.Text, + Question: resBody.Question, + Answer: resBody.Answer, + Certainty: resBody.Certainty, + Distance: additional.CertaintyToDistPtr(resBody.Certainty), + }, nil +} + +func (q *qna) url(path string) string { + return fmt.Sprintf("%s%s", q.origin, path) +} + +type answersInput struct { + Text string `json:"text"` + Question string `json:"question"` +} + +type answersResponse struct { + answersInput `json:"answersInput"` + Answer *string `json:"answer"` + Certainty *float64 `json:"certainty"` + Distance *float64 `json:"distance"` + Error string `json:"error"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/qna_meta.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/qna_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..fb874b89bed421e300255a00c9f46b28fb5945e5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/qna_meta.go @@ -0,0 +1,45 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + + "github.com/pkg/errors" +) + +func (q *qna) MetaInfo() (map[string]interface{}, error) { + req, err := http.NewRequestWithContext(context.Background(), "GET", q.url("/meta"), nil) + if err != nil { + return nil, errors.Wrap(err, "create GET meta request") + } + + res, err := q.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send GET meta request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read meta response body") + } + + var resBody map[string]interface{} + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, "unmarshal meta response body") + } + return resBody, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/qna_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/qna_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..932075f452892c217d8adac661b4cd2a54b54d78 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/qna_meta_test.go @@ -0,0 +1,135 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["model"] + assert.True(t, metaModel != nil) + model, modelOK := metaModel.(map[string]interface{}) + assert.True(t, modelOK) + assert.True(t, model["_name_or_path"] != nil) + assert.True(t, model["architectures"] != nil) + modelID2label, modelID2labelOK := model["id2label"].(map[string]interface{}) + assert.True(t, modelID2labelOK) + assert.True(t, modelID2label["0"] != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "model": { + "_name_or_path": "bert-large-uncased-whole-word-masking-finetuned-squad", + "add_cross_attention": false, + "architectures": [ + "BertForQuestionAnswering" + ], + "attention_probs_dropout_prob": 0.1, + "bad_words_ids": null, + "bos_token_id": null, + "chunk_size_feed_forward": 0, + "decoder_start_token_id": null, + "diversity_penalty": 0.0, + "do_sample": false, + "early_stopping": false, + "encoder_no_repeat_ngram_size": 0, + "eos_token_id": null, + "finetuning_task": null, + "gradient_checkpointing": false, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 1024, + "id2label": { 
+ "0": "LABEL_0", + "1": "LABEL_1" + }, + "initializer_range": 0.02, + "intermediate_size": 4096, + "is_decoder": false, + "is_encoder_decoder": false, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1 + }, + "layer_norm_eps": 1e-12, + "length_penalty": 1.0, + "max_length": 20, + "max_position_embeddings": 512, + "min_length": 0, + "model_type": "bert", + "no_repeat_ngram_size": 0, + "num_attention_heads": 16, + "num_beam_groups": 1, + "num_beams": 1, + "num_hidden_layers": 24, + "num_return_sequences": 1, + "output_attentions": false, + "output_hidden_states": false, + "output_scores": false, + "pad_token_id": 0, + "position_embedding_type": "absolute", + "prefix": null, + "pruned_heads": {}, + "repetition_penalty": 1.0, + "return_dict": true, + "return_dict_in_generate": false, + "sep_token_id": null, + "task_specific_params": null, + "temperature": 1.0, + "tie_encoder_decoder": false, + "tie_word_embeddings": true, + "tokenizer_class": null, + "top_k": 50, + "top_p": 1.0, + "torchscript": false, + "transformers_version": "4.3.2", + "type_vocab_size": 2, + "use_bfloat16": false, + "use_cache": true, + "vocab_size": 30522, + "xla_device": null + } +}` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/qna_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/qna_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8f00030becb7e6d41ed9c80905e5a71e60d2ddad --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/qna_test.go @@ -0,0 +1,129 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/modules/qna-transformers/ent" +) + +func TestGetAnswer(t *testing.T) { + t.Run("when the server has a successful answer (with distance)", func(t *testing.T) { + server := httptest.NewServer(&testAnswerHandler{ + t: t, + answer: answersResponse{ + answersInput: answersInput{ + Text: "My name is John", + Question: "What is my name?", + }, + Answer: ptString("John"), + Certainty: ptFloat(0.7), + Distance: ptFloat(0.3), + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + res, err := c.Answer(context.Background(), "My name is John", + "What is my name?") + assert.Nil(t, err) + + expectedResult := ent.AnswerResult{ + Text: "My name is John", + Question: "What is my name?", + Answer: ptString("John"), + Certainty: ptFloat(0.7), + Distance: ptFloat(0.6), + } + + assert.Equal(t, expectedResult.Text, res.Text) + assert.Equal(t, expectedResult.Question, res.Question) + assert.Equal(t, expectedResult.Answer, res.Answer) + assert.Equal(t, expectedResult.Certainty, res.Certainty) + assert.InDelta(t, *expectedResult.Distance, *res.Distance, 1e-9) + }) + + t.Run("when the server has a successful answer (with certainty)", func(t *testing.T) { + server := httptest.NewServer(&testAnswerHandler{ + t: t, + answer: answersResponse{ + answersInput: answersInput{ + Text: "My name is John", + Question: "What is my name?", + }, + Answer: ptString("John"), + Certainty: ptFloat(0.7), + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + res, err := c.Answer(context.Background(), "My name is John", + "What is my name?") + + assert.Nil(t, err) + assert.Equal(t, &ent.AnswerResult{ + Text: "My name is John", + Question: "What is my name?", + 
Answer: ptString("John"), + Certainty: ptFloat(0.7), + Distance: additional.CertaintyToDistPtr(ptFloat(0.7)), + }, res) + }) + + t.Run("when the server has a an error", func(t *testing.T) { + server := httptest.NewServer(&testAnswerHandler{ + t: t, + answer: answersResponse{ + Error: "some error from the server", + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + _, err := c.Answer(context.Background(), "My name is John", + "What is my name?") + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "some error from the server") + }) +} + +type testAnswerHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + answer answersResponse +} + +func (f *testAnswerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/answers/", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.answer.Error != "" { + w.WriteHeader(500) + } + jsonBytes, _ := json.Marshal(f.answer) + w.Write(jsonBytes) +} + +func ptFloat(in float64) *float64 { + return &in +} + +func ptString(in string) *string { + return &in +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/startup.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/startup.go new file mode 100644 index 0000000000000000000000000000000000000000..1e8df85d685aea7bcfa9ef783e440374026998b1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/startup.go @@ -0,0 +1,68 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "time" + + "github.com/pkg/errors" +) + +func (q *qna) WaitForStartup(initCtx context.Context, + interval time.Duration, +) error { + t := time.NewTicker(interval) + defer t.Stop() + expired := initCtx.Done() + var lastErr error + for { + select { + case <-t.C: + lastErr = q.checkReady(initCtx) + if lastErr == nil { + return nil + } + q.logger. + WithField("action", "qna_remote_wait_for_startup"). + WithError(lastErr).Warnf("qna remote service not ready") + case <-expired: + return errors.Wrapf(lastErr, "init context expired before remote was ready") + } + } +} + +func (q *qna) checkReady(initCtx context.Context) error { + // spawn a new context (derived on the overall context) which is used to + // consider an individual request timed out + requestCtx, cancel := context.WithTimeout(initCtx, 500*time.Millisecond) + defer cancel() + + req, err := http.NewRequestWithContext(requestCtx, http.MethodGet, + q.url("/.well-known/ready"), nil) + if err != nil { + return errors.Wrap(err, "create check ready request") + } + + res, err := q.httpClient.Do(req) + if err != nil { + return errors.Wrap(err, "send check ready request") + } + + defer res.Body.Close() + if res.StatusCode > 299 { + return errors.Errorf("not ready: status %d", res.StatusCode) + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/startup_test.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/startup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ff65558b1e87f7f143c6c43afdaf422dd3829c34 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/clients/startup_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 
2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWaitForStartup(t *testing.T) { + t.Run("when the server is immediately ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + err := c.WaitForStartup(context.Background(), 50*time.Millisecond) + + assert.Nil(t, err) + }) + + t.Run("when the server is down", func(t *testing.T) { + c := New("http://nothing-running-at-this-url", 0, nullLogger()) + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 150*time.Millisecond) + + require.NotNil(t, err, nullLogger()) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is alive, but not ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(1 * time.Minute), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is initially not ready, but then becomes ready", + func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(100 * time.Millisecond), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + 
require.Nil(t, err) + }) +} + +type testReadyHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testReadyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/.well-known/ready", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.WriteHeader(http.StatusNoContent) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/config.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/config.go new file mode 100644 index 0000000000000000000000000000000000000000..126fba9ea7695827f6dd926986e37de5c8aa2cae --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/config.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modqna + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *QnAModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *QnAModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *QnAModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/dependency/dependency.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/dependency/dependency.go new file mode 100644 index 0000000000000000000000000000000000000000..23e9092f6412086a9cd5dc8c7e2ff2516a623a5d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/dependency/dependency.go @@ -0,0 +1,42 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package dependency + +import "github.com/weaviate/weaviate/entities/modulecapabilities" + +type NearTextDependecy struct { + moduleName string + argument modulecapabilities.GraphQLArgument + searcher modulecapabilities.VectorForParams[[]float32] +} + +func New(moduleName string, argument modulecapabilities.GraphQLArgument, + searcher modulecapabilities.VectorForParams[[]float32], +) *NearTextDependecy { + return &NearTextDependecy{moduleName, argument, searcher} +} + +func (d *NearTextDependecy) Argument() string { + return "nearText" +} + +func (d *NearTextDependecy) ModuleName() string { + return d.moduleName +} + +func (d *NearTextDependecy) GraphQLArgument() modulecapabilities.GraphQLArgument { + return d.argument +} + +func (d *NearTextDependecy) VectorSearch() modulecapabilities.VectorForParams[[]float32] { + return d.searcher +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ent/vectorization_result.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ent/vectorization_result.go new file mode 100644 index 0000000000000000000000000000000000000000..d6cff73d8fb4706bf960ee9e1d55cb91058f7d60 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/ent/vectorization_result.go @@ -0,0 +1,20 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +type AnswerResult struct { + Text string + Question string + Answer *string + Certainty *float64 + Distance *float64 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/qna-transformers/module.go b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/module.go new file mode 100644 index 0000000000000000000000000000000000000000..4fd06f2975b42e6342b9f6967baf00b5f1e074d6 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/qna-transformers/module.go @@ -0,0 +1,176 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modqna + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + entcfg "github.com/weaviate/weaviate/entities/config" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + qnaadditional "github.com/weaviate/weaviate/modules/qna-transformers/additional" + qnaadditionalanswer "github.com/weaviate/weaviate/modules/qna-transformers/additional/answer" + qnaask "github.com/weaviate/weaviate/modules/qna-transformers/ask" + "github.com/weaviate/weaviate/modules/qna-transformers/clients" + qnaadependency "github.com/weaviate/weaviate/modules/qna-transformers/dependency" + "github.com/weaviate/weaviate/modules/qna-transformers/ent" +) + +const Name = "qna-transformers" + +func New() *QnAModule { + return &QnAModule{} +} + +type QnAModule struct { + qna qnaClient + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.DependencySearcher[[]float32] + additionalPropertiesProvider modulecapabilities.AdditionalProperties + nearTextDependencies []modulecapabilities.Dependency[[]float32] + askTextTransformer 
modulecapabilities.TextTransform +} + +type qnaClient interface { + Answer(ctx context.Context, + text, question string) (*ent.AnswerResult, error) + MetaInfo() (map[string]interface{}, error) +} + +func (m *QnAModule) Name() string { + return Name +} + +func (m *QnAModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextQnA +} + +func (m *QnAModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + return nil +} + +func (m *QnAModule) InitExtension(modules []modulecapabilities.Module) error { + var textTransformer modulecapabilities.TextTransform + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + textTransformer = arg.TextTransformers()["ask"] + } + } + } + + m.askTextTransformer = textTransformer + + if err := m.initAskProvider(); err != nil { + return errors.Wrap(err, "init ask provider") + } + + return nil +} + +func (m *QnAModule) InitDependency(modules []modulecapabilities.Module) error { + nearTextDependencies := []modulecapabilities.Dependency[[]float32]{} + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + var argument modulecapabilities.GraphQLArgument + var searcher modulecapabilities.VectorForParams[[]float32] + if arg, ok := module.(modulecapabilities.GraphQLArguments); ok { + if arg != nil && arg.Arguments() != nil { + if nearTextArg, ok := arg.Arguments()["nearText"]; ok { + argument = nearTextArg + } + } + } + if arg, ok := module.(modulecapabilities.Searcher[[]float32]); ok { + if arg != nil && arg.VectorSearches() != nil { + if nearTextSearcher, ok := arg.VectorSearches()["nearText"]; ok { + searcher = nearTextSearcher + } + } + } + + if 
argument.ExtractFunction != nil && searcher != nil { + nearTextDependency := qnaadependency.New(module.Name(), argument, searcher) + nearTextDependencies = append(nearTextDependencies, nearTextDependency) + } + } + if len(nearTextDependencies) == 0 { + return errors.New("nearText dependecy not present") + } + + m.nearTextDependencies = nearTextDependencies + + if err := m.initAskSearcher(); err != nil { + return errors.Wrap(err, "init ask searcher") + } + + return nil +} + +func (m *QnAModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + // TODO: proper config management + uri := os.Getenv("QNA_INFERENCE_API") + if uri == "" { + return errors.Errorf("required variable QNA_INFERENCE_API is not set") + } + + waitForStartup := true + if envWaitForStartup := os.Getenv("QNA_WAIT_FOR_STARTUP"); envWaitForStartup != "" { + waitForStartup = entcfg.Enabled(envWaitForStartup) + } + + client := clients.New(uri, timeout, logger) + if waitForStartup { + if err := client.WaitForStartup(ctx, 1*time.Second); err != nil { + return errors.Wrap(err, "init remote vectorizer") + } + } + + m.qna = client + + answerProvider := qnaadditionalanswer.New(m.qna, qnaask.NewParamsHelper()) + m.additionalPropertiesProvider = qnaadditional.New(answerProvider) + + return nil +} + +func (m *QnAModule) MetaInfo() (map[string]interface{}, error) { + return m.qna.MetaInfo() +} + +func (m *QnAModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.AdditionalProperties(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/config.go b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/config.go new file mode 100644 index 
0000000000000000000000000000000000000000..3375fd90c46bc53ef3ec3b6cc74cb6feef2205f9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/config.go @@ -0,0 +1,44 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modcentroid + +import ( + "context" + "fmt" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/ref2vec-centroid/config" +) + +func (m *CentroidModule) ClassConfigDefaults() map[string]interface{} { + return config.Default() +} + +func (m *CentroidModule) PropertyConfigDefaults(dataType *schema.DataType) map[string]interface{} { + // no property-specific config for this module + return nil +} + +func (m *CentroidModule) ValidateClass(ctx context.Context, + class *models.Class, classConfig moduletools.ClassConfig, +) error { + err := config.Validate(config.New(classConfig)) + if err != nil { + return fmt.Errorf("validate %q: %w", class.Class, err) + } + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/config/config.go b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/config/config.go new file mode 100644 index 0000000000000000000000000000000000000000..d7fd49d2745da2bf1d939fba27c167bb261a62bf --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/config/config.go @@ -0,0 +1,63 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ 
|_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "fmt" + + "github.com/weaviate/weaviate/entities/moduletools" +) + +const ( + MethodMean = "mean" + MethodDefault = MethodMean +) + +const ( + calculationMethodField = "method" + referencePropertiesField = "referenceProperties" +) + +func Default() map[string]interface{} { + return map[string]interface{}{ + calculationMethodField: MethodDefault, + } +} + +type Config struct { + class moduletools.ClassConfig +} + +func New(cfg moduletools.ClassConfig) *Config { + return &Config{class: cfg} +} + +func (c *Config) ReferenceProperties() map[string]struct{} { + refProps := map[string]struct{}{} + props := c.class.Class() + + iRefProps := props[referencePropertiesField].([]interface{}) + for _, iProp := range iRefProps { + refProps[iProp.(string)] = struct{}{} + } + + return refProps +} + +func (c *Config) CalculationMethod() (string, error) { + props := c.class.Class() + calcMethod, ok := props[calculationMethodField].(string) + if !ok { + return "", fmt.Errorf("could not parse calculation methode. Expected a string, got: %v", props[calculationMethodField]) + } + return calcMethod, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/config/validation.go b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/config/validation.go new file mode 100644 index 0000000000000000000000000000000000000000..71d8d474ecf9fc3ab4e8b677889f943fb5a1d7ca --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/config/validation.go @@ -0,0 +1,55 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "errors" + "fmt" +) + +var errInvalidConfig = errors.New("invalid config") + +func Validate(cfg *Config) error { + // referencePropertiesField is a required field + class := cfg.class.Class() + refProps, ok := class[referencePropertiesField] + if !ok { + return fmt.Errorf("%w: must have at least one value in the %q field", + errInvalidConfig, referencePropertiesField) + } + + propSlice, ok := refProps.([]interface{}) + if !ok { + return fmt.Errorf("%w: expected array for field %q, got %T", + errInvalidConfig, referencePropertiesField, refProps) + } + + if len(propSlice) == 0 { + return fmt.Errorf("%w: must have at least one value in the %q field", + errInvalidConfig, referencePropertiesField) + } + + // all provided property names must be strings + for _, prop := range propSlice { + if _, ok := prop.(string); !ok { + return fmt.Errorf("%w: expected %q to contain strings, found %T: %+v", + errInvalidConfig, referencePropertiesField, prop, refProps) + } + } + + _, err := cfg.CalculationMethod() + if err != nil { + return err + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/config_test.go b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/config_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ee23eb8983c5c36c901b02ff2bf38529be2850d2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/config_test.go @@ -0,0 +1,114 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modcentroid + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/ref2vec-centroid/config" +) + +func TestConfigDefaults(t *testing.T) { + def := New().ClassConfigDefaults() + cfg := config.New(fakeClassConfig(def)) + + method, err := cfg.CalculationMethod() + + assert.Equal(t, config.MethodDefault, method) + assert.Nil(t, err) +} + +func TestConfigValidator(t *testing.T) { + class := &models.Class{Class: "CentroidClass"} + + tests := []struct { + name string + class *models.Class + classConfig moduletools.ClassConfig + expectedErr error + }{ + { + name: "valid config", + class: class, + classConfig: fakeClassConfig{ + "referenceProperties": []interface{}{"someRef"}, + "method": "mean", + }, + }, + { + name: "invalid config - wrong method", + class: class, + classConfig: fakeClassConfig{ + "referenceProperties": []interface{}{"someRef"}, + "method": []string{"mean"}, + }, + expectedErr: fmt.Errorf("validate %q: could not parse calculation methode. 
"+ + "Expected a string, got: [mean]", + class.Class), + }, + { + name: "invalid config - required fields omitted", + class: class, + classConfig: fakeClassConfig{}, + expectedErr: fmt.Errorf("validate %q: invalid config: must have at least "+ + "one value in the \"referenceProperties\" field", + class.Class), + }, + { + name: "invalid config - wrong type for referenceProperties", + class: class, + classConfig: fakeClassConfig{ + "referenceProperties": "someRef", + }, + expectedErr: fmt.Errorf("validate %q: invalid config: expected array for "+ + "field \"referenceProperties\", got string", + class.Class), + }, + { + name: "invalid config - empty referenceProperties slice", + class: class, + classConfig: fakeClassConfig{ + "referenceProperties": []interface{}{}, + }, + expectedErr: fmt.Errorf("validate %q: invalid config: must have at least "+ + "one value in the \"referenceProperties\" field", + class.Class), + }, + { + name: "invalid config - non-string value in referenceProperties array", + class: class, + classConfig: fakeClassConfig{ + "referenceProperties": []interface{}{"someRef", 123}, + }, + expectedErr: fmt.Errorf("validate %q: invalid config: expected \"referenceProperties\" "+ + "to contain strings, found int: [someRef 123]", + class.Class), + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + mod := New() + err := mod.ValidateClass(context.Background(), test.class, test.classConfig) + if test.expectedErr != nil { + assert.EqualError(t, err, test.expectedErr.Error()) + } else { + assert.Nil(t, err) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..85dc653e7218b943c25ec6674d4e3f6aeb959080 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/fakes_for_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ 
_____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modcentroid + +import ( + "testing" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig map[string]interface{} + +func (cfg fakeClassConfig) Class() map[string]interface{} { + return cfg +} + +func (cfg fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return cfg +} + +func (cfg fakeClassConfig) Property(string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +func newFakeStorageProvider(t *testing.T) *fakeStorageProvider { + dirName := t.TempDir() + return &fakeStorageProvider{dirName} +} + +type fakeStorageProvider struct { + dataPath string +} + +func (sp fakeStorageProvider) Storage(name string) (moduletools.Storage, error) { + return nil, nil +} + +func (sp fakeStorageProvider) DataPath() string { + return sp.dataPath +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/module.go b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/module.go new file mode 100644 index 0000000000000000000000000000000000000000..a4b84b777ec13c52580871d90e6722af818c4c58 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/module.go @@ -0,0 +1,70 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ 
\___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modcentroid + +import ( + "context" + + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/ref2vec-centroid/vectorizer" +) + +const ( + Name = "ref2vec-centroid" +) + +func New() *CentroidModule { + return &CentroidModule{} +} + +type CentroidModule struct { + logger logrus.FieldLogger +} + +func (m *CentroidModule) Name() string { + return Name +} + +func (m *CentroidModule) Init(ctx context.Context, params moduletools.ModuleInitParams) error { + m.logger = params.GetLogger() + return nil +} + +func (m *CentroidModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Ref2Vec +} + +func (m *CentroidModule) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{}, nil +} + +func (m *CentroidModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, + findRefVecsFn modulecapabilities.FindObjectFn, +) ([]float32, error) { + vzr, err := vectorizer.New(cfg, findRefVecsFn) + if err != nil { + return nil, err + } + return vzr.Object(ctx, obj) +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.ReferenceVectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/module_test.go b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/module_test.go new file mode 100644 index 0000000000000000000000000000000000000000..47e6525086b095a3fdb3acf7dc956fa752d89841 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/module_test.go @@ -0,0 +1,170 @@ +// _ _ +// __ _____ __ ___ 
___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modcentroid + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/schema/crossref" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/usecases/config" +) + +func TestRef2VecCentroid(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + sp := newFakeStorageProvider(t) + logger, _ := test.NewNullLogger() + params := moduletools.NewInitParams(sp, nil, &config.Config{}, logger, prometheus.NewPedanticRegistry()) + + mod := New() + classConfig := fakeClassConfig(mod.ClassConfigDefaults()) + refProp := "someRef" + classConfig["referenceProperties"] = []interface{}{refProp} + + t.Run("Init", func(t *testing.T) { + err := mod.Init(ctx, params) + assert.Nil(t, err) + }) + + t.Run("Type", func(t *testing.T) { + typ := mod.Type() + assert.Equal(t, modulecapabilities.Ref2Vec, typ) + }) + + t.Run("Name", func(t *testing.T) { + name := mod.Name() + assert.Equal(t, Name, name) + }) + + t.Run("MetaInfo", func(t *testing.T) { + meta, err := mod.MetaInfo() + assert.Nil(t, err) + assert.Empty(t, meta) + }) + + t.Run("PropertyConfigDefaults", func(t *testing.T) { + dt := schema.DataType("dataType") + props := 
mod.PropertyConfigDefaults(&dt) + assert.Nil(t, props) + }) + + t.Run("ValidateClass", func(t *testing.T) { + t.Run("expected success", func(t *testing.T) { + class := &models.Class{} + + err := mod.ValidateClass(ctx, class, classConfig) + assert.Nil(t, err) + }) + + t.Run("expected error", func(t *testing.T) { + class := &models.Class{Class: "InvalidConfigClass"} + cfg := fakeClassConfig{} + + expectedErr := fmt.Sprintf( + "validate %q: invalid config: must have at least one "+ + "value in the \"referenceProperties\" field", + class.Class) + err := mod.ValidateClass(ctx, class, cfg) + assert.EqualError(t, err, expectedErr) + }) + }) + + t.Run("VectorizeObject", func(t *testing.T) { + t.Run("expected success", func(t *testing.T) { + t.Run("one refVec", func(t *testing.T) { + repo := &fakeObjectsRepo{} + ref := crossref.New("localhost", "SomeClass", strfmt.UUID(uuid.NewString())) + obj := &models.Object{Properties: map[string]interface{}{ + refProp: models.MultipleRef{ref.SingleRef()}, + }} + + repo.On("Object", ctx, ref.Class, ref.TargetID). + Return(&search.Result{Vector: []float32{1, 2, 3}}, nil) + + vec, err := mod.VectorizeObject(ctx, obj, classConfig, repo.Object) + assert.Nil(t, err) + expectedVec := models.C11yVector{1, 2, 3} + assert.EqualValues(t, expectedVec, vec) + }) + + t.Run("no refVecs", func(t *testing.T) { + repo := &fakeObjectsRepo{} + ref := crossref.New("localhost", "SomeClass", strfmt.UUID(uuid.NewString())) + obj := &models.Object{Properties: map[string]interface{}{ + refProp: models.MultipleRef{ref.SingleRef()}, + }} + + repo.On("Object", ctx, ref.Class, ref.TargetID). 
+ Return(&search.Result{}, nil) + + _, err := mod.VectorizeObject(ctx, obj, classConfig, repo.Object) + assert.Nil(t, err) + assert.Nil(t, nil, obj.Vector) + }) + }) + + t.Run("expected error", func(t *testing.T) { + t.Run("mismatched refVec lengths", func(t *testing.T) { + repo := &fakeObjectsRepo{} + ref1 := crossref.New("localhost", "SomeClass", strfmt.UUID(uuid.NewString())) + ref2 := crossref.New("localhost", "OtherClass", strfmt.UUID(uuid.NewString())) + obj := &models.Object{Properties: map[string]interface{}{ + refProp: models.MultipleRef{ + ref1.SingleRef(), + ref2.SingleRef(), + }, + }} + expectedErr := fmt.Errorf("calculate vector: calculate mean: " + + "found vectors of different length: 2 and 3") + + repo.On("Object", ctx, ref1.Class, ref1.TargetID). + Return(&search.Result{Vector: []float32{1, 2}}, nil) + repo.On("Object", ctx, ref2.Class, ref2.TargetID). + Return(&search.Result{Vector: []float32{1, 2, 3}}, nil) + + _, err := mod.VectorizeObject(ctx, obj, classConfig, repo.Object) + assert.EqualError(t, err, expectedErr.Error()) + }) + }) + }) +} + +type fakeObjectsRepo struct { + mock.Mock +} + +func (r *fakeObjectsRepo) Object(ctx context.Context, class string, + id strfmt.UUID, props search.SelectProperties, + addl additional.Properties, tenant string, +) (*search.Result, error) { + args := r.Called(ctx, class, id) + if args.Get(0) == nil { + return nil, args.Error(1) + } + return args.Get(0).(*search.Result), args.Error(1) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..7fe61ce41dbaf1a2b5b6f02a80d09a7e65464943 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/vectorizer/fakes_for_test.go @@ -0,0 +1,68 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// 
\ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/mock" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig map[string]interface{} + +func (cfg fakeClassConfig) Class() map[string]interface{} { + return cfg +} + +func (cfg fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return cfg +} + +func (cfg fakeClassConfig) Property(string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +type fakeObjectsRepo struct { + mock.Mock +} + +func (r *fakeObjectsRepo) Object(ctx context.Context, class string, + id strfmt.UUID, props search.SelectProperties, + addl additional.Properties, tenant string, +) (*search.Result, error) { + args := r.Called(ctx, class, id, tenant) + if args.Get(0) == nil { + return nil, args.Error(1) + } + return args.Get(0).(*search.Result), args.Error(1) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/vectorizer/method_mean.go b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/vectorizer/method_mean.go new file mode 100644 index 0000000000000000000000000000000000000000..3d02ffe1ee1ca0560e3408d8743c8c0cf044b88f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/vectorizer/method_mean.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// 
// calculateMean computes the component-wise arithmetic mean of the given
// reference vectors. It returns (nil, nil) when no vectors are provided or
// when the first vector is empty, and an error when the vectors do not all
// share the same dimensionality.
func calculateMean(refVecs ...[]float32) ([]float32, error) {
	if len(refVecs) == 0 || len(refVecs[0]) == 0 {
		return nil, nil
	}

	// the first vector fixes the expected dimensionality for all others
	dims := len(refVecs[0])
	sum := make([]float32, dims)

	for _, vec := range refVecs {
		if len(vec) != dims {
			return nil, fmt.Errorf("calculate mean: found vectors of different length: %d and %d",
				dims, len(vec))
		}
		for i := range vec {
			sum[i] += vec[i]
		}
	}

	count := float32(len(refVecs))
	for i := range sum {
		sum[i] /= count
	}

	return sum, nil
}
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema/crossref" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/modules/ref2vec-centroid/config" +) + +type calcFn func(vecs ...[]float32) ([]float32, error) + +type Vectorizer struct { + config *config.Config + calcFn calcFn + findObjectFn modulecapabilities.FindObjectFn +} + +func New(cfg moduletools.ClassConfig, findFn modulecapabilities.FindObjectFn) (*Vectorizer, error) { + v := &Vectorizer{ + config: config.New(cfg), + findObjectFn: findFn, + } + + method, err := v.config.CalculationMethod() + if err != nil { + return nil, err + } + switch method { + case config.MethodMean: + v.calcFn = calculateMean + default: + v.calcFn = calculateMean + } + + return v, nil +} + +func (v *Vectorizer) Object(ctx context.Context, obj *models.Object) ([]float32, error) { + props := v.config.ReferenceProperties() + + refVecs, err := v.referenceVectorSearch(ctx, obj, props) + if err != nil { + return nil, err + } + + if len(refVecs) == 0 { + obj.Vector = nil + return nil, nil + } + + vec, err := v.calcFn(refVecs...) 
+ if err != nil { + return nil, fmt.Errorf("calculate vector: %w", err) + } + + return vec, nil +} + +func (v *Vectorizer) referenceVectorSearch(ctx context.Context, + obj *models.Object, refProps map[string]struct{}, +) ([][]float32, error) { + var refVecs [][]float32 + props := obj.Properties.(map[string]interface{}) + + // use the ids from parent's beacons to find the referenced objects + beacons := beaconsForVectorization(props, refProps) + for _, beacon := range beacons { + res, err := v.findReferenceObject(ctx, beacon, obj.Tenant) + if err != nil { + return nil, err + } + + // if the ref'd object has a vector, we grab it. + // these will be used to compute the parent's + // vector eventually + if res.Vector != nil { + refVecs = append(refVecs, res.Vector) + } + } + + return refVecs, nil +} + +func (v *Vectorizer) findReferenceObject(ctx context.Context, beacon strfmt.URI, tenant string) (res *search.Result, err error) { + ref, err := crossref.Parse(beacon.String()) + if err != nil { + return nil, fmt.Errorf("parse beacon %q: %w", beacon, err) + } + + res, err = v.findObjectFn(ctx, ref.Class, ref.TargetID, + search.SelectProperties{}, additional.Properties{}, tenant) + if err != nil || res == nil { + if err == nil { + err = fmt.Errorf("not found") + } + err = fmt.Errorf("find object with beacon %q': %w", beacon, err) + } + return +} + +func beaconsForVectorization(allProps map[string]interface{}, + targetRefProps map[string]struct{}, +) []strfmt.URI { + var beacons []strfmt.URI + + // add any refs that were supplied as a part of the parent + // object, like when caller is AddObject/UpdateObject + for prop, val := range allProps { + if _, ok := targetRefProps[prop]; ok { + switch refs := val.(type) { + case []interface{}: + // due to the fix introduced in https://github.com/weaviate/weaviate/pull/2320, + // MultipleRef's can appear as empty []interface{} when no actual refs are provided for + // an object's reference property. 
+ // + // if we encounter []interface{}, assume it indicates an empty ref prop, and skip it. + continue + case models.MultipleRef: + for _, ref := range refs { + beacons = append(beacons, ref.Beacon) + } + } + } + } + + return beacons +} diff --git a/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/vectorizer/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/vectorizer/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c707b97fd8677450d0cc18dd62ffc79703fa8eb5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/ref2vec-centroid/vectorizer/vectorizer_test.go @@ -0,0 +1,203 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "errors" + "reflect" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema/crossref" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/modules/ref2vec-centroid/config" +) + +func TestVectorizer_New(t *testing.T) { + repo := &fakeObjectsRepo{} + t.Run("default is set correctly", func(t *testing.T) { + vzr, err := New(fakeClassConfig(config.Default()), repo.Object) + assert.Nil(t, err) + + expected := reflect.ValueOf(calculateMean).Pointer() + received := reflect.ValueOf(vzr.calcFn).Pointer() + + assert.EqualValues(t, expected, received) + }) + + t.Run("default calcFn is used when none provided", func(t *testing.T) { + cfg := fakeClassConfig{"method": ""} + vzr, err := New(cfg, repo.Object) + assert.Nil(t, err) + + expected := reflect.ValueOf(calculateMean).Pointer() + received := 
reflect.ValueOf(vzr.calcFn).Pointer() + + assert.EqualValues(t, expected, received) + }) +} + +func TestVectorizer_Object(t *testing.T) { + t.Run("calculate with mean", func(t *testing.T) { + type objectSearchResult struct { + res *search.Result + err error + } + + tests := []struct { + name string + objectSearchResults []objectSearchResult + expectedResult []float32 + expectedCalcError error + }{ + { + name: "expected success 1", + objectSearchResults: []objectSearchResult{ + {res: &search.Result{Vector: []float32{2, 4, 6}}}, + {res: &search.Result{Vector: []float32{4, 6, 8}}}, + }, + expectedResult: []float32{3, 5, 7}, + }, + { + name: "expected success 2", + objectSearchResults: []objectSearchResult{ + {res: &search.Result{Vector: []float32{1, 1, 1, 1, 1, 1, 1, 1, 1, 1}}}, + {res: &search.Result{Vector: []float32{2, 2, 2, 2, 2, 2, 2, 2, 2, 2}}}, + {res: &search.Result{Vector: []float32{3, 3, 3, 3, 3, 3, 3, 3, 3, 3}}}, + {res: &search.Result{Vector: []float32{4, 4, 4, 4, 4, 4, 4, 4, 4, 4}}}, + {res: &search.Result{Vector: []float32{5, 5, 5, 5, 5, 5, 5, 5, 5, 5}}}, + {res: &search.Result{Vector: []float32{6, 6, 6, 6, 6, 6, 6, 6, 6, 6}}}, + {res: &search.Result{Vector: []float32{7, 7, 7, 7, 7, 7, 7, 7, 7, 7}}}, + {res: &search.Result{Vector: []float32{8, 8, 8, 8, 8, 8, 8, 8, 8, 8}}}, + {res: &search.Result{Vector: []float32{9, 9, 9, 9, 9, 9, 9, 9, 9, 9}}}, + }, + expectedResult: []float32{5, 5, 5, 5, 5, 5, 5, 5, 5, 5}, + }, + { + name: "expected success 3", + objectSearchResults: []objectSearchResult{{}}, + }, + { + name: "expected success 4", + objectSearchResults: []objectSearchResult{ + {res: &search.Result{Vector: []float32{1, 2, 3, 4, 5, 6, 7, 8, 9}}}, + }, + expectedResult: []float32{1, 2, 3, 4, 5, 6, 7, 8, 9}, + }, + { + name: "expected success 5", + objectSearchResults: []objectSearchResult{ + {res: &search.Result{}}, + }, + expectedResult: nil, + }, + { + name: "expected error - mismatched vector dimensions", + objectSearchResults: []objectSearchResult{ + 
{res: &search.Result{Vector: []float32{1, 2, 3, 4, 5, 6, 7, 8, 9}}}, + {res: &search.Result{Vector: []float32{1, 2, 3, 4, 5, 6, 7, 8}}}, + }, + expectedCalcError: errors.New( + "calculate vector: calculate mean: found vectors of different length: 9 and 8"), + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + ctx := context.Background() + repo := &fakeObjectsRepo{} + refProps := []interface{}{"toRef"} + cfg := fakeClassConfig{"method": "mean", "referenceProperties": refProps} + + crossRefs := make([]*crossref.Ref, len(test.objectSearchResults)) + modelRefs := make(models.MultipleRef, len(test.objectSearchResults)) + for i, res := range test.objectSearchResults { + crossRef := crossref.New("localhost", "SomeClass", + strfmt.UUID(uuid.NewString())) + crossRefs[i] = crossRef + modelRefs[i] = crossRef.SingleRef() + + repo.On("Object", ctx, crossRef.Class, crossRef.TargetID, ""). + Return(res.res, res.err) + } + + obj := &models.Object{ + Properties: map[string]interface{}{"toRef": modelRefs}, + } + vectorizer, err := New(cfg, repo.Object) + assert.Nil(t, err) + vec, err := vectorizer.Object(ctx, obj) + if test.expectedCalcError != nil { + assert.EqualError(t, err, test.expectedCalcError.Error()) + } else { + assert.EqualValues(t, test.expectedResult, vec) + } + }) + } + }) + + // due to the fix introduced in https://github.com/weaviate/weaviate/pull/2320, + // MultipleRef's can appear as empty []interface{} when no actual refs are provided for + // an object's reference property. + // + // this test asserts that reference properties do not break when they are unmarshalled + // as empty interface{} slices. 
+ t.Run("when rep prop is stored as empty interface{} slice", func(t *testing.T) { + ctx := context.Background() + repo := &fakeObjectsRepo{} + refProps := []interface{}{"toRef"} + cfg := fakeClassConfig{"method": "mean", "referenceProperties": refProps} + + obj := &models.Object{ + Properties: map[string]interface{}{"toRef": []interface{}{}}, + } + + vectorizer, err := New(cfg, repo.Object) + assert.Nil(t, err) + + _, err = vectorizer.Object(ctx, obj) + assert.Nil(t, err) + assert.Nil(t, obj.Vector) + }) +} + +func TestVectorizer_Tenant(t *testing.T) { + objectSearchResults := search.Result{Vector: []float32{}} + ctx := context.Background() + repo := &fakeObjectsRepo{} + refProps := []interface{}{"toRef"} + cfg := fakeClassConfig{"method": "mean", "referenceProperties": refProps} + tenant := "randomTenant" + + crossRef := crossref.New("localhost", "SomeClass", + strfmt.UUID(uuid.NewString())) + modelRefs := models.MultipleRef{crossRef.SingleRef()} + + repo.On("Object", ctx, crossRef.Class, crossRef.TargetID, tenant). + Return(&objectSearchResults, nil) + + obj := &models.Object{ + Properties: map[string]interface{}{"toRef": modelRefs}, + Tenant: tenant, + } + + vectorizer, err := New(cfg, repo.Object) + assert.Nil(t, err) + + _, err = vectorizer.Object(ctx, obj) + assert.Nil(t, err) + assert.Nil(t, obj.Vector) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/clients/ranker.go b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/clients/ranker.go new file mode 100644 index 0000000000000000000000000000000000000000..55c8220341839544c0de97ad9613cb7dc4b83d38 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/clients/ranker.go @@ -0,0 +1,248 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "runtime" + "sync" + "time" + + enterrors "github.com/weaviate/weaviate/entities/errors" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/reranker-cohere/config" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +var _NUMCPU = runtime.NumCPU() + +type client struct { + lock sync.RWMutex + apiKey string + host string + path string + httpClient *http.Client + maxDocuments int + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *client { + return &client{ + apiKey: apiKey, + httpClient: &http.Client{Timeout: timeout}, + host: "https://api.cohere.ai", + path: "/v1/rerank", + maxDocuments: 1000, + logger: logger, + } +} + +func (c *client) Rank(ctx context.Context, query string, documents []string, + cfg moduletools.ClassConfig, +) (*ent.RankResult, error) { + eg := enterrors.NewErrorGroupWrapper(c.logger) + eg.SetLimit(_NUMCPU) + + chunkedDocuments := c.chunkDocuments(documents, c.maxDocuments) + documentScoreResponses := make([][]ent.DocumentScore, len(chunkedDocuments)) + for i := range chunkedDocuments { + i := i // https://golang.org/doc/faq#closures_and_goroutines + eg.Go(func() error { + documentScoreResponse, err := c.performRank(ctx, query, chunkedDocuments[i], cfg) + if err != nil { + return err + } + c.lockGuard(func() { + documentScoreResponses[i] = documentScoreResponse + }) + return nil + }, chunkedDocuments[i]) + } + if err := eg.Wait(); err != nil { + return nil, err + } + + return c.toRankResult(query, documentScoreResponses), nil +} + +func (c *client) lockGuard(mutate func()) { + c.lock.Lock() + defer c.lock.Unlock() + mutate() +} + +func (c *client) performRank(ctx 
context.Context, query string, documents []string, + cfg moduletools.ClassConfig, +) ([]ent.DocumentScore, error) { + settings := config.NewClassSettings(cfg) + cohereUrl, err := url.JoinPath(c.host, c.path) + if err != nil { + return nil, errors.Wrap(err, "join Cohere API host and path") + } + + input := RankInput{ + Documents: documents, + Query: query, + Model: settings.Model(), + ReturnDocuments: false, + } + + body, err := json.Marshal(input) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", cohereUrl, bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + + apiKey, err := c.getApiKey(ctx) + if err != nil { + return nil, errors.Wrapf(err, "Cohere API Key") + } + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey)) + req.Header.Add("Content-Type", "application/json") + req.Header.Add("Request-Source", "unspecified:weaviate") + + res, err := c.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + if res.StatusCode != 200 { + var apiError cohereApiError + err = json.Unmarshal(bodyBytes, &apiError) + if err != nil { + return nil, errors.Wrap(err, "unmarshal error from response body") + } + if apiError.Message != "" { + return nil, errors.Errorf("connection to Cohere API failed with status %d: %s", res.StatusCode, apiError.Message) + } + return nil, errors.Errorf("connection to Cohere API failed with status %d", res.StatusCode) + } + + var rankResponse RankResponse + if err := json.Unmarshal(bodyBytes, &rankResponse); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + return c.toDocumentScores(documents, rankResponse.Results), nil +} + +func (c *client) chunkDocuments(documents []string, chunkSize int) [][]string { + var requests [][]string + for i := 0; i < len(documents); i += chunkSize { + end := i + chunkSize + + if end > len(documents) { + end = len(documents) + } + + requests = append(requests, documents[i:end]) + } + + return requests +} + +func (c *client) toDocumentScores(documents []string, results []Result) []ent.DocumentScore { + documentScores := make([]ent.DocumentScore, len(results)) + for _, result := range results { + documentScores[result.Index] = ent.DocumentScore{ + Document: documents[result.Index], + Score: result.RelevanceScore, + } + } + return documentScores +} + +func (c *client) toRankResult(query string, results [][]ent.DocumentScore) *ent.RankResult { + documentScores := []ent.DocumentScore{} + for i := range results { + documentScores = append(documentScores, results[i]...) + } + return &ent.RankResult{ + Query: query, + DocumentScores: documentScores, + } +} + +func (c *client) getApiKey(ctx context.Context) (string, error) { + if len(c.apiKey) > 0 { + return c.apiKey, nil + } + key := "X-Cohere-Api-Key" + + apiKey := ctx.Value(key) + // try getting header from GRPC if not successful + if apiKey == nil { + apiKey = modulecomponents.GetValueFromGRPC(ctx, key) + } + if apiKeyHeader, ok := apiKey.([]string); ok && + len(apiKeyHeader) > 0 && len(apiKeyHeader[0]) > 0 { + return apiKeyHeader[0], nil + } + return "", errors.New("no api key found " + + "neither in request header: X-Cohere-Api-Key " + + "nor in environment variable under COHERE_APIKEY") +} + +type RankInput struct { + Documents []string `json:"documents"` + Query string `json:"query"` + Model string `json:"model"` + ReturnDocuments bool `json:"return_documents"` +} + +type Document struct { + Text string `json:"text"` +} + +type Result struct { + Index int `json:"index"` + RelevanceScore float64 
`json:"relevance_score"` + Document Document `json:"document"` +} + +type APIVersion struct { + Version string `json:"version"` +} + +type Meta struct { + APIVersion APIVersion `json:"api_version"` +} + +type RankResponse struct { + ID string `json:"id"` + Results []Result `json:"results"` + Meta Meta `json:"meta"` +} + +type cohereApiError struct { + Message string `json:"message"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/clients/ranker_meta.go b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/clients/ranker_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..9318be077e2d94e1905a9e97480189a429919716 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/clients/ranker_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (s *client) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Reranker - Cohere", + "documentationHref": "https://txt.cohere.com/rerank/", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/clients/ranker_test.go b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/clients/ranker_test.go new file mode 100644 index 0000000000000000000000000000000000000000..03faf5c2e9fa77e1a8b5376baaad30c3673a6926 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/clients/ranker_test.go @@ -0,0 +1,215 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "sync" + "testing" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func TestRank(t *testing.T) { + t.Run("when the server has a successful response", func(t *testing.T) { + handler := &testRankHandler{ + t: t, + response: RankResponse{ + Results: []Result{ + { + Index: 0, + RelevanceScore: 0.9, + }, + }, + }, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + c.host = server.URL + + expected := &ent.RankResult{ + DocumentScores: []ent.DocumentScore{ + { + Document: "I work at Apple", + Score: 0.9, + }, + }, + Query: "Where do I work?", + } + + res, err := c.Rank(context.Background(), "Where do I work?", []string{"I work at Apple"}, nil) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the server has an error", func(t *testing.T) { + handler := &testRankHandler{ + t: t, + response: RankResponse{ + Results: []Result{}, + }, + errorMessage: "some error from the server", + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + c.host = server.URL + + _, err := c.Rank(context.Background(), "I work at Apple", []string{"Where do I work?"}, nil) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "some error from the server") + }) + + t.Run("when we send requests in batches", func(t *testing.T) { + handler := &testRankHandler{ + t: t, + batchedResults: [][]Result{ + { + { + Index: 0, + RelevanceScore: 0.99, + }, + { + Index: 1, + RelevanceScore: 0.89, + }, + }, + { + { + Index: 0, + RelevanceScore: 0.19, + }, + { + Index: 1, + 
RelevanceScore: 0.29, + }, + }, + { + { + Index: 0, + RelevanceScore: 0.79, + }, + { + Index: 1, + RelevanceScore: 0.789, + }, + }, + { + { + Index: 0, + RelevanceScore: 0.0001, + }, + }, + }, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + c.host = server.URL + // this will trigger 4 go routines + c.maxDocuments = 2 + + query := "Where do I work?" + documents := []string{ + "Response 1", "Response 2", "Response 3", "Response 4", + "Response 5", "Response 6", "Response 7", + } + + resp, err := c.Rank(context.Background(), query, documents, nil) + + require.Nil(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.DocumentScores) + for i := range resp.DocumentScores { + assert.Equal(t, documents[i], resp.DocumentScores[i].Document) + if i == 0 { + assert.Equal(t, 0.99, resp.DocumentScores[i].Score) + } + if i == len(documents)-1 { + assert.Equal(t, 0.0001, resp.DocumentScores[i].Score) + } + } + }) +} + +type testRankHandler struct { + lock sync.RWMutex + t *testing.T + response RankResponse + batchedResults [][]Result + errorMessage string +} + +func (f *testRankHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + f.lock.Lock() + defer f.lock.Unlock() + + if f.errorMessage != "" { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(`{"message":"` + f.errorMessage + `"}`)) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var req RankInput + require.Nil(f.t, json.Unmarshal(bodyBytes, &req)) + + containsDocument := func(req RankInput, in string) bool { + for _, doc := range req.Documents { + if doc == in { + return true + } + } + return false + } + + index := 0 + if len(f.batchedResults) > 0 { + if containsDocument(req, "Response 3") { + index = 1 + } + if containsDocument(req, "Response 5") { + index = 2 + } + if containsDocument(req, "Response 7") { + index = 3 + } + f.response.Results = f.batchedResults[index] + } + + 
outBytes, err := json.Marshal(f.response) + require.Nil(f.t, err) + + w.Write(outBytes) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/config.go b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/config.go new file mode 100644 index 0000000000000000000000000000000000000000..db0b074d3d2c80d3012407a4a5388eab537a5f66 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/config.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modrerankercohere + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *ReRankerCohereModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *ReRankerCohereModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *ReRankerCohereModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..626c3017ecc7e7544280818c3b16a91990ec3c22 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/config/class_settings.go @@ -0,0 +1,79 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ 
\ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + modelProperty = "model" +) + +const ( + DefaultCohereModel = "rerank-v3.5" +) + +var availableCohereModels = []string{ + "rerank-v3.5", + "rerank-english-v3.0", + "rerank-multilingual-v3.0", + "rerank-english-v2.0", + "rerank-multilingual-v2.0", +} + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("reranker-cohere")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + model := ic.getStringProperty(modelProperty, DefaultCohereModel) + if model == nil || !ic.validateModel(*model) { + return errors.Errorf("wrong Cohere model name, available model names are: %v", availableCohereModels) + } + + return nil +} + +func (ic *classSettings) getStringProperty(name string, defaultValue string) *string { + asString := ic.propertyValuesHelper.GetPropertyAsStringWithNotExists(ic.cfg, name, "", defaultValue) + return &asString +} + +func (ic *classSettings) validateModel(model string) bool { + return contains(availableCohereModels, model) +} + +func (ic *classSettings) Model() string { + return *ic.getStringProperty(modelProperty, DefaultCohereModel) +} + +func contains[T comparable](s []T, e T) bool { + for _, v := range s { + if v == e { + return 
true + } + } + return false +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a7b5ec6351fcfcb5e21cfc5c6af07b91cbd9be14 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/config/class_settings_test.go @@ -0,0 +1,99 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantModel string + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantModel: "rerank-v3.5", + }, + { + name: "custom settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "rerank-english-v2.0", + }, + }, + wantModel: "rerank-english-v2.0", + }, + { + name: "unsupported model error", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "rerank-french-v2.0", + }, + }, + wantErr: fmt.Errorf("wrong Cohere model name, available model names are: [rerank-v3.5 rerank-english-v3.0 rerank-multilingual-v3.0 rerank-english-v2.0 rerank-multilingual-v2.0]"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.EqualError(t, ic.Validate(nil), tt.wantErr.Error()) + } 
else { + assert.Equal(t, tt.wantModel, ic.Model()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/module.go b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/module.go new file mode 100644 index 0000000000000000000000000000000000000000..2cd32253111b969552f83880900323bb735e6663 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-cohere/module.go @@ -0,0 +1,86 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modrerankercohere + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/reranker-cohere/clients" + rerankeradditional "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +const Name = "reranker-cohere" + +func New() *ReRankerCohereModule { + return &ReRankerCohereModule{} +} + +type ReRankerCohereModule struct { + reranker ReRankerCohereClient + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +type ReRankerCohereClient interface { + Rank(ctx context.Context, query string, documents []string, cfg moduletools.ClassConfig) (*ent.RankResult, error) + MetaInfo() (map[string]interface{}, error) +} + +func (m *ReRankerCohereModule) Name() string { + return Name +} + +func (m *ReRankerCohereModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextReranker +} + +func (m *ReRankerCohereModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init cross encoder") + } + + return nil +} + +func (m *ReRankerCohereModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("COHERE_APIKEY") + client := clients.New(apiKey, timeout, logger) + m.reranker = client + m.additionalPropertiesProvider = rerankeradditional.NewRankerProvider(m.reranker) + return nil +} + +func (m *ReRankerCohereModule) MetaInfo() (map[string]interface{}, error) { + return m.reranker.MetaInfo() +} + +func (m *ReRankerCohereModule) AdditionalProperties() 
map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.AdditionalProperties(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-dummy/clients/ranker.go b/platform/dbops/binaries/weaviate-src/modules/reranker-dummy/clients/ranker.go new file mode 100644 index 0000000000000000000000000000000000000000..c6189ba09b16ec4e0e86f413b8efd4d54bd45097 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-dummy/clients/ranker.go @@ -0,0 +1,46 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +type client struct { + logger logrus.FieldLogger +} + +func New(logger logrus.FieldLogger) *client { + return &client{ + logger: logger, + } +} + +func (c *client) Rank(ctx context.Context, query string, documents []string, + cfg moduletools.ClassConfig, +) (*ent.RankResult, error) { + documentScores := make([]ent.DocumentScore, 0, len(documents)) + for _, doc := range documents { + documentScores = append(documentScores, ent.DocumentScore{ + Document: doc, + Score: float64(len(doc)), + }) + } + + return &ent.RankResult{ + Query: query, DocumentScores: documentScores, + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-dummy/clients/ranker_meta.go b/platform/dbops/binaries/weaviate-src/modules/reranker-dummy/clients/ranker_meta.go new file mode 100644 index 
0000000000000000000000000000000000000000..5e2a620fe656b4b56fe576e63ddf92bb6a26212a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-dummy/clients/ranker_meta.go @@ -0,0 +1,18 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (s *client) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Reranker - Dummy", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-dummy/config.go b/platform/dbops/binaries/weaviate-src/modules/reranker-dummy/config.go new file mode 100644 index 0000000000000000000000000000000000000000..952dcb359426b78d97837ac83122459948adeef4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-dummy/config.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modrerankerdummy + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *ReRankerDummyModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *ReRankerDummyModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *ReRankerDummyModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-dummy/module.go b/platform/dbops/binaries/weaviate-src/modules/reranker-dummy/module.go new file mode 100644 index 0000000000000000000000000000000000000000..5a201e3d7474322e07eb86a3680bebdd03f0704a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-dummy/module.go @@ -0,0 +1,84 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modrerankerdummy + +import ( + "context" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/reranker-dummy/clients" + rerankeradditional "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +const Name = "reranker-dummy" + +func New() *ReRankerDummyModule { + return &ReRankerDummyModule{} +} + +type ReRankerDummyModule struct { + reranker ReRankerDummyClient + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +type ReRankerDummyClient interface { + Rank(ctx context.Context, query string, documents []string, cfg moduletools.ClassConfig) (*ent.RankResult, error) + MetaInfo() (map[string]interface{}, error) +} + +func (m *ReRankerDummyModule) Name() string { + return Name +} + +func (m *ReRankerDummyModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextReranker +} + +func (m *ReRankerDummyModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init cross encoder") + } + + return nil +} + +func (m *ReRankerDummyModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + client := clients.New(logger) + m.reranker = client + m.additionalPropertiesProvider = rerankeradditional.NewRankerProvider(m.reranker) + return nil +} + +func (m *ReRankerDummyModule) MetaInfo() (map[string]interface{}, error) { + return m.reranker.MetaInfo() +} + +func (m *ReRankerDummyModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + 
+// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.AdditionalProperties(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/clients/ranker.go b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/clients/ranker.go new file mode 100644 index 0000000000000000000000000000000000000000..d96e22cb1f0effdce436ebc04cd3a31f45d1847f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/clients/ranker.go @@ -0,0 +1,248 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "runtime" + "sync" + "time" + + enterrors "github.com/weaviate/weaviate/entities/errors" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/reranker-jinaai/config" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +var _NUMCPU = runtime.NumCPU() + +type client struct { + lock sync.RWMutex + apiKey string + host string + path string + httpClient *http.Client + maxDocuments int + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *client { + return &client{ + apiKey: apiKey, + httpClient: &http.Client{Timeout: timeout}, + host: "https://api.jina.ai", + path: "/v1/rerank", + maxDocuments: 1000, + logger: logger, + } +} + +func (c *client) Rank(ctx context.Context, query string, documents []string, + cfg moduletools.ClassConfig, +) (*ent.RankResult, error) { 
+ eg := enterrors.NewErrorGroupWrapper(c.logger) + eg.SetLimit(_NUMCPU) + + chunkedDocuments := c.chunkDocuments(documents, c.maxDocuments) + documentScoreResponses := make([][]ent.DocumentScore, len(chunkedDocuments)) + for i := range chunkedDocuments { + i := i // https://golang.org/doc/faq#closures_and_goroutines + eg.Go(func() error { + documentScoreResponse, err := c.performRank(ctx, query, chunkedDocuments[i], cfg) + if err != nil { + return err + } + c.lockGuard(func() { + documentScoreResponses[i] = documentScoreResponse + }) + return nil + }, chunkedDocuments[i]) + } + if err := eg.Wait(); err != nil { + return nil, err + } + + return c.toRankResult(query, documentScoreResponses), nil +} + +func (c *client) lockGuard(mutate func()) { + c.lock.Lock() + defer c.lock.Unlock() + mutate() +} + +func (c *client) performRank(ctx context.Context, query string, documents []string, + cfg moduletools.ClassConfig, +) ([]ent.DocumentScore, error) { + settings := config.NewClassSettings(cfg) + jinaaiUrl, err := url.JoinPath(c.host, c.path) + if err != nil { + return nil, errors.Wrap(err, "join Jinaai API host and path") + } + + input := RankInput{ + Documents: documents, + Query: query, + Model: settings.Model(), + } + + body, err := json.Marshal(input) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", jinaaiUrl, bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + + apiKey, err := c.getApiKey(ctx) + if err != nil { + return nil, errors.Wrapf(err, "Jinaai API Key") + } + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey)) + req.Header.Add("Content-Type", "application/json") + req.Header.Add("Request-Source", "unspecified:weaviate") + + res, err := c.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return 
nil, errors.Wrap(err, "read response body") + } + + if res.StatusCode != 200 { + var apiError jinaaiApiError + err = json.Unmarshal(bodyBytes, &apiError) + if err != nil { + return nil, errors.Wrap(err, "unmarshal error from response body") + } + if apiError.Detail != "" { + return nil, errors.Errorf("connection to Jinaai API failed with status %d: %s", res.StatusCode, apiError.Detail) + } + return nil, errors.Errorf("connection to Jinaai API failed with status %d", res.StatusCode) + } + + var rankResponse RankResponse + if err := json.Unmarshal(bodyBytes, &rankResponse); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + return c.toDocumentScores(documents, rankResponse.Results), nil +} + +func (c *client) chunkDocuments(documents []string, chunkSize int) [][]string { + var requests [][]string + for i := 0; i < len(documents); i += chunkSize { + end := i + chunkSize + + if end > len(documents) { + end = len(documents) + } + + requests = append(requests, documents[i:end]) + } + + return requests +} + +func (c *client) toDocumentScores(documents []string, results []Result) []ent.DocumentScore { + documentScores := make([]ent.DocumentScore, len(results)) + for _, result := range results { + documentScores[result.Index] = ent.DocumentScore{ + Document: documents[result.Index], + Score: result.RelevanceScore, + } + } + return documentScores +} + +func (c *client) toRankResult(query string, results [][]ent.DocumentScore) *ent.RankResult { + documentScores := []ent.DocumentScore{} + for i := range results { + documentScores = append(documentScores, results[i]...) 
+ } + return &ent.RankResult{ + Query: query, + DocumentScores: documentScores, + } +} + +func (c *client) getApiKey(ctx context.Context) (string, error) { + if len(c.apiKey) > 0 { + return c.apiKey, nil + } + key := "X-Jinaai-Api-Key" + + apiKey := ctx.Value(key) + // try getting header from GRPC if not successful + if apiKey == nil { + apiKey = modulecomponents.GetValueFromGRPC(ctx, key) + } + if apiKeyHeader, ok := apiKey.([]string); ok && + len(apiKeyHeader) > 0 && len(apiKeyHeader[0]) > 0 { + return apiKeyHeader[0], nil + } + return "", errors.New("no api key found " + + "neither in request header: X-Jinaai-Api-Key " + + "nor in environment variable under JINAAI_APIKEY") +} + +type RankInput struct { + Documents []string `json:"documents"` + Query string `json:"query"` + Model string `json:"model"` + TopN int `json:"top_n,omitempty"` +} + +type Document struct { + Text string `json:"text"` +} + +type Result struct { + Index int `json:"index"` + RelevanceScore float64 `json:"relevance_score"` + Document Document `json:"document"` +} + +type APIVersion struct { + Version string `json:"version"` +} + +type Usage struct { + TotalTokens int `json:"total_tokens"` + PromptTokens int `json:"prompt_tokens"` +} + +type RankResponse struct { + Results []Result `json:"results"` + Model string `json:"model"` + Usage Usage `json:"usage"` +} + +type jinaaiApiError struct { + Detail string `json:"detail"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/clients/ranker_meta.go b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/clients/ranker_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..222b55b19c1c17ffcc46602210017636bcf50d93 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/clients/ranker_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ 
|_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (s *client) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Reranker - Jinaai", + "documentationHref": "https://jina.ai/reranker", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/clients/ranker_test.go b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/clients/ranker_test.go new file mode 100644 index 0000000000000000000000000000000000000000..4a8b034e0b091cf57d1fce40a03740049ae06575 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/clients/ranker_test.go @@ -0,0 +1,215 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "sync" + "testing" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func TestRank(t *testing.T) { + t.Run("when the server has a successful response", func(t *testing.T) { + handler := &testRankHandler{ + t: t, + response: RankResponse{ + Results: []Result{ + { + Index: 0, + RelevanceScore: 0.9, + }, + }, + }, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + c.host = server.URL + + expected := &ent.RankResult{ + DocumentScores: []ent.DocumentScore{ + { + Document: "I work at Apple", + Score: 0.9, + }, + }, + Query: "Where do I 
work?", + } + + res, err := c.Rank(context.Background(), "Where do I work?", []string{"I work at Apple"}, nil) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the server has an error", func(t *testing.T) { + handler := &testRankHandler{ + t: t, + response: RankResponse{ + Results: []Result{}, + }, + errorMessage: "some error from the server", + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + c.host = server.URL + + _, err := c.Rank(context.Background(), "I work at Apple", []string{"Where do I work?"}, nil) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "some error from the server") + }) + + t.Run("when we send requests in batches", func(t *testing.T) { + handler := &testRankHandler{ + t: t, + batchedResults: [][]Result{ + { + { + Index: 0, + RelevanceScore: 0.99, + }, + { + Index: 1, + RelevanceScore: 0.89, + }, + }, + { + { + Index: 0, + RelevanceScore: 0.19, + }, + { + Index: 1, + RelevanceScore: 0.29, + }, + }, + { + { + Index: 0, + RelevanceScore: 0.79, + }, + { + Index: 1, + RelevanceScore: 0.789, + }, + }, + { + { + Index: 0, + RelevanceScore: 0.0001, + }, + }, + }, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + c.host = server.URL + // this will trigger 4 go routines + c.maxDocuments = 2 + + query := "Where do I work?" 
+ documents := []string{ + "Response 1", "Response 2", "Response 3", "Response 4", + "Response 5", "Response 6", "Response 7", + } + + resp, err := c.Rank(context.Background(), query, documents, nil) + + require.Nil(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.DocumentScores) + for i := range resp.DocumentScores { + assert.Equal(t, documents[i], resp.DocumentScores[i].Document) + if i == 0 { + assert.Equal(t, 0.99, resp.DocumentScores[i].Score) + } + if i == len(documents)-1 { + assert.Equal(t, 0.0001, resp.DocumentScores[i].Score) + } + } + }) +} + +type testRankHandler struct { + lock sync.RWMutex + t *testing.T + response RankResponse + batchedResults [][]Result + errorMessage string +} + +func (f *testRankHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + f.lock.Lock() + defer f.lock.Unlock() + + if f.errorMessage != "" { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(`{"detail":"` + f.errorMessage + `"}`)) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var req RankInput + require.Nil(f.t, json.Unmarshal(bodyBytes, &req)) + + containsDocument := func(req RankInput, in string) bool { + for _, doc := range req.Documents { + if doc == in { + return true + } + } + return false + } + + index := 0 + if len(f.batchedResults) > 0 { + if containsDocument(req, "Response 3") { + index = 1 + } + if containsDocument(req, "Response 5") { + index = 2 + } + if containsDocument(req, "Response 7") { + index = 3 + } + f.response.Results = f.batchedResults[index] + } + + outBytes, err := json.Marshal(f.response) + require.Nil(f.t, err) + + w.Write(outBytes) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/config.go b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..dafcea64ebd54f9bfa4030923a76997891107539 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/config.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modrerankerjinaai + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *ReRankerJinaaiModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *ReRankerJinaaiModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *ReRankerJinaaiModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..4d959690f4056baa213e4698146d8bac3e6f8066 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/config/class_settings.go @@ -0,0 +1,57 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + modelProperty = "model" +) + +const ( + DefaultJinaaiModel = "jina-reranker-v2-base-multilingual" +) + +var availableJinaaiModels = []string{ + "jina-reranker-v2-base-multilingual", + "jina-reranker-v1-base-en", + "jina-reranker-v1-turbo-en", + "jina-reranker-v1-tiny-en", + "jina-colbert-v1-en", +} + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("reranker-jinaai")} +} + +func (ic *classSettings) Model() string { + return ic.propertyValuesHelper.GetPropertyAsStringWithNotExists(ic.cfg, modelProperty, "", DefaultJinaaiModel) +} + +func (ic *classSettings) Validate(class *models.Class) error { + model := ic.Model() + if !basesettings.ValidateSetting(model, availableJinaaiModels) { + return errors.Errorf("wrong Jinaai model name, available model names are: %v", availableJinaaiModels) + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..bc0506ec2519bfbf9862427e4535b32b482c8471 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/config/class_settings_test.go @@ -0,0 +1,99 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 
Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantModel string + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantModel: "jina-reranker-v2-base-multilingual", + }, + { + name: "custom settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "jina-reranker-v1-base-en", + }, + }, + wantModel: "jina-reranker-v1-base-en", + }, + { + name: "unsupported model error", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "jina-reranker-v1-base-pl", + }, + }, + wantErr: fmt.Errorf("wrong Jinaai model name, available model names are: [jina-reranker-v2-base-multilingual jina-reranker-v1-base-en jina-reranker-v1-turbo-en jina-reranker-v1-tiny-en jina-colbert-v1-en]"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.EqualError(t, ic.Validate(nil), tt.wantErr.Error()) + } else { + assert.Equal(t, tt.wantModel, ic.Model()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) 
PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/module.go b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..0f5fda1e1d00e4a5d21006e560da74acffd5e1de --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-jinaai/module.go @@ -0,0 +1,86 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modrerankerjinaai + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/reranker-jinaai/clients" + rerankeradditional "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +const Name = "reranker-jinaai" + +func New() *ReRankerJinaaiModule { + return &ReRankerJinaaiModule{} +} + +type ReRankerJinaaiModule struct { + reranker ReRankerJinaaiClient + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +type ReRankerJinaaiClient interface { + Rank(ctx context.Context, query string, documents []string, cfg moduletools.ClassConfig) (*ent.RankResult, error) + MetaInfo() (map[string]interface{}, error) +} + +func (m *ReRankerJinaaiModule) Name() string { + return Name +} + +func (m *ReRankerJinaaiModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextReranker +} + +func (m *ReRankerJinaaiModule) Init(ctx context.Context, + params 
moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init cross encoder") + } + + return nil +} + +func (m *ReRankerJinaaiModule) initAdditional(_ context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("JINAAI_APIKEY") + client := clients.New(apiKey, timeout, logger) + m.reranker = client + m.additionalPropertiesProvider = rerankeradditional.NewRankerProvider(m.reranker) + return nil +} + +func (m *ReRankerJinaaiModule) MetaInfo() (map[string]interface{}, error) { + return m.reranker.MetaInfo() +} + +func (m *ReRankerJinaaiModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.AdditionalProperties(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/clients/ranker.go b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/clients/ranker.go new file mode 100644 index 0000000000000000000000000000000000000000..3207b092b773d569edf4657421be0a053571acc4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/clients/ranker.go @@ -0,0 +1,257 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+//
+// CONTACT: hello@weaviate.io
+//
+
+package client
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"runtime"
+	"strings"
+	"sync"
+	"time"
+
+	enterrors "github.com/weaviate/weaviate/entities/errors"
+
+	"github.com/pkg/errors"
+	"github.com/sirupsen/logrus"
+	"github.com/weaviate/weaviate/entities/moduletools"
+	"github.com/weaviate/weaviate/modules/reranker-nvidia/config"
+	"github.com/weaviate/weaviate/usecases/modulecomponents"
+	"github.com/weaviate/weaviate/usecases/modulecomponents/ent"
+)
+
+var _NUMCPU = runtime.NumCPU()
+
+// client talks to the NVIDIA reranking API. Ranking requests are fanned out
+// concurrently in chunks of at most maxDocuments documents each.
+type client struct {
+	lock         sync.RWMutex // guards concurrent writes into the per-chunk result slice
+	apiKey       string       // from NVIDIA_APIKEY; may be empty (see getApiKey)
+	httpClient   *http.Client
+	maxDocuments int
+	logger       logrus.FieldLogger
+}
+
+// New builds an NVIDIA reranker client. apiKey may be empty; in that case the
+// key must arrive per request via the X-Nvidia-Api-Key header.
+func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *client {
+	return &client{
+		apiKey:       apiKey,
+		httpClient:   &http.Client{Timeout: timeout},
+		maxDocuments: 512,
+		logger:       logger,
+	}
+}
+
+// Rank scores documents against query. Documents are split into chunks of
+// maxDocuments and ranked concurrently (bounded by the number of CPUs); the
+// per-chunk scores are concatenated back in the original chunk order.
+func (c *client) Rank(ctx context.Context, query string, documents []string,
+	cfg moduletools.ClassConfig,
+) (*ent.RankResult, error) {
+	eg := enterrors.NewErrorGroupWrapper(c.logger)
+	eg.SetLimit(_NUMCPU)
+
+	chunkedDocuments := c.chunkDocuments(documents, c.maxDocuments)
+	documentScoreResponses := make([][]ent.DocumentScore, len(chunkedDocuments))
+	for i := range chunkedDocuments {
+		i := i // https://golang.org/doc/faq#closures_and_goroutines
+		eg.Go(func() error {
+			documentScoreResponse, err := c.performRank(ctx, query, chunkedDocuments[i], cfg)
+			if err != nil {
+				return err
+			}
+			c.lockGuard(func() {
+				documentScoreResponses[i] = documentScoreResponse
+			})
+			return nil
+		}, chunkedDocuments[i])
+	}
+	if err := eg.Wait(); err != nil {
+		return nil, err
+	}
+
+	return c.toRankResult(query, documentScoreResponses), nil
+}
+
+// lockGuard runs mutate while holding the write lock.
+func (c *client) lockGuard(mutate func()) {
+	c.lock.Lock()
+	defer c.lock.Unlock()
+	mutate()
+}
+
+// toRankResult flattens the per-chunk score slices into a single RankResult.
+func (c *client) toRankResult(query string, results [][]ent.DocumentScore) *ent.RankResult {
+	documentScores := []ent.DocumentScore{}
+	for i := range results {
+		documentScores = append(documentScores, results[i]...)
+	}
+	return &ent.RankResult{
+		Query:          query,
+		DocumentScores: documentScores,
+	}
+}
+
+// performRank issues a single reranking request for one chunk of documents.
+func (c *client) performRank(ctx context.Context,
+	query string, documents []string, cfg moduletools.ClassConfig,
+) ([]ent.DocumentScore, error) {
+	settings := config.NewClassSettings(cfg)
+	body, err := json.Marshal(c.getRankRequest(settings.Model(), query, documents))
+	if err != nil {
+		return nil, errors.Wrapf(err, "marshal body")
+	}
+	url := c.getNvidiaUrl(ctx, settings.BaseURL())
+	req, err := http.NewRequestWithContext(ctx, "POST", url,
+		bytes.NewReader(body))
+	if err != nil {
+		return nil, errors.Wrap(err, "create POST request")
+	}
+	apiKey, err := c.getApiKey(ctx)
+	if err != nil {
+		return nil, errors.Wrapf(err, "Nvidia API Key")
+	}
+	req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey))
+	req.Header.Add("Accept", "application/json")
+	req.Header.Add("Content-Type", "application/json")
+
+	res, err := c.httpClient.Do(req)
+	if err != nil {
+		return nil, errors.Wrap(err, "send POST request")
+	}
+	defer res.Body.Close()
+	bodyBytes, err := io.ReadAll(res.Body)
+	if err != nil {
+		return nil, errors.Wrap(err, "read response body")
+	}
+
+	if res.StatusCode != 200 {
+		return nil, c.getResponseError(res.StatusCode, bodyBytes)
+	}
+
+	var resBody rankResponse
+	if err := json.Unmarshal(bodyBytes, &resBody); err != nil {
+		// Wrapf replaces the previous Wrap+Sprintf combination: same message,
+		// one call.
+		return nil, errors.Wrapf(err, "unmarshal response body. Got: %v", string(bodyBytes))
+	}
+
+	return c.toDocumentScores(documents, resBody.Rankings), nil
+}
+
+// getRankRequest builds the request payload for one chunk of documents.
+func (c *client) getRankRequest(model, query string, documents []string) rankRequest {
+	passages := make([]text, len(documents))
+	for i := range documents {
+		passages[i] = text{Text: documents[i]}
+	}
+	return rankRequest{
+		Model:    model,
+		Query:    text{Text: query},
+		Passages: passages,
+	}
+}
+
+// toDocumentScores maps API rankings back onto the submitted documents using
+// each ranking's Index field, restoring the original document order.
+func (c *client) toDocumentScores(documents []string, results []ranking) []ent.DocumentScore {
+	documentScores := make([]ent.DocumentScore, len(results))
+	for _, result := range results {
+		// Guard against an out-of-range index from the API so a malformed
+		// response cannot panic the caller; such entries are skipped.
+		if result.Index < 0 || result.Index >= len(documents) || result.Index >= len(documentScores) {
+			continue
+		}
+		documentScores[result.Index] = ent.DocumentScore{
+			Document: documents[result.Index],
+			Score:    result.Logit,
+		}
+	}
+	return documentScores
+}
+
+// chunkDocuments splits documents into consecutive slices of at most
+// chunkSize elements; the last chunk may be shorter.
+func (c *client) chunkDocuments(documents []string, chunkSize int) [][]string {
+	var requests [][]string
+	for i := 0; i < len(documents); i += chunkSize {
+		end := i + chunkSize
+
+		if end > len(documents) {
+			end = len(documents)
+		}
+
+		requests = append(requests, documents[i:end])
+	}
+
+	return requests
+}
+
+// getNvidiaUrl resolves the reranking endpoint, letting a per-request
+// X-Nvidia-Baseurl header override the class-configured base URL.
+func (c *client) getNvidiaUrl(ctx context.Context, baseURL string) string {
+	passedBaseURL := baseURL
+	if headerBaseURL := modulecomponents.GetValueFromContext(ctx, "X-Nvidia-Baseurl"); headerBaseURL != "" {
+		passedBaseURL = headerBaseURL
+	}
+	return fmt.Sprintf("%s/v1/retrieval/nvidia/reranking", passedBaseURL)
+}
+
+// getApiKey prefers a per-request X-Nvidia-Api-Key header over the key set at
+// construction time (NVIDIA_APIKEY); it errors when neither is present.
+func (c *client) getApiKey(ctx context.Context) (string, error) {
+	if apiKey := modulecomponents.GetValueFromContext(ctx, "X-Nvidia-Api-Key"); apiKey != "" {
+		return apiKey, nil
+	}
+	if c.apiKey != "" {
+		return c.apiKey, nil
+	}
+	return "", errors.New("no api key found " +
+		"neither in request header: X-Nvidia-Api-Key " +
+		"nor in environment variable under NVIDIA_APIKEY")
+}
+
+// getResponseError translates a non-200 NVIDIA API response into an error,
+// extracting the structured detail message(s) for the known status codes.
+func (c *client) getResponseError(statusCode int, bodyBytes []byte) error {
+	switch statusCode {
+	case 402, 403:
+		var resBody responseError402
+		if err := json.Unmarshal(bodyBytes, &resBody); err != nil {
+			return fmt.Errorf("connection to NVIDIA API failed with status: %d: unmarshal response body: %w: got: %v",
+				statusCode, err, string(bodyBytes))
+		}
+		return fmt.Errorf("connection to NVIDIA API failed with status: %d error: %s", statusCode, resBody.Detail)
+	case 422:
+		var resBody responseError422
+		if err := json.Unmarshal(bodyBytes, &resBody); err != nil {
+			return fmt.Errorf("connection to NVIDIA API failed with status: %d: unmarshal response body: %w: got: %v",
				statusCode, err, string(bodyBytes))
+		}
+		details := make([]string, len(resBody.Detail))
+		for i := range resBody.Detail {
+			// bug fix: collect the i-th detail message; the previous code
+			// used Detail[0] and repeated the first message for every entry
+			details[i] = resBody.Detail[i].Message
+		}
+		return fmt.Errorf("connection to NVIDIA API failed with status: %d error: %s", statusCode, strings.Join(details, " "))
+	default:
+		return fmt.Errorf("connection to NVIDIA API failed with status: %d", statusCode)
+	}
+}
+
+// rankRequest is the JSON payload for the NVIDIA reranking endpoint.
+type rankRequest struct {
+	Model    string  `json:"model"`
+	Query    text    `json:"query"`
+	Passages []text  `json:"passages"`
+	Truncate *string `json:"truncate,omitempty"`
+}
+
+type text struct {
+	Text string `json:"text"`
+}
+
+// rankResponse is the JSON shape of a successful reranking response.
+type rankResponse struct {
+	Rankings []ranking `json:"rankings"`
+}
+
+type ranking struct {
+	Index int     `json:"index"`
+	Logit float64 `json:"logit"`
+}
+
+// responseError402 is the error body for 402/403 responses.
+type responseError402 struct {
+	Detail string `json:"detail"`
+}
+
+// responseError422 is the error body for 422 validation failures.
+type responseError422 struct {
+	Detail []errorDetail `json:"detail"`
+}
+
+type errorDetail struct {
+	Message string `json:"msg"`
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/clients/ranker_meta.go b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/clients/ranker_meta.go
new file mode 100644
index 0000000000000000000000000000000000000000..8ed547ebe69d80c6600ba39fca03be2ac8b49ee0
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/clients/ranker_meta.go
@@ -0,0 +1,19 @@
+//                           _       _
+//  __      _____  __ ___   ___  __ _| |_ ___
+//  \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//   \ V  V /  __/ (_| |\ V /| | (_| | || __/
+// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package client + +func (s *client) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Reranker - NVIDIA", + "documentationHref": "https://docs.api.nvidia.com/nim/reference/retrieval-apis", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/clients/ranker_test.go b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/clients/ranker_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5dcdacc25f0af0f3d4cacdec1b43668af7ec8117 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/clients/ranker_test.go @@ -0,0 +1,243 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "sync" + "testing" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +func TestGetScore(t *testing.T) { + t.Run("when the server has a successful answer", func(t *testing.T) { + server := httptest.NewServer(&testCrossRankerHandler{ + t: t, + res: rankResponse{ + Rankings: []ranking{ + {Index: 0, Logit: 0.15}, + }, + }, + }) + defer server.Close() + c := New("key", 0, nullLogger()) + + cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}} + res, err := c.Rank(context.Background(), "Where do I work?", []string{"I work at Apple"}, cfg) + + assert.Nil(t, err) + assert.Equal(t, ent.RankResult{ + Query: "Where do I work?", + DocumentScores: []ent.DocumentScore{ + { + Document: "I work at Apple", + Score: 0.15, + }, + }, + }, *res) + }) + + t.Run("when the server has an error", func(t *testing.T) { + server := httptest.NewServer(&testCrossRankerHandler{ + t: t, + error402: &responseError402{ + Detail: "some error from the server", + }, + }) + defer server.Close() + c := New("key", 0, nullLogger()) + + cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}} + _, err := c.Rank(context.Background(), "prop", []string{"I work at Apple"}, cfg) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "some error from the server") + }) + + t.Run("when we send requests in batches", func(t *testing.T) { + server := httptest.NewServer(&testCrossRankerHandler{ + t: t, + batchedResults: []rankResponse{ + { + Rankings: []ranking{ + {Index: 0, Logit: 0.99}, + {Index: 1, Logit: 0.89}, + 
}, + }, + { + Rankings: []ranking{ + {Index: 0, Logit: 0.299}, + {Index: 1, Logit: 0.289}, + }, + }, + { + Rankings: []ranking{ + {Index: 0, Logit: 0.199}, + {Index: 1, Logit: 0.189}, + }, + }, + { + Rankings: []ranking{ + {Index: 0, Logit: 0.0001}, + }, + }, + }, + }) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + c.maxDocuments = 2 + + query := "Where do I work?" + documents := []string{ + "Response 1", "Response 2", "Response 3", "Response 4", + "Response 5", "Response 6", "Response 7", + } + + cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}} + resp, err := c.Rank(context.Background(), query, documents, cfg) + + require.Nil(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.DocumentScores) + for i := range resp.DocumentScores { + assert.Equal(t, documents[i], resp.DocumentScores[i].Document) + if i == 0 { + assert.Equal(t, 0.99, resp.DocumentScores[i].Score) + } + if i == len(documents)-1 { + assert.Equal(t, 0.0001, resp.DocumentScores[i].Score) + } + } + }) +} + +type testCrossRankerHandler struct { + lock sync.RWMutex + t *testing.T + res rankResponse + batchedResults []rankResponse + error402 *responseError402 +} + +func (f *testCrossRankerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + f.lock.Lock() + defer f.lock.Unlock() + + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.error402 != nil { + w.WriteHeader(http.StatusPaymentRequired) + jsonBytes, _ := json.Marshal(f.error402) + w.Write(jsonBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var req rankRequest + require.Nil(f.t, json.Unmarshal(bodyBytes, &req)) + + containsDocument := func(req rankRequest, in string) bool { + for _, doc := range req.Passages { + if doc.Text == in { + return true + } + } + return false + } + + index := 0 + if len(f.batchedResults) > 0 { + if containsDocument(req, "Response 3") { + index = 1 + } + if 
containsDocument(req, "Response 5") { + index = 2 + } + if containsDocument(req, "Response 7") { + index = 3 + } + f.res = f.batchedResults[index] + } + + jsonBytes, _ := json.Marshal(f.res) + w.Write(jsonBytes) +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/config.go b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/config.go new file mode 100644 index 0000000000000000000000000000000000000000..80a63c78dc014a0b4ec697b9c520f1fb6b0f16cc --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/config.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modrerankernvidia + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *RerankerModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *RerankerModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *RerankerModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..dae48a2c035c66fcf66d77ed491fa31775f68b7b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/config/class_settings.go @@ -0,0 +1,44 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + DefaultBaseURL = "https://ai.api.nvidia.com" + DefaultNvidiaModel = "nvidia/rerank-qa-mistral-4b" +) + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("reranker-nvidia")} +} + +func (cs *classSettings) BaseURL() string { + return cs.propertyValuesHelper.GetPropertyAsString(cs.cfg, "baseURL", DefaultBaseURL) +} + +func (cs *classSettings) Model() string { + return cs.propertyValuesHelper.GetPropertyAsString(cs.cfg, "model", DefaultNvidiaModel) +} + +func (ic *classSettings) Validate(class *models.Class) error { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1e203f377486f1cb4c2acf95c34fe7a16b558011 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/config/class_settings_test.go @@ -0,0 +1,89 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantModel string + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantModel: "nvidia/rerank-qa-mistral-4b", + }, + { + name: "custom settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "custom-model", + }, + }, + wantModel: "custom-model", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.EqualError(t, ic.Validate(nil), tt.wantErr.Error()) + } else { + assert.Equal(t, tt.wantModel, ic.Model()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/module.go b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/module.go new file mode 100644 index 0000000000000000000000000000000000000000..a7f0d5dcff727707f7af8b5e9d7a34233788e326 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/reranker-nvidia/module.go @@ -0,0 +1,86 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modrerankernvidia + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + client "github.com/weaviate/weaviate/modules/reranker-nvidia/clients" + additionalprovider "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +const Name = "reranker-nvidia" + +func New() *RerankerModule { + return &RerankerModule{} +} + +type RerankerModule struct { + reranker ReRankerClient + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +type ReRankerClient interface { + Rank(ctx context.Context, query string, documents []string, cfg moduletools.ClassConfig) (*ent.RankResult, error) + MetaInfo() (map[string]interface{}, error) +} + +func (m *RerankerModule) Name() string { + return Name +} + +func (m *RerankerModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextReranker +} + +func (m *RerankerModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init NVIDIA reranker") + } + + return nil +} + +func (m *RerankerModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("NVIDIA_APIKEY") + client := client.New(apiKey, timeout, logger) + m.additionalPropertiesProvider = 
additionalprovider.NewRankerProvider(client) + m.reranker = client + return nil +} + +func (m *RerankerModule) MetaInfo() (map[string]interface{}, error) { + return m.reranker.MetaInfo() +} + +func (m *RerankerModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.AdditionalProperties(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/ranker.go b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/ranker.go new file mode 100644 index 0000000000000000000000000000000000000000..d5c2c8050db2becb2210eb57884e5edfe615b68f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/ranker.go @@ -0,0 +1,182 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+//
+// CONTACT: hello@weaviate.io
+//
+
+package client
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"runtime"
+	"sync"
+	"time"
+
+	enterrors "github.com/weaviate/weaviate/entities/errors"
+
+	"github.com/pkg/errors"
+	"github.com/sirupsen/logrus"
+	"github.com/weaviate/weaviate/entities/moduletools"
+	"github.com/weaviate/weaviate/usecases/modulecomponents/ent"
+)
+
+var _NUMCPU = runtime.NumCPU()
+
+// client talks to a reranker-transformers inference container at origin.
+// Ranking requests are fanned out concurrently in chunks of maxDocuments.
+type client struct {
+	lock         sync.RWMutex // guards concurrent writes into the per-chunk result slice
+	origin       string
+	httpClient   *http.Client
+	maxDocuments int
+	logger       logrus.FieldLogger
+}
+
+// New builds a client for the transformers inference container at origin.
+func New(origin string, timeout time.Duration, logger logrus.FieldLogger) *client {
+	return &client{
+		origin:       origin,
+		httpClient:   &http.Client{Timeout: timeout},
+		maxDocuments: 32,
+		logger:       logger,
+	}
+}
+
+// Rank scores documents against query. Documents are split into chunks of
+// maxDocuments and ranked concurrently (bounded by the number of CPUs); the
+// per-chunk scores are concatenated back in the original chunk order.
+func (c *client) Rank(ctx context.Context,
+	query string, documents []string, cfg moduletools.ClassConfig,
+) (*ent.RankResult, error) {
+	eg := enterrors.NewErrorGroupWrapper(c.logger)
+	eg.SetLimit(_NUMCPU)
+
+	chunkedDocuments := c.chunkDocuments(documents, c.maxDocuments)
+	documentScoreResponses := make([][]DocumentScore, len(chunkedDocuments))
+	for i := range chunkedDocuments {
+		i := i // https://golang.org/doc/faq#closures_and_goroutines
+		eg.Go(func() error {
+			documentScoreResponse, err := c.performRank(ctx, query, chunkedDocuments[i], cfg)
+			if err != nil {
+				return err
+			}
+			c.lockGuard(func() {
+				documentScoreResponses[i] = documentScoreResponse
+			})
+			return nil
+		}, chunkedDocuments[i])
+	}
+	if err := eg.Wait(); err != nil {
+		return nil, err
+	}
+
+	return c.toRankResult(query, documentScoreResponses), nil
+}
+
+// lockGuard runs mutate while holding the write lock.
+func (c *client) lockGuard(mutate func()) {
+	c.lock.Lock()
+	defer c.lock.Unlock()
+	mutate()
+}
+
+// toRankResult flattens the per-chunk score slices into a single RankResult,
+// converting the wire-format DocumentScore into the ent type.
+func (c *client) toRankResult(query string, scores [][]DocumentScore) *ent.RankResult {
+	documentScores := []ent.DocumentScore{}
+	for _, docScores := range scores {
+		for i := range docScores {
+			documentScores = append(documentScores, ent.DocumentScore{
+				Document: docScores[i].Document,
+				Score:    docScores[i].Score,
+			})
+		}
+	}
+	return &ent.RankResult{
+		Query:          query,
+		DocumentScores: documentScores,
+	}
+}
+
+// performRank issues one /rerank request for a chunk of documents.
+func (c *client) performRank(ctx context.Context,
+	query string, documents []string, cfg moduletools.ClassConfig,
+) ([]DocumentScore, error) {
+	body, err := json.Marshal(RankInput{
+		Query:     query,
+		Documents: documents,
+	})
+	if err != nil {
+		return nil, errors.Wrapf(err, "marshal body")
+	}
+
+	req, err := http.NewRequestWithContext(ctx, "POST", c.url("/rerank"),
+		bytes.NewReader(body))
+	if err != nil {
+		return nil, errors.Wrap(err, "create POST request")
+	}
+
+	res, err := c.httpClient.Do(req)
+	if err != nil {
+		return nil, errors.Wrap(err, "send POST request")
+	}
+	defer res.Body.Close()
+
+	bodyBytes, err := io.ReadAll(res.Body)
+	if err != nil {
+		return nil, errors.Wrap(err, "read response body")
+	}
+
+	// Decode before inspecting the status so a structured error payload can
+	// be surfaced, but keep the status check authoritative: previously a
+	// non-200 response with a non-JSON body reported an unmarshal error and
+	// hid the HTTP status code.
+	var resBody RankResponse
+	unmarshalErr := json.Unmarshal(bodyBytes, &resBody)
+
+	if res.StatusCode != 200 {
+		if unmarshalErr == nil && resBody.Error != "" {
+			return nil, errors.Errorf("fail with status %d: %s", res.StatusCode,
+				resBody.Error)
+		}
+		return nil, errors.Errorf("fail with status %d", res.StatusCode)
+	}
+
+	if unmarshalErr != nil {
+		return nil, errors.Wrapf(unmarshalErr, "unmarshal response body. Got: %v", string(bodyBytes))
+	}
+
+	return resBody.Scores, nil
+}
+
+// chunkDocuments splits documents into consecutive slices of at most
+// chunkSize elements; the last chunk may be shorter.
+func (c *client) chunkDocuments(documents []string, chunkSize int) [][]string {
+	var requests [][]string
+	for i := 0; i < len(documents); i += chunkSize {
+		end := i + chunkSize
+
+		if end > len(documents) {
+			end = len(documents)
+		}
+
+		requests = append(requests, documents[i:end])
+	}
+
+	return requests
+}
+
+// url joins the configured origin with an endpoint path.
+func (c *client) url(path string) string {
+	return fmt.Sprintf("%s%s", c.origin, path)
+}
+
+// RankInput is the JSON payload sent to the /rerank endpoint.
+type RankInput struct {
+	Query             string   `json:"query"`
+	Documents         []string `json:"documents"`
+	RankPropertyValue string   `json:"property"`
+}
+
+// DocumentScore is one scored document in a /rerank response.
+type DocumentScore struct {
+	Document string  `json:"document"`
+	Score    float64 `json:"score"`
+}
+
+// RankResponse is the JSON shape of a /rerank response; Error is set by the
+// container on failure.
+type RankResponse struct {
+	Query             string          `json:"query"`
+	Scores            []DocumentScore `json:"scores"`
+	RankPropertyValue string          `json:"property"`
+	Score             float64         `json:"score"`
+	Error             string          `json:"error"`
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/ranker_meta.go b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/ranker_meta.go
new file mode 100644
index 0000000000000000000000000000000000000000..fed2b975ed286ec23850856e4c1d7da9f8165b1c
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/ranker_meta.go
@@ -0,0 +1,45 @@
+//                           _       _
+//  __      _____  __ ___   ___  __ _| |_ ___
+//  \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//   \ V  V /  __/ (_| |\ V /| | (_| | || __/
+//    \_/\_/ \___|\__,_|  \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "context" + "encoding/json" + "io" + "net/http" + + "github.com/pkg/errors" +) + +func (s *client) MetaInfo() (map[string]interface{}, error) { + req, err := http.NewRequestWithContext(context.Background(), "GET", s.url("/meta"), nil) + if err != nil { + return nil, errors.Wrap(err, "create GET meta request") + } + + res, err := s.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send GET meta request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read meta response body") + } + + var resBody map[string]interface{} + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, "unmarshal meta response body") + } + return resBody, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/ranker_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/ranker_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a40f3785ff1b440a73d86fe129db18b4d39d7bf3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/ranker_meta_test.go @@ -0,0 +1,156 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["model"] + assert.True(t, metaModel != nil) + model, modelOK := metaModel.(map[string]interface{}) + assert.True(t, modelOK) + assert.True(t, model["_name_or_path"] != nil) + assert.True(t, model["architectures"] != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "model": { + "_name_or_path": "dbmdz/bert-large-cased-finetuned-conll03-english", + "_num_labels": 9, + "add_cross_attention": false, + "architectures": [ + "BertForTokenClassification" + ], + "attention_probs_dropout_prob": 0.1, + "bad_words_ids": null, + "bos_token_id": null, + "chunk_size_feed_forward": 0, + "decoder_start_token_id": null, + "directionality": "bidi", + "diversity_penalty": 0, + "do_sample": false, + "early_stopping": false, + "encoder_no_repeat_ngram_size": 0, + "eos_token_id": null, + "finetuning_task": null, + "forced_bos_token_id": null, + "forced_eos_token_id": null, + "gradient_checkpointing": false, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 1024, + "id2label": { + "0": "O", + "1": "B-MISC", + "2": "I-MISC", + "3": 
"B-PER", + "4": "I-PER", + "5": "B-ORG", + "6": "I-ORG", + "7": "B-LOC", + "8": "I-LOC" + }, + "initializer_range": 0.02, + "intermediate_size": 4096, + "is_decoder": false, + "is_encoder_decoder": false, + "label2id": { + "B-LOC": 7, + "B-MISC": 1, + "B-ORG": 5, + "B-PER": 3, + "I-LOC": 8, + "I-MISC": 2, + "I-ORG": 6, + "I-PER": 4, + "O": 0 + }, + "layer_norm_eps": 1e-12, + "length_penalty": 1, + "max_length": 20, + "max_position_embeddings": 512, + "min_length": 0, + "model_type": "bert", + "no_repeat_ngram_size": 0, + "num_attention_heads": 16, + "num_beam_groups": 1, + "num_beams": 1, + "num_hidden_layers": 24, + "num_return_sequences": 1, + "output_attentions": false, + "output_hidden_states": false, + "output_scores": false, + "pad_token_id": 0, + "pooler_fc_size": 768, + "pooler_num_attention_heads": 12, + "pooler_num_fc_layers": 3, + "pooler_size_per_head": 128, + "pooler_type": "first_token_transform", + "position_embedding_type": "absolute", + "prefix": null, + "problem_type": null, + "pruned_heads": {}, + "remove_invalid_values": false, + "repetition_penalty": 1, + "return_dict": true, + "return_dict_in_generate": false, + "sep_token_id": null, + "task_specific_params": null, + "temperature": 1, + "tie_encoder_decoder": false, + "tie_word_embeddings": true, + "tokenizer_class": null, + "top_k": 50, + "top_p": 1, + "torchscript": false, + "transformers_version": "4.6.1", + "type_vocab_size": 2, + "use_bfloat16": false, + "use_cache": true, + "vocab_size": 28996 + } + }` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/ranker_test.go b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/ranker_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e47d87cff5893ec08716f0b6d3a95e39f0982633 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/ranker_test.go @@ -0,0 +1,203 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / 
/ _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "sync" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +func TestGetScore(t *testing.T) { + t.Run("when the server has a successful answer", func(t *testing.T) { + server := httptest.NewServer(&testCrossRankerHandler{ + t: t, + res: RankResponse{ + Query: "Where do I work?", + Scores: []DocumentScore{ + { + Document: "I work at Apple", + Score: 0.15, + }, + }, + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + res, err := c.Rank(context.Background(), "Where do I work?", []string{"I work at Apple"}, nil) + + assert.Nil(t, err) + assert.Equal(t, ent.RankResult{ + Query: "Where do I work?", + DocumentScores: []ent.DocumentScore{ + { + Document: "I work at Apple", + Score: 0.15, + }, + }, + }, *res) + }) + + t.Run("when the server has an error", func(t *testing.T) { + server := httptest.NewServer(&testCrossRankerHandler{ + t: t, + res: RankResponse{ + Error: "some error from the server", + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + _, err := c.Rank(context.Background(), "prop", + []string{"I work at Apple"}, nil) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "some error from the server") + }) + + t.Run("when we send requests in batches", func(t *testing.T) { + server := httptest.NewServer(&testCrossRankerHandler{ + t: t, + res: RankResponse{ + Query: "Where do I work?", + Scores: []DocumentScore{ + { + Document: "I work at Apple", + Score: 0.15, + }, + }, + }, + batchedResults: [][]DocumentScore{ + { + { + Document: "Response 1", + Score: 0.99, + }, + { + Document: 
"Response 2", + Score: 0.89, + }, + }, + { + { + Document: "Response 3", + Score: 0.19, + }, + { + Document: "Response 4", + Score: 0.29, + }, + }, + { + { + Document: "Response 5", + Score: 0.79, + }, + { + Document: "Response 6", + Score: 0.789, + }, + }, + { + { + Document: "Response 7", + Score: 0.0001, + }, + }, + }, + }) + defer server.Close() + + c := New(server.URL, 0, nullLogger()) + c.maxDocuments = 2 + + query := "Where do I work?" + documents := []string{ + "Response 1", "Response 2", "Response 3", "Response 4", + "Response 5", "Response 6", "Response 7", + } + + resp, err := c.Rank(context.Background(), query, documents, nil) + + require.Nil(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.DocumentScores) + for i := range resp.DocumentScores { + assert.Equal(t, documents[i], resp.DocumentScores[i].Document) + if i == 0 { + assert.Equal(t, 0.99, resp.DocumentScores[i].Score) + } + if i == len(documents)-1 { + assert.Equal(t, 0.0001, resp.DocumentScores[i].Score) + } + } + }) +} + +type testCrossRankerHandler struct { + lock sync.RWMutex + t *testing.T + res RankResponse + batchedResults [][]DocumentScore +} + +func (f *testCrossRankerHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + f.lock.Lock() + defer f.lock.Unlock() + + assert.Equal(f.t, "/rerank", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.res.Error != "" { + w.WriteHeader(500) + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var req RankInput + require.Nil(f.t, json.Unmarshal(bodyBytes, &req)) + + containsDocument := func(req RankInput, in string) bool { + for _, doc := range req.Documents { + if doc == in { + return true + } + } + return false + } + + index := 0 + if len(f.batchedResults) > 0 { + if containsDocument(req, "Response 3") { + index = 1 + } + if containsDocument(req, "Response 5") { + index = 2 + } + if containsDocument(req, "Response 7") { + index = 3 + } + f.res.Scores = 
f.batchedResults[index] + } + + jsonBytes, _ := json.Marshal(f.res) + w.Write(jsonBytes) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/startup.go b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/startup.go new file mode 100644 index 0000000000000000000000000000000000000000..75f23221a29f9cd070b2f4e3f856a20f9622f096 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/startup.go @@ -0,0 +1,68 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "context" + "net/http" + "time" + + "github.com/pkg/errors" +) + +func (c *client) WaitForStartup(initCtx context.Context, + interval time.Duration, +) error { + t := time.NewTicker(interval) + defer t.Stop() + expired := initCtx.Done() + var lastErr error + for { + select { + case <-t.C: + lastErr = c.checkReady(initCtx) + if lastErr == nil { + return nil + } + c.logger. + WithField("action", "reranktransformers_remote_wait_for_startup"). 
+ WithError(lastErr).Warnf("reranktransformers remote service not ready") + case <-expired: + return errors.Wrapf(lastErr, "init context expired before remote was ready") + } + } +} + +func (c *client) checkReady(initCtx context.Context) error { + // spawn a new context (derived on the overall context) which is used to + // consider an individual request timed out + requestCtx, cancel := context.WithTimeout(initCtx, 500*time.Millisecond) + defer cancel() + + req, err := http.NewRequestWithContext(requestCtx, http.MethodGet, + c.url("/.well-known/ready"), nil) + if err != nil { + return errors.Wrap(err, "create check ready request") + } + + res, err := c.httpClient.Do(req) + if err != nil { + return errors.Wrap(err, "send check ready request") + } + + defer res.Body.Close() + if res.StatusCode > 299 { + return errors.Errorf("not ready: status %d", res.StatusCode) + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/startup_test.go b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/startup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..f996c7f6459c9f0f198189321673321328fb6745 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/clients/startup_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWaitForStartup(t *testing.T) { + t.Run("when the server is immediately ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + err := c.WaitForStartup(context.Background(), 50*time.Millisecond) + + assert.Nil(t, err) + }) + + t.Run("when the server is down", func(t *testing.T) { + c := New("http://nothing-running-at-this-url", 0, nullLogger()) + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 150*time.Millisecond) + + require.NotNil(t, err, nullLogger()) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is alive, but not ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(1 * time.Minute), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is initially not ready, but then becomes ready", + func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(100 * time.Millisecond), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.Nil(t, err) + }) +} + +type testReadyHandler 
struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testReadyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/.well-known/ready", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.WriteHeader(http.StatusNoContent) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/config.go b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/config.go new file mode 100644 index 0000000000000000000000000000000000000000..76aab9e51e315c6ce1cde159d4d4f241f8171fc1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-transformers/config.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
package modrerankertransformers

import (
	"context"

	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/modulecapabilities"
	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/entities/schema"
)

// ClassConfigDefaults returns the module's class-level configuration
// defaults. The transformers reranker exposes no class settings, so the
// map is empty.
func (m *ReRankerModule) ClassConfigDefaults() map[string]interface{} {
	return map[string]interface{}{}
}

// PropertyConfigDefaults returns the module's per-property configuration
// defaults. No property-level settings exist for this module.
func (m *ReRankerModule) PropertyConfigDefaults(
	dt *schema.DataType,
) map[string]interface{} {
	return map[string]interface{}{}
}

// ValidateClass validates a class's module configuration. There is
// nothing to validate for this module, so it always succeeds.
func (m *ReRankerModule) ValidateClass(ctx context.Context,
	class *models.Class, cfg moduletools.ClassConfig,
) error {
	return nil
}

// compile-time check that the module satisfies ClassConfigurator
var _ = modulecapabilities.ClassConfigurator(New())
+// +// CONTACT: hello@weaviate.io +// + +package modrerankertransformers + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + entcfg "github.com/weaviate/weaviate/entities/config" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + client "github.com/weaviate/weaviate/modules/reranker-transformers/clients" + additionalprovider "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +const Name = "reranker-transformers" + +func New() *ReRankerModule { + return &ReRankerModule{} +} + +type ReRankerModule struct { + reranker ReRankerClient + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +type ReRankerClient interface { + Rank(ctx context.Context, query string, documents []string, cfg moduletools.ClassConfig) (*ent.RankResult, error) + MetaInfo() (map[string]interface{}, error) +} + +func (m *ReRankerModule) Name() string { + return Name +} + +func (m *ReRankerModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextReranker +} + +func (m *ReRankerModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init re encoder") + } + + return nil +} + +func (m *ReRankerModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + uri := os.Getenv("RERANKER_INFERENCE_API") + if uri == "" { + return errors.Errorf("required variable RERANKER_INFERENCE_API is not set") + } + + client := client.New(uri, timeout, logger) + + waitForStartup := true + if envWaitForStartup := os.Getenv("RERANKER_WAIT_FOR_STARTUP"); envWaitForStartup != "" { + waitForStartup = entcfg.Enabled(envWaitForStartup) + } + + m.reranker = client + if 
waitForStartup { + if err := client.WaitForStartup(ctx, 1*time.Second); err != nil { + return errors.Wrap(err, "init remote sum module") + } + } + + m.additionalPropertiesProvider = additionalprovider.NewRankerProvider(client) + return nil +} + +func (m *ReRankerModule) MetaInfo() (map[string]interface{}, error) { + return m.reranker.MetaInfo() +} + +func (m *ReRankerModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.AdditionalProperties(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/clients/ranker.go b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/clients/ranker.go new file mode 100644 index 0000000000000000000000000000000000000000..82569e5f932ddd83040c4f2be716c8cd9bd02a73 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/clients/ranker.go @@ -0,0 +1,245 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
package clients

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"runtime"
	"sync"
	"time"

	enterrors "github.com/weaviate/weaviate/entities/errors"

	"github.com/weaviate/weaviate/usecases/modulecomponents"

	"github.com/pkg/errors"
	"github.com/sirupsen/logrus"
	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/modules/reranker-voyageai/config"
	"github.com/weaviate/weaviate/usecases/modulecomponents/ent"
)

// _NUMCPU bounds the number of concurrent rerank requests to the number
// of logical CPUs on this host (used as the errgroup limit in Rank).
var _NUMCPU = runtime.NumCPU()

// client talks to the VoyageAI /rerank HTTP API.
type client struct {
	lock         sync.RWMutex       // guards writes to the shared results slice in Rank
	apiKey       string             // static key; may be empty (see getApiKey)
	host         string             // API base URL
	path         string             // rerank endpoint path
	httpClient   *http.Client
	maxDocuments int                // per-request document cap; larger inputs are chunked
	logger       logrus.FieldLogger
}

// New builds a VoyageAI reranker client. apiKey may be empty, in which
// case each request must carry the key in its context (see getApiKey).
// timeout applies to every HTTP request made by this client.
func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *client {
	return &client{
		apiKey:       apiKey,
		httpClient:   &http.Client{Timeout: timeout},
		host:         "https://api.voyageai.com/v1",
		path:         "/rerank",
		maxDocuments: 1000,
		logger:       logger,
	}
}

// Rank scores documents against query. The documents are split into
// chunks of at most maxDocuments, the chunks are ranked concurrently
// (at most _NUMCPU requests in flight), and the per-chunk scores are
// concatenated back in the original document order.
func (c *client) Rank(ctx context.Context, query string, documents []string,
	cfg moduletools.ClassConfig,
) (*ent.RankResult, error) {
	eg := enterrors.NewErrorGroupWrapper(c.logger)
	eg.SetLimit(_NUMCPU)

	chunkedDocuments := c.chunkDocuments(documents, c.maxDocuments)
	documentScoreResponses := make([][]ent.DocumentScore, len(chunkedDocuments))
	for i := range chunkedDocuments {
		i := i // https://golang.org/doc/faq#closures_and_goroutines
		eg.Go(func() error {
			documentScoreResponse, err := c.performRank(ctx, query, chunkedDocuments[i], cfg)
			if err != nil {
				return err
			}
			c.lockGuard(func() {
				documentScoreResponses[i] = documentScoreResponse
			})
			return nil
		}, chunkedDocuments[i])
	}
	if err := eg.Wait(); err != nil {
		return nil, err
	}

	return c.toRankResult(query, documentScoreResponses), nil
}

// lockGuard runs mutate while holding the client's write lock.
func (c *client) lockGuard(mutate func()) {
	c.lock.Lock()
	defer c.lock.Unlock()
	mutate()
}

// performRank sends one rerank request for a single chunk of documents
// and maps the response back onto per-document scores. The model name
// is taken from the class config (with the module default as fallback).
func (c *client) performRank(ctx context.Context, query string, documents []string,
	cfg moduletools.ClassConfig,
) ([]ent.DocumentScore, error) {
	settings := config.NewClassSettings(cfg)
	voyageAIUrl, err := url.JoinPath(c.host, c.path)
	if err != nil {
		return nil, errors.Wrap(err, "join VoyageAI API host and path")
	}

	// ReturnDocuments is false: we already hold the documents locally
	// and only need index/score pairs back. Truncation is left at its
	// zero value (false).
	input := RankInput{
		Documents:       documents,
		Query:           query,
		Model:           settings.Model(),
		ReturnDocuments: false,
	}

	body, err := json.Marshal(input)
	if err != nil {
		return nil, errors.Wrapf(err, "marshal body")
	}

	req, err := http.NewRequestWithContext(ctx, "POST", voyageAIUrl, bytes.NewReader(body))
	if err != nil {
		return nil, errors.Wrap(err, "create POST request")
	}

	apiKey, err := c.getApiKey(ctx)
	if err != nil {
		return nil, errors.Wrapf(err, "VoyageAI API Key")
	}
	req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey))
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Request-Source", "unspecified:weaviate")

	res, err := c.httpClient.Do(req)
	if err != nil {
		return nil, errors.Wrap(err, "send POST request")
	}
	defer res.Body.Close()

	bodyBytes, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, errors.Wrap(err, "read response body")
	}

	// non-200: surface the API's own error message when one is present
	if res.StatusCode != 200 {
		var apiError voyageAiApiError
		err = json.Unmarshal(bodyBytes, &apiError)
		if err != nil {
			return nil, errors.Wrap(err, "unmarshal error from response body")
		}
		if apiError.Message != "" {
			return nil, errors.Errorf("connection to VoyageAI API failed with status %d: %s", res.StatusCode, apiError.Message)
		}
		return nil, errors.Errorf("connection to VoyageAI API failed with status %d", res.StatusCode)
	}

	var rankResponse RankResponse
	if err := json.Unmarshal(bodyBytes, &rankResponse); err != nil {
		return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes)))
	}
	return c.toDocumentScores(documents, rankResponse.Data), nil
}

// chunkDocuments splits documents into consecutive slices of at most
// chunkSize elements (the last chunk may be shorter).
func (c *client) chunkDocuments(documents []string, chunkSize int) [][]string {
	var requests [][]string
	for i := 0; i < len(documents); i += chunkSize {
		end := i + chunkSize

		if end > len(documents) {
			end = len(documents)
		}

		requests = append(requests, documents[i:end])
	}

	return requests
}

// toDocumentScores places each returned score at its document's
// original position within the chunk, restoring input order.
// NOTE(review): assumes every result.Index is a valid index into this
// chunk's documents; entries the API omits keep zero-value scores —
// confirm against the VoyageAI API contract.
func (c *client) toDocumentScores(documents []string, results []Data) []ent.DocumentScore {
	documentScores := make([]ent.DocumentScore, len(results))
	for _, result := range results {
		documentScores[result.Index] = ent.DocumentScore{
			Document: documents[result.Index],
			Score:    result.RelevanceScore,
		}
	}
	return documentScores
}

// toRankResult flattens the per-chunk score slices (already in chunk
// order) into a single result for the original query.
func (c *client) toRankResult(query string, results [][]ent.DocumentScore) *ent.RankResult {
	documentScores := []ent.DocumentScore{}
	for i := range results {
		documentScores = append(documentScores, results[i]...)
	}
	return &ent.RankResult{
		Query:          query,
		DocumentScores: documentScores,
	}
}

// getApiKey resolves the API key: the statically configured key wins;
// otherwise the per-request X-Voyageai-Api-Key header (HTTP context
// value, then gRPC metadata) is consulted. The VOYAGEAI_APIKEY env var
// mentioned in the error is read at module init and passed to New — it
// is not read here.
func (c *client) getApiKey(ctx context.Context) (string, error) {
	if len(c.apiKey) > 0 {
		return c.apiKey, nil
	}
	key := "X-Voyageai-Api-Key"

	apiKey := ctx.Value(key)
	// try getting header from GRPC if not successful
	if apiKey == nil {
		apiKey = modulecomponents.GetValueFromGRPC(ctx, key)
	}
	if apiKeyHeader, ok := apiKey.([]string); ok &&
		len(apiKeyHeader) > 0 && len(apiKeyHeader[0]) > 0 {
		return apiKeyHeader[0], nil
	}
	return "", errors.New("no api key found " +
		"neither in request header: X-Voyageai-Api-Key " +
		"nor in environment variable under VOYAGEAI_APIKEY")
}

// RankInput is the request payload for the VoyageAI /rerank endpoint.
type RankInput struct {
	Documents       []string `json:"documents"`
	Query           string   `json:"query"`
	Model           string   `json:"model"`
	ReturnDocuments bool     `json:"return_documents"`
	Truncation      bool     `json:"truncation"`
}

// Document is a reranked document as returned by the API.
type Document struct {
	Text string `json:"text"`
}
`json:"index"` + RelevanceScore float64 `json:"relevance_score"` + Document Document `json:"document"` +} + +type Usage struct { + TotalTokens int `json:"total_tokens"` +} + +type RankResponse struct { + ID string `json:"id"` + Data []Data `json:"data"` + Usage Usage `json:"usage"` +} + +type voyageAiApiError struct { + Message string `json:"message"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/clients/ranker_meta.go b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/clients/ranker_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..b1e59d6b8467520b825868019bf3318845039166 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/clients/ranker_meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (s *client) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Reranker - VoyageAI", + "documentationHref": "https://docs.voyageai.com/reference/reranker-api", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/clients/ranker_test.go b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/clients/ranker_test.go new file mode 100644 index 0000000000000000000000000000000000000000..09565d3fdc17624f8cede657abf91c90cb26d205 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/clients/ranker_test.go @@ -0,0 +1,215 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "sync" + "testing" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func TestRank(t *testing.T) { + t.Run("when the server has a successful response", func(t *testing.T) { + handler := &testRankHandler{ + t: t, + response: RankResponse{ + Data: []Data{ + { + Index: 0, + RelevanceScore: 0.9, + }, + }, + }, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + c.host = server.URL + + expected := &ent.RankResult{ + DocumentScores: []ent.DocumentScore{ + { + Document: "I work at Apple", + Score: 0.9, + }, + }, + Query: "Where do I work?", + } + + res, err := c.Rank(context.Background(), "Where do I work?", []string{"I work at Apple"}, nil) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the server has an error", func(t *testing.T) { + handler := &testRankHandler{ + t: t, + response: RankResponse{ + Data: []Data{}, + }, + errorMessage: "some error from the server", + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + c.host = server.URL + + _, err := c.Rank(context.Background(), "I work at Apple", []string{"Where do I work?"}, nil) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "some error from the server") + }) + + t.Run("when we send requests in batches", func(t *testing.T) { + handler := &testRankHandler{ + t: t, + batchedResults: [][]Data{ + { + { + Index: 0, + RelevanceScore: 0.99, + }, + { + Index: 1, + RelevanceScore: 0.89, + }, + }, + { + { + Index: 0, + RelevanceScore: 0.19, + }, + { + Index: 1, + RelevanceScore: 
0.29, + }, + }, + { + { + Index: 0, + RelevanceScore: 0.79, + }, + { + Index: 1, + RelevanceScore: 0.789, + }, + }, + { + { + Index: 0, + RelevanceScore: 0.0001, + }, + }, + }, + } + server := httptest.NewServer(handler) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + c.host = server.URL + // this will trigger 4 go routines + c.maxDocuments = 2 + + query := "Where do I work?" + documents := []string{ + "Response 1", "Response 2", "Response 3", "Response 4", + "Response 5", "Response 6", "Response 7", + } + + resp, err := c.Rank(context.Background(), query, documents, nil) + + require.Nil(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.DocumentScores) + for i := range resp.DocumentScores { + assert.Equal(t, documents[i], resp.DocumentScores[i].Document) + if i == 0 { + assert.Equal(t, 0.99, resp.DocumentScores[i].Score) + } + if i == len(documents)-1 { + assert.Equal(t, 0.0001, resp.DocumentScores[i].Score) + } + } + }) +} + +type testRankHandler struct { + lock sync.RWMutex + t *testing.T + response RankResponse + batchedResults [][]Data + errorMessage string +} + +func (f *testRankHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + f.lock.Lock() + defer f.lock.Unlock() + + if f.errorMessage != "" { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(`{"message":"` + f.errorMessage + `"}`)) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var req RankInput + require.Nil(f.t, json.Unmarshal(bodyBytes, &req)) + + containsDocument := func(req RankInput, in string) bool { + for _, doc := range req.Documents { + if doc == in { + return true + } + } + return false + } + + index := 0 + if len(f.batchedResults) > 0 { + if containsDocument(req, "Response 3") { + index = 1 + } + if containsDocument(req, "Response 5") { + index = 2 + } + if containsDocument(req, "Response 7") { + index = 3 + } + f.response.Data = f.batchedResults[index] + } + + outBytes, err := 
json.Marshal(f.response) + require.Nil(f.t, err) + + w.Write(outBytes) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/config.go b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..9ce282837c9dfb8eb3bff40f812386a773434617 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/config.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modrerankervoyageai + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *ReRankerVoyageAIModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *ReRankerVoyageAIModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *ReRankerVoyageAIModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/config/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/config/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..5b826ce25146d663b84ff05d7887856159bc358c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/config/class_settings.go @@ -0,0 +1,79 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | 
__/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + modelProperty = "model" +) + +var availableVoyageAIModels = []string{ + "rerank-2", + "rerank-2-lite", + "rerank-lite-1", + "rerank-1", +} + +// note it might not like this -- might want int values for e.g. MaxTokens +var ( + DefaultVoyageAIModel = "rerank-lite-1" +) + +type classSettings struct { + cfg moduletools.ClassConfig + propertyValuesHelper basesettings.PropertyValuesHelper +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, propertyValuesHelper: basesettings.NewPropertyValuesHelper("reranker-voyageai")} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if ic.cfg == nil { + // we would receive a nil-config on cross-class requests, such as Explore{} + return errors.New("empty config") + } + model := ic.getStringProperty(modelProperty, DefaultVoyageAIModel) + if model == nil || !ic.validateModel(*model) { + return errors.Errorf("wrong VoyageAI model name, available model names are: %v", availableVoyageAIModels) + } + + return nil +} + +func (ic *classSettings) getStringProperty(name string, defaultValue string) *string { + asString := ic.propertyValuesHelper.GetPropertyAsStringWithNotExists(ic.cfg, name, "", defaultValue) + return &asString +} + +func (ic *classSettings) validateModel(model string) bool { + return contains(availableVoyageAIModels, model) +} + +func (ic *classSettings) Model() string { + return *ic.getStringProperty(modelProperty, DefaultVoyageAIModel) +} + +func contains[T comparable](s []T, e T) bool { + for _, v := 
range s { + if v == e { + return true + } + } + return false +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/config/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/config/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..979472f590edb488cab22ab44b57626cbfc74cdb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/config/class_settings_test.go @@ -0,0 +1,99 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package config + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantModel string + wantErr error + }{ + { + name: "default settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantModel: "rerank-lite-1", + }, + { + name: "custom settings", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "rerank-lite-1", + }, + }, + wantModel: "rerank-lite-1", + }, + { + name: "unsupported model error", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "rerank-large-2", + }, + }, + wantErr: fmt.Errorf("wrong VoyageAI model name, available model names are: [rerank-2 rerank-2-lite rerank-lite-1 rerank-1]"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.EqualError(t, ic.Validate(nil), tt.wantErr.Error()) + } else { + assert.Equal(t, 
tt.wantModel, ic.Model()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + return nil +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/module.go b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..089c1ef08cc0a6a63b6014902fec412e4fb47dad --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/reranker-voyageai/module.go @@ -0,0 +1,86 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modrerankervoyageai + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/reranker-voyageai/clients" + rerankeradditional "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/ent" +) + +const Name = "reranker-voyageai" + +func New() *ReRankerVoyageAIModule { + return &ReRankerVoyageAIModule{} +} + +type ReRankerVoyageAIModule struct { + reranker ReRankerVoyageAIClient + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +type ReRankerVoyageAIClient interface { + Rank(ctx context.Context, query string, documents []string, cfg moduletools.ClassConfig) (*ent.RankResult, error) + MetaInfo() (map[string]interface{}, error) +} + +func (m *ReRankerVoyageAIModule) Name() string { + return Name +} + +func (m *ReRankerVoyageAIModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2TextReranker +} + +func (m *ReRankerVoyageAIModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init cross encoder") + } + + return nil +} + +func (m *ReRankerVoyageAIModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("VOYAGEAI_APIKEY") + client := clients.New(apiKey, timeout, logger) + m.reranker = client + m.additionalPropertiesProvider = rerankeradditional.NewRankerProvider(m.reranker) + return nil +} + +func (m *ReRankerVoyageAIModule) MetaInfo() (map[string]interface{}, error) { + return m.reranker.MetaInfo() +} + +func (m *ReRankerVoyageAIModule) AdditionalProperties() 
map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.AdditionalProperties(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/models/models.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/models/models.go new file mode 100644 index 0000000000000000000000000000000000000000..320a7457ce60830926b157d4e746088ab986a8a7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/models/models.go @@ -0,0 +1,12 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package models diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/provider.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/provider.go new file mode 100644 index 0000000000000000000000000000000000000000..c5cb151b1b7c6b925678485fd5c95b12d241d22e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/provider.go @@ -0,0 +1,59 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package additional + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/tailor-inc/graphql" + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/search" +) + +type AdditionalProperty interface { + AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig) ([]search.Result, error) + ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} + AdditionalPropertyDefaultValue() interface{} + AdditionalFieldFn(classname string) *graphql.Field +} + +type GraphQLAdditionalArgumentsProvider struct { + summaryProvider AdditionalProperty +} + +func New(summaryProvider AdditionalProperty) *GraphQLAdditionalArgumentsProvider { + return &GraphQLAdditionalArgumentsProvider{summaryProvider} +} + +func (p *GraphQLAdditionalArgumentsProvider) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + additionalProperties := map[string]modulecapabilities.AdditionalProperty{} + additionalProperties["summary"] = p.getSummary() + return additionalProperties +} + +func (p *GraphQLAdditionalArgumentsProvider) getSummary() modulecapabilities.AdditionalProperty { + return modulecapabilities.AdditionalProperty{ + GraphQLNames: []string{"summary"}, + GraphQLFieldFunction: p.summaryProvider.AdditionalFieldFn, + GraphQLExtractFunction: p.summaryProvider.ExtractAdditionalFn, + SearchFunctions: modulecapabilities.AdditionalSearch{ + ExploreGet: p.summaryProvider.AdditionalPropertyFn, + ExploreList: p.summaryProvider.AdditionalPropertyFn, + }, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary.go 
b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary.go new file mode 100644 index 0000000000000000000000000000000000000000..73d0d638e426c973763c45f92036b4cd7e0ef0ed --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary.go @@ -0,0 +1,59 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package summary + +import ( + "context" + "errors" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/tailor-inc/graphql" + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/modules/sum-transformers/ent" +) + +type sumClient interface { + GetSummary(ctx context.Context, property, text string) ([]ent.SummaryResult, error) +} + +type SummaryProvider struct { + sum sumClient +} + +func New(sum sumClient) *SummaryProvider { + return &SummaryProvider{sum} +} + +func (p *SummaryProvider) AdditionalPropertyDefaultValue() interface{} { + return &Params{} +} + +func (p *SummaryProvider) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return p.parseSummaryArguments(param) +} + +func (p *SummaryProvider) AdditionalFieldFn(classname string) *graphql.Field { + return p.additionalSummaryField(classname) +} + +func (p *SummaryProvider) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + if parameters, ok := params.(*Params); ok { + return p.findSummary(ctx, in, parameters) + } + return nil, errors.New("wrong parameters") +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_graphql_field.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_graphql_field.go new file mode 100644 index 0000000000000000000000000000000000000000..1c52b575038832f18dc71bda6a96581801293e3e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_graphql_field.go @@ -0,0 +1,37 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package summary + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func (p *SummaryProvider) additionalSummaryField(classname string) *graphql.Field { + return &graphql.Field{ + Args: graphql.FieldConfigArgument{ + "properties": &graphql.ArgumentConfig{ + Description: "Properties which contains text", + Type: graphql.NewList(graphql.String), + DefaultValue: nil, + }, + }, + Type: graphql.NewList(graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalSummary", classname), + Fields: graphql.Fields{ + "property": &graphql.Field{Type: graphql.String}, + "result": &graphql.Field{Type: graphql.String}, + }, + })), + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_graphql_field_test.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_graphql_field_test.go new file mode 100644 index 0000000000000000000000000000000000000000..0cd47dc314b6a70abc3c19830729d84ddd6961a5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_graphql_field_test.go @@ -0,0 +1,43 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ 
+// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package summary + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/tailor-inc/graphql" +) + +func Test_additionalSummaryField(t *testing.T) { + // given + summaryProvider := &SummaryProvider{} + classname := "Class" + + // when + summary := summaryProvider.additionalSummaryField(classname) + + assert.NotNil(t, summary) + assert.Equal(t, "ClassAdditionalSummary", summary.Type.Name()) + assert.NotNil(t, summary.Type) + summaryObjectList, summaryObjectListOK := summary.Type.(*graphql.List) + assert.True(t, summaryObjectListOK) + summaryObject, summaryObjectOK := summaryObjectList.OfType.(*graphql.Object) + assert.True(t, summaryObjectOK) + assert.Equal(t, 2, len(summaryObject.Fields())) + assert.NotNil(t, summaryObject.Fields()["property"]) + assert.NotNil(t, summaryObject.Fields()["result"]) + + assert.NotNil(t, summary.Args) + assert.Equal(t, 1, len(summary.Args)) + assert.NotNil(t, summary.Args["properties"]) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_params.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_params.go new file mode 100644 index 0000000000000000000000000000000000000000..aa5be690e4aede84da486835ef91a13775d78125 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_params.go @@ -0,0 +1,24 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package summary + +type Params struct { + Properties []string +} + +func (n Params) GetProperties() []string { + return n.Properties +} + +func (n Params) GetPropertiesToExtract() []string { + return n.Properties +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_params_extractor.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_params_extractor.go new file mode 100644 index 0000000000000000000000000000000000000000..92e2cdb486968411aa62c44166cf652978b20669 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_params_extractor.go @@ -0,0 +1,40 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package summary + +import ( + "log" + + "github.com/tailor-inc/graphql/language/ast" +) + +func (p *SummaryProvider) parseSummaryArguments(args []*ast.Argument) *Params { + out := &Params{} + + for _, arg := range args { + switch arg.Name.Value { + case "properties": + inp := arg.Value.GetValue().([]ast.Value) + out.Properties = make([]string, len(inp)) + + for i, value := range inp { + out.Properties[i] = value.(*ast.StringValue).Value + } + + default: + // ignore what we don't recognize + log.Printf("Igonore not recognized value: %v", arg.Name.Value) + } + } + + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_params_extractor_test.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_params_extractor_test.go new file mode 100644 index 0000000000000000000000000000000000000000..da10bb14622ab25938a3382989205d12496d013b --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_params_extractor_test.go @@ -0,0 +1,83 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package summary + +import ( + "reflect" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/tailor-inc/graphql/language/ast" +) + +func Test_parseSummaryArguments(t *testing.T) { + type args struct { + args []*ast.Argument + } + tests := []struct { + name string + args args + want *Params + }{ + { + name: "Should create with no params", + args: args{}, + want: &Params{}, + }, + { + name: "Should create with all params", + args: args{ + args: []*ast.Argument{ + createListArg("properties", []string{"prop1", "prop2"}), + }, + }, + want: &Params{ + Properties: []string{"prop1", "prop2"}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + p := &SummaryProvider{} + if got := p.parseSummaryArguments(tt.args.args); !reflect.DeepEqual(got, tt.want) { + t.Errorf("parseSummaryArguments() = %v, want %v", got, tt.want) + } + actual := p.parseSummaryArguments(tt.args.args) + assert.Equal(t, tt.want, actual) + }) + } +} + +func createListArg(name string, valuesIn []string) *ast.Argument { + n := ast.Name{ + Value: name, + } + + valuesAst := make([]ast.Value, len(valuesIn)) + for i, value := range valuesIn { + valuesAst[i] = &ast.StringValue{ + Kind: "Kind", + Value: value, + } + } + vals := ast.ListValue{ + Kind: "Kind", + Values: valuesAst, + } + arg := ast.Argument{ + Name: ast.NewName(&n), + Kind: "Kind", + Value: &vals, + } + a := ast.NewArgument(&arg) + return a +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_result.go 
b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_result.go new file mode 100644 index 0000000000000000000000000000000000000000..65d9c278ecfce0a618a38f5ea1bf0a7c46dd0655 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_result.go @@ -0,0 +1,88 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package summary + +import ( + "context" + "errors" + "fmt" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/modules/sum-transformers/ent" +) + +func (p *SummaryProvider) findSummary(ctx context.Context, + in []search.Result, params *Params, +) ([]search.Result, error) { + if len(in) == 0 { + return nil, nil + } else { + if params == nil { + return nil, fmt.Errorf("no params provided") + } + + properties := params.GetProperties() + + // check if user parameter values are valid + if len(properties) == 0 { + return in, errors.New("no properties provided") + } + + for i := range in { // for each result of the general GraphQL Query + ap := in[i].AdditionalProperties + if ap == nil { + ap = models.AdditionalProperties{} + } + + // check if the schema of the GraphQL data object contains the properties and they are text or string values + textProperties := map[string]string{} + schema := in[i].Object().Properties.(map[string]interface{}) + for property, value := range schema { + if p.containsProperty(property, properties) { + if valueString, ok := value.(string); ok && len(valueString) > 0 { + textProperties[property] = valueString + } + } + } + + summaryList := []ent.SummaryResult{} + + // for each text property result, call the SUM function and add to additional 
result + for property, value := range textProperties { + summary, err := p.sum.GetSummary(ctx, property, value) + if err != nil { + return in, err + } + + summaryList = append(summaryList, summary...) + } + + ap["summary"] = summaryList + + in[i].AdditionalProperties = ap + } + } + return in, nil +} + +func (p *SummaryProvider) containsProperty(property string, properties []string) bool { + if len(properties) == 0 { + return true + } + for i := range properties { + if properties[i] == property { + return true + } + } + return false +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_test.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_test.go new file mode 100644 index 0000000000000000000000000000000000000000..86ebb20515d4d02fafeaae232eb46567e7264939 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/additional/summary/summary_test.go @@ -0,0 +1,115 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package summary + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/modules/sum-transformers/ent" +) + +func TestAdditionalAnswerProvider(t *testing.T) { + t.Run("should fail with empty content", func(t *testing.T) { + // given + sumClient := &fakeSUMClient{} + summaryProvider := New(sumClient) + in := []search.Result{ + { + ID: "some-uuid", + }, + } + fakeParams := &Params{} + limit := 1 + argumentModuleParams := map[string]interface{}{} + + // when + out, err := summaryProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + + // then + require.NotNil(t, err) + require.NotEmpty(t, out) + assert.Error(t, err, "empty schema content") + }) + + t.Run("should fail with empty params", func(t *testing.T) { + // given + sumClient := &fakeSUMClient{} + summaryProvider := New(sumClient) + in := []search.Result{ + { + ID: "some-uuid", + Schema: map[string]interface{}{ + "content": "content", + }, + }, + } + fakeParams := &Params{} + limit := 1 + argumentModuleParams := map[string]interface{}{} + + // when + out, err := summaryProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + + // then + require.NotNil(t, err) + require.NotEmpty(t, out) + assert.Error(t, err, "empty params") + }) + + t.Run("should summarize", func(t *testing.T) { + sumClient := &fakeSUMClient{} + summaryProvider := New(sumClient) + in := []search.Result{ + { + ID: "some-uuid", + Schema: map[string]interface{}{ + "content": "this is the content", + }, + }, + } + fakeParams := &Params{Properties: []string{"content"}} + limit := 1 + argumentModuleParams := map[string]interface{}{} + + // when + out, err := summaryProvider.AdditionalPropertyFn(context.Background(), in, fakeParams, &limit, argumentModuleParams, nil) + // then + 
require.Nil(t, err) + require.NotEmpty(t, out) + assert.Equal(t, 1, len(in)) + answer, answerOK := in[0].AdditionalProperties["summary"] + assert.True(t, answerOK) + assert.NotNil(t, answer) + answerAdditional, answerAdditionalOK := answer.([]ent.SummaryResult) + assert.True(t, answerAdditionalOK) + assert.Equal(t, "this is the summary", answerAdditional[0].Result) + assert.Equal(t, "content", answerAdditional[0].Property) + }) +} + +type fakeSUMClient struct{} + +func (c *fakeSUMClient) GetSummary(ctx context.Context, property, text string, +) ([]ent.SummaryResult, error) { + return c.getSummary(property), nil +} + +func (c *fakeSUMClient) getSummary(property string) []ent.SummaryResult { + return []ent.SummaryResult{{ + Property: property, + Result: "this is the summary", + }} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/client.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/client.go new file mode 100644 index 0000000000000000000000000000000000000000..d2423de0b0d032eefbea46d6d27cc616159ab948 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/client.go @@ -0,0 +1,107 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/sum-transformers/ent" +) + +type client struct { + origin string + httpClient *http.Client + logger logrus.FieldLogger +} + +type sumInput struct { + Text string `json:"text"` +} + +type summaryResponse struct { + // Property string `json:"property"` + Result string `json:"result"` +} + +type sumResponse struct { + Error string + sumInput + Summary []summaryResponse `json:"summary"` +} + +func New(origin string, timeout time.Duration, logger logrus.FieldLogger) *client { + return &client{ + origin: origin, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (c *client) GetSummary(ctx context.Context, property, text string, +) ([]ent.SummaryResult, error) { + body, err := json.Marshal(sumInput{ + Text: text, + }) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", c.url("/sum/"), + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + + res, err := c.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody sumResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if res.StatusCode > 399 { + return nil, errors.Errorf("fail with status %d: %s", res.StatusCode, resBody.Error) + } + + out := make([]ent.SummaryResult, len(resBody.Summary)) + + for i, elem := range resBody.Summary { + out[i].Result = elem.Result + out[i].Property = property + } + + // format resBody to nerResult + return out, nil +} + +func (c *client) url(path string) string { + return fmt.Sprintf("%s%s", c.origin, path) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/startup.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/startup.go new file mode 100644 index 0000000000000000000000000000000000000000..8941f4f63de62e89cbe042f562417cc1f9c0f63b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/startup.go @@ -0,0 +1,68 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "context" + "net/http" + "time" + + "github.com/pkg/errors" +) + +func (c *client) WaitForStartup(initCtx context.Context, + interval time.Duration, +) error { + t := time.NewTicker(interval) + defer t.Stop() + expired := initCtx.Done() + var lastErr error + for { + select { + case <-t.C: + lastErr = c.checkReady(initCtx) + if lastErr == nil { + return nil + } + c.logger. + WithField("action", "sum_remote_wait_for_startup"). 
+ WithError(lastErr).Warnf("sum remote service not ready") + case <-expired: + return errors.Wrapf(lastErr, "init context expired before remote was ready") + } + } +} + +func (c *client) checkReady(initCtx context.Context) error { + // spawn a new context (derived on the overall context) which is used to + // consider an individual request timed out + requestCtx, cancel := context.WithTimeout(initCtx, 500*time.Millisecond) + defer cancel() + + req, err := http.NewRequestWithContext(requestCtx, http.MethodGet, + c.url("/.well-known/ready"), nil) + if err != nil { + return errors.Wrap(err, "create check ready request") + } + + res, err := c.httpClient.Do(req) + if err != nil { + return errors.Wrap(err, "send check ready request") + } + + defer res.Body.Close() + if res.StatusCode > 299 { + return errors.Errorf("not ready: status %d", res.StatusCode) + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/startup_test.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/startup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..f996c7f6459c9f0f198189321673321328fb6745 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/startup_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWaitForStartup(t *testing.T) { + t.Run("when the server is immediately ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + err := c.WaitForStartup(context.Background(), 50*time.Millisecond) + + assert.Nil(t, err) + }) + + t.Run("when the server is down", func(t *testing.T) { + c := New("http://nothing-running-at-this-url", 0, nullLogger()) + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 150*time.Millisecond) + + require.NotNil(t, err, nullLogger()) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is alive, but not ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(1 * time.Minute), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is initially not ready, but then becomes ready", + func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(100 * time.Millisecond), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.Nil(t, err) + }) +} + +type testReadyHandler 
struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testReadyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/.well-known/ready", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.WriteHeader(http.StatusNoContent) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/sum_meta.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/sum_meta.go new file mode 100644 index 0000000000000000000000000000000000000000..dad745e0e89344174720eb739bded983a41d4222 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/sum_meta.go @@ -0,0 +1,45 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "context" + "encoding/json" + "io" + "net/http" + + "github.com/pkg/errors" +) + +func (c *client) MetaInfo() (map[string]interface{}, error) { + req, err := http.NewRequestWithContext(context.Background(), "GET", c.url("/meta"), nil) + if err != nil { + return nil, errors.Wrap(err, "create GET meta request") + } + + res, err := c.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send GET meta request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read meta response body") + } + + var resBody map[string]interface{} + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, "unmarshal meta response body") + } + return resBody, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/sum_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/sum_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ca46f8dd532c832bdc00ac53d68c52234108492e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/sum_meta_test.go @@ -0,0 +1,156 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["model"] + assert.True(t, metaModel != nil) + model, modelOK := metaModel.(map[string]interface{}) + assert.True(t, modelOK) + assert.True(t, model["_name_or_path"] != nil) + assert.True(t, model["architectures"] != nil) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "model": { + "_name_or_path": "dbmdz/bert-large-cased-finetuned-conll03-english", + "_num_labels": 9, + "add_cross_attention": false, + "architectures": [ + "BertForTokenClassification" + ], + "attention_probs_dropout_prob": 0.1, + "bad_words_ids": null, + "bos_token_id": null, + "chunk_size_feed_forward": 0, + "decoder_start_token_id": null, + "directionality": "bidi", + "diversity_penalty": 0, + "do_sample": false, + "early_stopping": false, + "encoder_no_repeat_ngram_size": 0, + "eos_token_id": null, + "finetuning_task": null, + "forced_bos_token_id": null, + "forced_eos_token_id": null, + "gradient_checkpointing": false, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 1024, + "id2label": { + "0": "O", + "1": "B-MISC", + "2": "I-MISC", + "3": 
"B-PER", + "4": "I-PER", + "5": "B-ORG", + "6": "I-ORG", + "7": "B-LOC", + "8": "I-LOC" + }, + "initializer_range": 0.02, + "intermediate_size": 4096, + "is_decoder": false, + "is_encoder_decoder": false, + "label2id": { + "B-LOC": 7, + "B-MISC": 1, + "B-ORG": 5, + "B-PER": 3, + "I-LOC": 8, + "I-MISC": 2, + "I-ORG": 6, + "I-PER": 4, + "O": 0 + }, + "layer_norm_eps": 1e-12, + "length_penalty": 1, + "max_length": 20, + "max_position_embeddings": 512, + "min_length": 0, + "model_type": "bert", + "no_repeat_ngram_size": 0, + "num_attention_heads": 16, + "num_beam_groups": 1, + "num_beams": 1, + "num_hidden_layers": 24, + "num_return_sequences": 1, + "output_attentions": false, + "output_hidden_states": false, + "output_scores": false, + "pad_token_id": 0, + "pooler_fc_size": 768, + "pooler_num_attention_heads": 12, + "pooler_num_fc_layers": 3, + "pooler_size_per_head": 128, + "pooler_type": "first_token_transform", + "position_embedding_type": "absolute", + "prefix": null, + "problem_type": null, + "pruned_heads": {}, + "remove_invalid_values": false, + "repetition_penalty": 1, + "return_dict": true, + "return_dict_in_generate": false, + "sep_token_id": null, + "task_specific_params": null, + "temperature": 1, + "tie_encoder_decoder": false, + "tie_word_embeddings": true, + "tokenizer_class": null, + "top_k": 50, + "top_p": 1, + "torchscript": false, + "transformers_version": "4.6.1", + "type_vocab_size": 2, + "use_bfloat16": false, + "use_cache": true, + "vocab_size": 28996 + } + }` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/sum_test.go b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/sum_test.go new file mode 100644 index 0000000000000000000000000000000000000000..3a923a8851ec19f44e8e816cc716fe0f66e04dd1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/sum-transformers/client/sum_test.go @@ -0,0 +1,88 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | 
__/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/modules/sum-transformers/ent" +) + +func TestGetAnswer(t *testing.T) { + t.Run("when the server has a successful answer", func(t *testing.T) { + server := httptest.NewServer(&testSUMHandler{ + t: t, + res: sumResponse{ + sumInput: sumInput{ + Text: "I work at Apple", + }, + Summary: []summaryResponse{ + { + Result: "Apple", + }, + }, + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + res, err := c.GetSummary(context.Background(), "prop", + "I work at Apple") + + assert.Nil(t, err) + assert.Equal(t, []ent.SummaryResult{ + { + Result: "Apple", + Property: "prop", + }, + }, res) + }) + + t.Run("when the server has a an error", func(t *testing.T) { + server := httptest.NewServer(&testSUMHandler{ + t: t, + res: sumResponse{ + Error: "some error from the server", + }, + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + _, err := c.GetSummary(context.Background(), "prop", + "I work at Apple") + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "some error from the server") + }) +} + +type testSUMHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + res sumResponse +} + +func (f *testSUMHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/sum/", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.res.Error != "" { + w.WriteHeader(500) + } + + jsonBytes, _ := json.Marshal(f.res) + w.Write(jsonBytes) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/sum-transformers/config.go 
// ClassConfigDefaults returns the default class-level module config; the
// sum-transformers module defines no class-level settings.
func (m *SUMModule) ClassConfigDefaults() map[string]interface{} {
	return map[string]interface{}{}
}

// PropertyConfigDefaults returns the default property-level module config;
// no property-level settings are used by this module.
func (m *SUMModule) PropertyConfigDefaults(dt *schema.DataType,
) map[string]interface{} {
	return map[string]interface{}{}
}

// ValidateClass always accepts the class configuration, since this module
// exposes no configurable options to validate.
func (m *SUMModule) ValidateClass(ctx context.Context,
	class *models.Class, cfg moduletools.ClassConfig,
) error {
	return nil
}

// compile-time check that SUMModule satisfies modulecapabilities.ClassConfigurator
var _ = modulecapabilities.ClassConfigurator(New())
// SummaryResult holds one generated summary together with the name of the
// source property that was summarized.
type SummaryResult struct {
	Property string
	Result   string
}

// SumResult aggregates the summaries produced for a single object.
type SumResult struct {
	Summary []SummaryResult
}
*SUMModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + if err := m.initAdditional(ctx, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()); err != nil { + return errors.Wrap(err, "init additional") + } + return nil +} + +func (m *SUMModule) initAdditional(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + uri := os.Getenv("SUM_INFERENCE_API") + if uri == "" { + return errors.Errorf("required variable SUM_INFERENCE_API is not set") + } + + waitForStartup := true + if envWaitForStartup := os.Getenv("SUM_WAIT_FOR_STARTUP"); envWaitForStartup != "" { + waitForStartup = entcfg.Enabled(envWaitForStartup) + } + + client := client.New(uri, timeout, logger) + if waitForStartup { + if err := client.WaitForStartup(ctx, 1*time.Second); err != nil { + return errors.Wrap(err, "init remote sum module") + } + } + + m.sum = client + + tokenProvider := sumadditionalsummary.New(m.sum) + m.additionalPropertiesProvider = sumadditional.New(tokenProvider) + + return nil +} + +func (m *SUMModule) MetaInfo() (map[string]interface{}, error) { + return m.sum.MetaInfo() +} + +func (m *SUMModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.AdditionalProperties(New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/additional/models/models.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/additional/models/models.go new file mode 100644 index 0000000000000000000000000000000000000000..d4d459d431d87b098dcf20da305b70e16068083a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/additional/models/models.go @@ -0,0 +1,28 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ 
// SpellCheckChange describes the misspellings
type SpellCheckChange struct {
	Original  string `json:"original,omitempty"`
	Corrected string `json:"corrected,omitempty"`
}

// SpellCheck presents proper text without misspellings
// and the list of words that were misspelled
type SpellCheck struct {
	OriginalText string `json:"originalText,omitempty"`
	DidYouMean   string `json:"didYouMean,omitempty"`
	Location     string `json:"location,omitempty"`
	// NOTE(review): omitempty on an int drops the field entirely when the
	// count is 0 — confirm clients don't rely on an explicit
	// "numberOfCorrections": 0 in the JSON.
	NumberOfCorrections int                `json:"numberOfCorrections,omitempty"`
	Changes             []SpellCheckChange `json:"changes,omitempty"`
}
+// +// CONTACT: hello@weaviate.io +// + +package additional + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/tailor-inc/graphql" + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/search" +) + +type AdditionalProperty interface { + AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig) ([]search.Result, error) + ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} + AdditionalPropertyDefaultValue() interface{} + AdditionalFieldFn(classname string) *graphql.Field +} + +type GraphQLAdditionalArgumentsProvider struct { + spellCheckProvider AdditionalProperty +} + +func New(spellCheckProvider AdditionalProperty) *GraphQLAdditionalArgumentsProvider { + return &GraphQLAdditionalArgumentsProvider{spellCheckProvider} +} + +func (p *GraphQLAdditionalArgumentsProvider) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + additionalProperties := map[string]modulecapabilities.AdditionalProperty{} + additionalProperties["spellCheck"] = p.getSpellCheck() + return additionalProperties +} + +func (p *GraphQLAdditionalArgumentsProvider) getSpellCheck() modulecapabilities.AdditionalProperty { + return modulecapabilities.AdditionalProperty{ + GraphQLNames: []string{"spellCheck"}, + GraphQLFieldFunction: p.spellCheckProvider.AdditionalFieldFn, + GraphQLExtractFunction: p.spellCheckProvider.ExtractAdditionalFn, + SearchFunctions: modulecapabilities.AdditionalSearch{ + ExploreGet: p.spellCheckProvider.AdditionalPropertyFn, + ExploreList: p.spellCheckProvider.AdditionalPropertyFn, + }, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/additional/spellcheck/param_helper.go 
type paramHelper struct{}

func newParamHelper() *paramHelper {
	return &paramHelper{}
}

// getTexts extracts the query texts from the module arguments, preferring
// nearText over ask. It returns the name of the argument the texts came
// from, the texts themselves, and any decoding error.
func (p *paramHelper) getTexts(argumentModuleParams map[string]interface{}) (string, []string, error) {
	if arg := argumentModuleParams["nearText"]; arg != nil {
		texts, err := p.parseNearText(arg)
		return "nearText", texts, err
	}
	if arg := argumentModuleParams["ask"]; arg != nil {
		texts, err := p.parseAsk(arg)
		return "ask", texts, err
	}
	return "", []string{}, nil
}

// toJsonParam round-trips an arbitrary argument struct through JSON to
// obtain a generic map representation of its exported fields.
func (p *paramHelper) toJsonParam(arg interface{}) (map[string]interface{}, error) {
	data, err := json.Marshal(arg)
	if err != nil {
		return nil, err
	}
	var asMap map[string]interface{}
	if err := json.Unmarshal(data, &asMap); err != nil {
		return nil, err
	}
	return asMap, nil
}

// parseNearText collects the string entries of the nearText "Values" field;
// non-string entries are silently skipped.
func (p *paramHelper) parseNearText(arg interface{}) ([]string, error) {
	argument, err := p.toJsonParam(arg)
	if err != nil {
		return nil, err
	}
	values, ok := argument["Values"].([]interface{})
	if !ok {
		return []string{}, nil
	}
	texts := []string{}
	for _, value := range values {
		if strVal, ok := value.(string); ok {
			texts = append(texts, strVal)
		}
	}
	return texts, nil
}

// parseAsk extracts the ask "Question" field as a single-element slice.
func (p *paramHelper) parseAsk(arg interface{}) ([]string, error) {
	argument, err := p.toJsonParam(arg)
	if err != nil {
		return nil, err
	}
	if question, ok := argument["Question"].(string); ok {
		return []string{question}, nil
	}
	return []string{}, nil
}
false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + p := ¶mHelper{} + got, got1, err := p.getTexts(tt.args.argumentModuleParams) + if (err != nil) != tt.wantErr { + t.Errorf("paramHelper.getTexts() error = %v, wantErr %v", err, tt.wantErr) + return + } + if got != tt.want { + t.Errorf("paramHelper.getTexts() got = %v, want %v", got, tt.want) + } + if !reflect.DeepEqual(got1, tt.want1) { + t.Errorf("paramHelper.getTexts() got1 = %v, want %v", got1, tt.want1) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/additional/spellcheck/spellcheck.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/additional/spellcheck/spellcheck.go new file mode 100644 index 0000000000000000000000000000000000000000..c5d26671aea91281da414f06f3bf8360b00ad058 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/additional/spellcheck/spellcheck.go @@ -0,0 +1,62 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package spellcheck + +import ( + "context" + "errors" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/tailor-inc/graphql" + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/modules/text-spellcheck/ent" +) + +type Params struct{} + +type spellCheckClient interface { + Check(ctx context.Context, text []string) (*ent.SpellCheckResult, error) +} + +type SpellCheckProvider struct { + spellCheck spellCheckClient + paramHelper *paramHelper +} + +func New(spellCheck spellCheckClient) *SpellCheckProvider { + return &SpellCheckProvider{spellCheck, newParamHelper()} +} + +func (p *SpellCheckProvider) AdditionalPropertyDefaultValue() interface{} { + return &Params{} +} + +func (p *SpellCheckProvider) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return &Params{} +} + +func (p *SpellCheckProvider) AdditionalFieldFn(classname string) *graphql.Field { + return p.additionalSpellCheckField(classname) +} + +func (p *SpellCheckProvider) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + if parameters, ok := params.(*Params); ok { + return p.findSpellCheck(ctx, in, parameters, limit, argumentModuleParams) + } + return nil, errors.New("wrong parameters") +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/additional/spellcheck/spellcheck_graphql_field.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/additional/spellcheck/spellcheck_graphql_field.go new file mode 100644 index 0000000000000000000000000000000000000000..87996cab1351ccb12b9248a257bb1115202165a1 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/additional/spellcheck/spellcheck_graphql_field.go @@ -0,0 +1,49 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package spellcheck + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func (p *SpellCheckProvider) additionalSpellCheckField(classname string) *graphql.Field { + return &graphql.Field{ + Type: graphql.NewList(p.additionalSpellCheckObj(classname)), + } +} + +func (p *SpellCheckProvider) additionalSpellCheckObj(classname string) *graphql.Object { + return graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalSpellCheck", classname), + Fields: graphql.Fields{ + "originalText": &graphql.Field{Type: graphql.String}, + "didYouMean": &graphql.Field{Type: graphql.String}, + "location": &graphql.Field{Type: graphql.String}, + "numberOfCorrections": &graphql.Field{Type: graphql.Int}, + "changes": &graphql.Field{ + Type: graphql.NewList(p.additionalSpellCheckChangesObj(classname)), + }, + }, + }) +} + +func (p *SpellCheckProvider) additionalSpellCheckChangesObj(classname string) *graphql.Object { + return graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalSpellCheckChanges", classname), + Fields: graphql.Fields{ + "original": &graphql.Field{Type: graphql.String}, + "corrected": &graphql.Field{Type: graphql.String}, + }, + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/additional/spellcheck/spellcheck_graphql_field_test.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/additional/spellcheck/spellcheck_graphql_field_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a9014a6073c68c85724c1c2f81fd336ad2484519 --- /dev/null +++ 
// TestSpellCheckField verifies the shape of the generated spellCheck GraphQL
// field: a list of per-class objects with five fields, whose "changes" field
// is itself a list of two-field change objects.
func TestSpellCheckField(t *testing.T) {
	t.Run("should generate spellCheck argument properly", func(t *testing.T) {
		// given
		spellCheckProvider := &SpellCheckProvider{}
		classname := "Class"

		// when
		spellCheck := spellCheckProvider.additionalSpellCheckField(classname)

		// then
		// the built graphQL field needs to support this structure:
		// Type: {
		//   spellCheck: [{
		//     "originalText": "What did the monkey do?",
		//     "didYouMean": "What did the monkey do?"",
		//     "location": "nearText.concepts[0]",
		//     "numberOfCorrections": 1,
		//     "changes": [{
		//       "original": "misspelling",
		//       "didYouMean": "correction"
		//     }]
		//   }]
		// }
		assert.NotNil(t, spellCheck)
		assert.Equal(t, "ClassAdditionalSpellCheck", spellCheck.Type.Name())
		assert.NotNil(t, spellCheck.Type)
		// the outer type is a list of the per-class object
		spellCheckObjectList, spellCheckObjectListOK := spellCheck.Type.(*graphql.List)
		assert.True(t, spellCheckObjectListOK)
		spellCheckObject, spellCheckObjectOK := spellCheckObjectList.OfType.(*graphql.Object)
		assert.True(t, spellCheckObjectOK)
		assert.Equal(t, 5, len(spellCheckObject.Fields()))
		assert.NotNil(t, spellCheckObject.Fields()["originalText"])
		assert.NotNil(t, spellCheckObject.Fields()["didYouMean"])
		assert.NotNil(t, spellCheckObject.Fields()["location"])
		assert.NotNil(t, spellCheckObject.Fields()["numberOfCorrections"])
		assert.NotNil(t, spellCheckObject.Fields()["changes"])
		// "changes" is itself a list of change objects
		changes := spellCheckObject.Fields()["changes"]
		spellCheckChangesObjectList, spellCheckChangesObjectListOK := changes.Type.(*graphql.List)
		assert.True(t, spellCheckChangesObjectListOK)
		spellCheckChangesObject, spellCheckChangesObjectOK := spellCheckChangesObjectList.OfType.(*graphql.Object)
		assert.True(t, spellCheckChangesObjectOK)
		assert.Equal(t, 2, len(spellCheckChangesObject.Fields()))
		assert.NotNil(t, spellCheckChangesObject.Fields()["original"])
		assert.NotNil(t, spellCheckChangesObject.Fields()["corrected"])
	})
}
+//
+// CONTACT: hello@weaviate.io
+//
+
+package spellcheck
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"strings"
+
+	"github.com/weaviate/weaviate/entities/models"
+	"github.com/weaviate/weaviate/entities/search"
+	spellcheckmodels "github.com/weaviate/weaviate/modules/text-spellcheck/additional/models"
+	"github.com/weaviate/weaviate/modules/text-spellcheck/ent"
+)
+
+// findSpellCheck attaches a "spellCheck" additional property to every result,
+// computed once from the query texts found in argumentModuleParams.
+func (p *SpellCheckProvider) findSpellCheck(ctx context.Context,
+	in []search.Result, params *Params, limit *int,
+	argumentModuleParams map[string]interface{},
+) ([]search.Result, error) {
+	if len(in) > 0 {
+		name, texts, err := p.paramHelper.getTexts(argumentModuleParams)
+		if err != nil {
+			return in, errors.Join(errors.New("cannot get texts"), err) // keep the underlying cause instead of discarding it
+		}
+		spellCheckAdditionalProperty, err := p.performSpellCheck(ctx, name, texts)
+		if err != nil {
+			return in, err
+		}
+		for i := range in {
+			ap := in[i].AdditionalProperties
+			if ap == nil {
+				ap = models.AdditionalProperties{}
+			}
+			ap["spellCheck"] = spellCheckAdditionalProperty
+			in[i].AdditionalProperties = ap
+		}
+	}
+	return in, nil
+}
+
+// performSpellCheck runs the remote spell check for the given texts; empty
+// input short-circuits without a network round trip.
+func (p *SpellCheckProvider) performSpellCheck(ctx context.Context, name string, texts []string) ([]*spellcheckmodels.SpellCheck, error) {
+	if len(texts) == 0 {
+		return []*spellcheckmodels.SpellCheck{}, nil
+	}
+	spellCheckResult, err := p.spellCheck.Check(ctx, texts)
+	if err != nil {
+		return nil, err
+	}
+	return p.getSpellCheckAdditionalProperty(name, spellCheckResult), nil
+}
+
+// getSpellCheckAdditionalProperty builds one SpellCheck entry per checked text.
+func (p *SpellCheckProvider) getSpellCheckAdditionalProperty(name string, spellCheckResult *ent.SpellCheckResult) []*spellcheckmodels.SpellCheck {
+	spellCheck := []*spellcheckmodels.SpellCheck{}
+	for i, t := range spellCheckResult.Text {
+		spellCheck = append(spellCheck, p.getSpellCheckAdditionalPropertyObject(t, p.getSpellCheckLocation(name, i), spellCheckResult))
+	}
+	return spellCheck
+}
+
+// getSpellCheckLocation maps the argument name to the GraphQL path of text i.
+func (p *SpellCheckProvider) getSpellCheckLocation(name string, i int) string {
+	if name == "nearText" {
+		return fmt.Sprintf("nearText.concepts[%v]", i)
+	}
+	return "ask.question"
+}
+
+// getSpellCheckAdditionalPropertyObject applies all reported corrections
+// (case-insensitively) to originalText and records each applied change.
+func (p *SpellCheckProvider) getSpellCheckAdditionalPropertyObject(originalText, location string, spellCheckResult *ent.SpellCheckResult) *spellcheckmodels.SpellCheck {
+	didYouMean := originalText
+	changes := []spellcheckmodels.SpellCheckChange{}
+	for _, change := range spellCheckResult.Changes {
+		if strings.Contains(strings.ToLower(didYouMean), change.Original) {
+			didYouMean = strings.ReplaceAll(strings.ToLower(didYouMean), change.Original, change.Correction)
+			change := spellcheckmodels.SpellCheckChange{
+				Original:  change.Original,
+				Corrected: change.Correction,
+			}
+			changes = append(changes, change)
+		}
+	}
+	return &spellcheckmodels.SpellCheck{
+		OriginalText:        originalText,
+		DidYouMean:          didYouMean,
+		Location:            location,
+		NumberOfCorrections: len(changes),
+		Changes:             changes,
+	}
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/spellcheck.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/spellcheck.go
new file mode 100644
index 0000000000000000000000000000000000000000..6aa03ee8baef2128157ed53f612703a9d645c230
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/spellcheck.go
@@ -0,0 +1,114 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+// CONTACT: hello@weaviate.io
+//
+
+package clients
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"time"
+
+	"github.com/pkg/errors"
+	"github.com/sirupsen/logrus"
+	"github.com/weaviate/weaviate/modules/text-spellcheck/ent"
+)
+
+type spellCheckInput struct {
+	Text []string `json:"text"`
+}
+
+type spellCheckCorrection struct {
+	Original   string `json:"original"`
+	Correction string `json:"correction"`
+}
+
+type spellCheckResponse struct {
+	spellCheckInput
+	Changes []spellCheckCorrection `json:"changes"`
+}
+
+// spellCheck is an HTTP client for the remote spell check inference service.
+type spellCheck struct {
+	origin     string
+	httpClient *http.Client
+	logger     logrus.FieldLogger
+}
+
+// New creates a spell check client; a zero timeout means no client timeout.
+func New(origin string, timeout time.Duration, logger logrus.FieldLogger) *spellCheck {
+	return &spellCheck{
+		origin: origin,
+		httpClient: &http.Client{
+			Timeout: timeout,
+		},
+		logger: logger,
+	}
+}
+
+// Check POSTs the texts to /spellcheck/ and returns the parsed corrections.
+func (s *spellCheck) Check(ctx context.Context, text []string) (*ent.SpellCheckResult, error) {
+	body, err := json.Marshal(spellCheckInput{
+		Text: text,
+	})
+	if err != nil {
+		return nil, errors.Wrapf(err, "marshal body")
+	}
+
+	req, err := http.NewRequestWithContext(ctx, "POST", s.url("/spellcheck/"),
+		bytes.NewReader(body))
+	if err != nil {
+		return nil, errors.Wrap(err, "create POST request")
+	}
+
+	res, err := s.httpClient.Do(req)
+	if err != nil {
+		return nil, errors.Wrap(err, "send POST request")
+	}
+	defer res.Body.Close()
+
+	bodyBytes, err := io.ReadAll(res.Body)
+	if err != nil {
+		return nil, errors.Wrap(err, "read response body")
+	}
+
+	if res.StatusCode > 399 {
+		return nil, errors.Errorf("fail with status %d", res.StatusCode)
+	}
+
+	var resBody spellCheckResponse
+	if err := json.Unmarshal(bodyBytes, &resBody); err != nil {
+		return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes)))
+	}
+
+	return &ent.SpellCheckResult{
+		Text:    resBody.Text,
+		Changes: s.getCorrections(resBody.Changes),
+	}, nil
+}
+
+func (s *spellCheck) url(path string) string {
+	return fmt.Sprintf("%s%s", s.origin, path)
+}
+
+// getCorrections converts the wire format into the module's entity type.
+func (s *spellCheck) getCorrections(changes []spellCheckCorrection) []ent.SpellCheckCorrection {
+	if len(changes) == 0 {
+		return nil
+	}
+	corrections := make([]ent.SpellCheckCorrection, len(changes))
+	for i := range changes {
+		corrections[i] = ent.SpellCheckCorrection{
+			Original:   changes[i].Original,
+			Correction: changes[i].Correction,
+		}
+	}
+	return corrections
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/spellcheck_meta.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/spellcheck_meta.go
new file mode 100644
index 0000000000000000000000000000000000000000..fe9cdcf608099d847b67aeea0a318565ec15ec6f
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/spellcheck_meta.go
@@ -0,0 +1,45 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+// CONTACT: hello@weaviate.io
+//
+
+package clients
+
+import (
+	"context"
+	"encoding/json"
+	"io"
+	"net/http"
+
+	"github.com/pkg/errors"
+)
+
+// MetaInfo GETs the service's /meta endpoint and returns the raw JSON payload.
+func (s *spellCheck) MetaInfo() (map[string]interface{}, error) {
+	req, err := http.NewRequestWithContext(context.Background(), "GET", s.url("/meta"), nil)
+	if err != nil {
+		return nil, errors.Wrap(err, "create GET meta request")
+	}
+
+	res, err := s.httpClient.Do(req)
+	if err != nil {
+		return nil, errors.Wrap(err, "send GET meta request")
+	}
+	defer res.Body.Close()
+
+	bodyBytes, err := io.ReadAll(res.Body)
+	if err != nil {
+		return nil, errors.Wrap(err, "read meta response body")
+	}
+
+	var resBody map[string]interface{}
+	if err := json.Unmarshal(bodyBytes, &resBody); err != nil {
+		return nil, errors.Wrap(err, "unmarshal meta response body")
+	}
+	return resBody, nil
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/spellcheck_meta_test.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/spellcheck_meta_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..72a467368bbf4abf7d79a930e2dfc717d14eff9d
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/spellcheck_meta_test.go
@@ -0,0 +1,64 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+// CONTACT: hello@weaviate.io
+//
+
+package clients
+
+import (
+	"net/http"
+	"net/http/httptest"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestGetMeta(t *testing.T) {
+	t.Run("when the server is providing meta", func(t *testing.T) {
+		server := httptest.NewServer(&testMetaHandler{t: t})
+		defer server.Close()
+		c := New(server.URL, 0, nullLogger())
+		meta, err := c.MetaInfo()
+
+		assert.Nil(t, err)
+		assert.NotNil(t, meta)
+		metaModel := meta["model"]
+		assert.True(t, metaModel != nil)
+		model, modelOK := metaModel.(map[string]interface{})
+		assert.True(t, modelOK)
+		assert.True(t, model["name"] != nil)
+	})
+}
+
+type testMetaHandler struct {
+	t *testing.T
+	// the test handler will report as not ready before the time has passed
+	readyTime time.Time
+}
+
+func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+	assert.Equal(f.t, "/meta", r.URL.String())
+	assert.Equal(f.t, http.MethodGet, r.Method)
+
+	if time.Since(f.readyTime) < 0 {
+		w.WriteHeader(http.StatusServiceUnavailable)
+		return
+	}
+
+	w.Write([]byte(f.metaInfo()))
+}
+
+func (f *testMetaHandler) metaInfo() string {
+	return `{
+  "model": {
+    "name": "pyspellchecker"
+  }
+}`
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/startup.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/startup.go
new file mode 100644
index 0000000000000000000000000000000000000000..3e7eb82afd7becb59c4cdbb2f6630243600d9762
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/startup.go
@@ -0,0 +1,68 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+// CONTACT: hello@weaviate.io
+//
+
+package clients
+
+import (
+	"context"
+	"net/http"
+	"time"
+
+	"github.com/pkg/errors"
+)
+
+// WaitForStartup polls the readiness endpoint every interval until the
+// service is ready or initCtx expires, in which case the last probe error
+// is wrapped and returned.
+func (s *spellCheck) WaitForStartup(initCtx context.Context,
+	interval time.Duration,
+) error {
+	t := time.NewTicker(interval)
+	defer t.Stop()
+	expired := initCtx.Done()
+	var lastErr error
+	for {
+		select {
+		case <-t.C:
+			lastErr = s.checkReady(initCtx)
+			if lastErr == nil {
+				return nil
+			}
+			s.logger.
+				WithField("action", "spellcheck_remote_wait_for_startup").
+				WithError(lastErr).Warnf("spellcheck remote service not ready")
+		case <-expired:
+			return errors.Wrapf(lastErr, "init context expired before remote was ready")
+		}
+	}
+}
+
+// checkReady probes /.well-known/ready with a short per-request timeout.
+func (s *spellCheck) checkReady(initCtx context.Context) error {
+	// spawn a new context (derived on the overall context) which is used to
+	// consider an individual request timed out
+	requestCtx, cancel := context.WithTimeout(initCtx, 500*time.Millisecond)
+	defer cancel()
+
+	req, err := http.NewRequestWithContext(requestCtx, http.MethodGet,
+		s.url("/.well-known/ready"), nil)
+	if err != nil {
+		return errors.Wrap(err, "create check ready request")
+	}
+
+	res, err := s.httpClient.Do(req)
+	if err != nil {
+		return errors.Wrap(err, "send check ready request")
+	}
+
+	defer res.Body.Close()
+	if res.StatusCode > 299 {
+		return errors.Errorf("not ready: status %d", res.StatusCode)
+	}
+
+	return nil
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/startup_test.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/startup_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..ff65558b1e87f7f143c6c43afdaf422dd3829c34
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/clients/startup_test.go
@@ -0,0 +1,99 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+// CONTACT: hello@weaviate.io
+//
+
+package clients
+
+import (
+	"context"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+	"time"
+
+	"github.com/sirupsen/logrus"
+	"github.com/sirupsen/logrus/hooks/test"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestWaitForStartup(t *testing.T) {
+	t.Run("when the server is immediately ready", func(t *testing.T) {
+		server := httptest.NewServer(&testReadyHandler{t: t})
+		defer server.Close()
+		c := New(server.URL, 0, nullLogger())
+		err := c.WaitForStartup(context.Background(), 50*time.Millisecond)
+
+		assert.Nil(t, err)
+	})
+
+	t.Run("when the server is down", func(t *testing.T) {
+		c := New("http://nothing-running-at-this-url", 0, nullLogger())
+		ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
+		defer cancel()
+		err := c.WaitForStartup(ctx, 150*time.Millisecond)
+
+		require.NotNil(t, err, nullLogger())
+		assert.Contains(t, err.Error(), "expired before remote was ready")
+	})
+
+	t.Run("when the server is alive, but not ready", func(t *testing.T) {
+		server := httptest.NewServer(&testReadyHandler{
+			t:         t,
+			readyTime: time.Now().Add(1 * time.Minute),
+		})
+		c := New(server.URL, 0, nullLogger())
+		defer server.Close()
+		ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
+		defer cancel()
+		err := c.WaitForStartup(ctx, 50*time.Millisecond)
+
+		require.NotNil(t, err)
+		assert.Contains(t, err.Error(), "expired before remote was ready")
+	})
+
+	t.Run("when the server is initially not ready, but then becomes ready",
+		func(t *testing.T) {
+			server := httptest.NewServer(&testReadyHandler{
+				t:         t,
+				readyTime: time.Now().Add(100 * time.Millisecond),
+			})
+			c := New(server.URL, 0, nullLogger())
+			defer server.Close()
+			ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
+			defer cancel()
+			err := c.WaitForStartup(ctx, 50*time.Millisecond)
+
+			require.Nil(t, err)
+		})
+}
+
+type testReadyHandler struct {
+	t *testing.T
+	// the test handler will report as not ready before the time has passed
+	readyTime time.Time
+}
+
+func (f *testReadyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+	assert.Equal(f.t, "/.well-known/ready", r.URL.String())
+	assert.Equal(f.t, http.MethodGet, r.Method)
+
+	if time.Since(f.readyTime) < 0 {
+		w.WriteHeader(http.StatusServiceUnavailable)
+		return
+	}
+
+	w.WriteHeader(http.StatusNoContent)
+}
+
+func nullLogger() logrus.FieldLogger {
+	l, _ := test.NewNullLogger()
+	return l
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/config.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/config.go
new file mode 100644
index 0000000000000000000000000000000000000000..58bfca43b768acd5df2fe184c590c99d37d8d0fa
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/config.go
@@ -0,0 +1,39 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+// +// CONTACT: hello@weaviate.io +// + +package modspellcheck + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *SpellCheckModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *SpellCheckModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *SpellCheckModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/ent/spellcheck_result.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/ent/spellcheck_result.go new file mode 100644 index 0000000000000000000000000000000000000000..c451f206878ad421a116345ed0c0ce90f6add459 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/ent/spellcheck_result.go @@ -0,0 +1,21 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +type SpellCheckCorrection struct { + Original, Correction string +} + +type SpellCheckResult struct { + Text []string + Changes []SpellCheckCorrection +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/module.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/module.go new file mode 100644 index 0000000000000000000000000000000000000000..5f2d6ba37698a48bc98e4451dd3b9d30339d17c4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/module.go @@ -0,0 +1,105 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modspellcheck + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + spellcheckadditional "github.com/weaviate/weaviate/modules/text-spellcheck/additional" + spellcheckadditionalspellcheck "github.com/weaviate/weaviate/modules/text-spellcheck/additional/spellcheck" + "github.com/weaviate/weaviate/modules/text-spellcheck/clients" + "github.com/weaviate/weaviate/modules/text-spellcheck/ent" + spellchecktexttransformer "github.com/weaviate/weaviate/modules/text-spellcheck/transformer" + spellchecktexttransformerautocorrect "github.com/weaviate/weaviate/modules/text-spellcheck/transformer/autocorrect" +) + +const Name = "text-spellcheck" + +func New() *SpellCheckModule { + return &SpellCheckModule{} +} + +type SpellCheckModule struct { + spellCheck spellCheckClient + additionalPropertiesProvider modulecapabilities.AdditionalProperties + textTransformersProvider modulecapabilities.TextTransformers +} + +type spellCheckClient interface { + Check(ctx context.Context, text 
[]string) (*ent.SpellCheckResult, error) + MetaInfo() (map[string]interface{}, error) +} + +func (m *SpellCheckModule) Name() string { + return Name +} + +func (m *SpellCheckModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Extension +} + +func (m *SpellCheckModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + uri := os.Getenv("SPELLCHECK_INFERENCE_API") + if uri == "" { + return errors.Errorf("required variable SPELLCHECK_INFERENCE_API is not set") + } + + client := clients.New(uri, params.GetConfig().ModuleHttpClientTimeout, params.GetLogger()) + + if err := client.WaitForStartup(ctx, 1*time.Second); err != nil { + return errors.Wrap(err, "init remote spell check module") + } + + m.spellCheck = client + + m.initTextTransformers() + m.initAdditional() + + return nil +} + +func (m *SpellCheckModule) initTextTransformers() { + autocorrectProvider := spellchecktexttransformerautocorrect.New(m.spellCheck) + m.textTransformersProvider = spellchecktexttransformer.New(autocorrectProvider) +} + +func (m *SpellCheckModule) initAdditional() { + spellCheckProvider := spellcheckadditionalspellcheck.New(m.spellCheck) + m.additionalPropertiesProvider = spellcheckadditional.New(spellCheckProvider) +} + +func (m *SpellCheckModule) MetaInfo() (map[string]interface{}, error) { + return m.spellCheck.MetaInfo() +} + +func (m *SpellCheckModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +func (m *SpellCheckModule) TextTransformers() map[string]modulecapabilities.TextTransform { + return m.textTransformersProvider.TextTransformers() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.AdditionalProperties(New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.TextTransformers(New()) +) diff --git 
a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/transformer/autocorrect/autocorrect.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/transformer/autocorrect/autocorrect.go new file mode 100644 index 0000000000000000000000000000000000000000..1136f2d530f43a8e090d12c289167dbe65cfb146 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/transformer/autocorrect/autocorrect.go @@ -0,0 +1,48 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package autocorrect + +import ( + "context" + "strings" + + "github.com/weaviate/weaviate/modules/text-spellcheck/ent" +) + +type spellCheckClient interface { + Check(ctx context.Context, text []string) (*ent.SpellCheckResult, error) +} + +type AutocorrectTransformer struct { + spellCheckClient spellCheckClient +} + +func New(spellCheckClient spellCheckClient) *AutocorrectTransformer { + return &AutocorrectTransformer{spellCheckClient} +} + +func (t *AutocorrectTransformer) Transform(in []string) ([]string, error) { + spellCheckResult, err := t.spellCheckClient.Check(context.Background(), in) + if err != nil { + return nil, err + } + result := make([]string, len(in)) + changes := spellCheckResult.Changes + for i, txt := range spellCheckResult.Text { + didYouMean := txt + for _, change := range changes { + didYouMean = strings.ReplaceAll(strings.ToLower(didYouMean), change.Original, change.Correction) + } + result[i] = didYouMean + } + return result, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/transformer/provider.go b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/transformer/provider.go new file mode 100644 index 
0000000000000000000000000000000000000000..c7edeb6cf0eae45482539f4a93c02b74ae5d8f87 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text-spellcheck/transformer/provider.go @@ -0,0 +1,31 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package texttransformer + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" +) + +type TextTransformerProvider struct { + autocorrecProvider modulecapabilities.TextTransform +} + +func New(autocorrecProvider modulecapabilities.TextTransform) *TextTransformerProvider { + return &TextTransformerProvider{autocorrecProvider} +} + +func (p *TextTransformerProvider) TextTransformers() map[string]modulecapabilities.TextTransform { + textTransformers := map[string]modulecapabilities.TextTransform{} + textTransformers["nearText"] = p.autocorrecProvider + textTransformers["ask"] = p.autocorrecProvider + return textTransformers +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/clients/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/clients/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..66b3424d989ced1b66c11b705780a757ec395d48 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/clients/fakes_for_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/clients/jinaai.go b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/clients/jinaai.go new file mode 100644 index 0000000000000000000000000000000000000000..a1230481bb195044c83b320270ff3413072bd258 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/clients/jinaai.go @@ -0,0 +1,84 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "sort" + "strings" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2multivec-jinaai/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/jinaai" +) + +const ( + defaultRPM = 500 // from https://jina.ai/embeddings/ + defaultTPM = 1_000_000 +) + +type vectorizer struct { + client *jinaai.Client[[][]float32] + logger logrus.FieldLogger +} + +func New(jinaAIApiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + client: jinaai.New[[][]float32](jinaAIApiKey, timeout, defaultRPM, defaultTPM, jinaai.MultiVectorBuildUrlFn, logger), + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[][]float32], *modulecomponents.RateLimits, int, error) { + settings := ent.NewClassSettings(cfg) + + res, _, usage, err := v.client.Vectorize(ctx, input, jinaai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Dimensions: settings.Dimensions(), + Task: jinaai.RetrievalPassage, + }) + return res, nil, usage, err +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[][]float32], error) { + // make sure that for ColBERT we get always one embedding back + // always sort the input and concatenate the string into one + // we have to do this bc there's no easy way of combining ColBERT embeddings + sort.Strings(input) + sortedInput := strings.Join(input, " ") + settings := ent.NewClassSettings(cfg) + res, _, _, err := v.client.Vectorize(ctx, []string{sortedInput}, jinaai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Dimensions: 
settings.Dimensions(), + Task: jinaai.RetrievalQuery, + }) + return res, err +} + +func (v *vectorizer) GetApiKeyHash(ctx context.Context, config moduletools.ClassConfig) [32]byte { + return v.client.GetApiKeyHash(ctx, config) +} + +func (v *vectorizer) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + return v.client.GetVectorizerRateLimit(ctx, cfg) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/clients/jinaai_test.go b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/clients/jinaai_test.go new file mode 100644 index 0000000000000000000000000000000000000000..33127544b16a1275c790274387f7f7db114815f2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/clients/jinaai_test.go @@ -0,0 +1,181 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/modules/text2multivec-jinaai/ent" +) + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + + expected := &modulecomponents.VectorizationResult[[][]float32]{ + Text: []string{"This is my text"}, + Vector: [][][]float32{{{0.1, 0.2, 0.3}, {0.11, 0.22, 0.33}, {0.111, 0.222, 0.333}}}, + Dimensions: 3, + } + res, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": ent.DefaultJinaAIModel, "baseURL": server.URL}}) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("apiKey", 0, nullLogger()) + + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{}}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + c := New("apiKey", 0, nullLogger()) + + _, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, fakeClassConfig{classConfig: 
map[string]interface{}{"baseURL": server.URL}}) + + require.NotNil(t, err) + assert.EqualError(t, err, "connection to: JinaAI API failed with status: 500 error: nope, not gonna happen") + }) + + t.Run("when JinaAI key is passed using X-Jinaai-Api-Key header", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Jinaai-Api-Key", []string{"some-key"}) + + expected := &modulecomponents.VectorizationResult[[][]float32]{ + Text: []string{"This is my text"}, + Vector: [][][]float32{{{0.1, 0.2, 0.3}, {0.11, 0.22, 0.33}, {0.111, 0.222, 0.333}}}, + Dimensions: 3, + } + res, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": ent.DefaultJinaAIModel, "baseURL": server.URL}}) + + require.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when JinaAI key is empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{}}) + + require.NotNil(t, err) + assert.EqualError(t, err, "API Key: no api key found "+ + "neither in request header: X-Jinaai-Api-Key "+ + "nor in environment variable under JINAAI_APIKEY") + }) + + t.Run("when X-Jinaai-Api-Key header is passed but empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Jinaai-Api-Key", []string{""}) + + _, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": ent.DefaultJinaAIModel}}) + + require.NotNil(t, err) + 
assert.EqualError(t, err, "API Key: no api key found "+ + "neither in request header: X-Jinaai-Api-Key "+ + "nor in environment variable under JINAAI_APIKEY") + }) +} + +type fakeHandler struct { + t *testing.T + serverError error +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.serverError != nil { + embedding := map[string]interface{}{ + "detail": f.serverError.Error(), + } + outBytes, err := json.Marshal(embedding) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + textInputArray := b["input"].([]interface{}) + textInput := textInputArray[0].(string) + assert.Greater(f.t, len(textInput), 0) + + embeddingData := map[string]interface{}{ + "object": textInput, + "index": 0, + "embedding": [][]float32{{0.1, 0.2, 0.3}, {0.11, 0.22, 0.33}, {0.111, 0.222, 0.333}}, + } + embedding := map[string]interface{}{ + "object": "list", + "data": []interface{}{embeddingData}, + } + + outBytes, err := json.Marshal(embedding) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..c3905fd9f2521a30d25eedcaa20d5f4069b46e0c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 
Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "JinaAI Multivec Module", + "documentationHref": "https://jina.ai/embeddings/", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/config.go b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..add2ae3365f7927210bcffb474eb8d95d12c8980 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/config.go @@ -0,0 +1,50 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modjinaai + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2multivec-jinaai/ent" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *JinaAIModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": ent.DefaultVectorizeClassName, + "model": ent.DefaultJinaAIModel, + "baseURL": ent.DefaultBaseURL, + "dimensions": &ent.DefaultDimensions, + } +} + +func (m *JinaAIModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !ent.DefaultPropertyIndexed, + "vectorizePropertyName": ent.DefaultVectorizePropertyName, + } +} + +func (m *JinaAIModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := ent.NewClassSettings(cfg) + return 
settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..b724748610b207ca6f2d65ef3b427091546474fa --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/ent/class_settings.go @@ -0,0 +1,59 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + // Default values for URL (model is ok) cannot be changed before we solve how old classes that have the defaults + // NOT set will handle the change + DefaultJinaAIModel = "jina-colbert-v2" + DefaultVectorizeClassName = true + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + DefaultBaseURL = "https://api.jina.ai" + LowerCaseInput = false +) + +var DefaultDimensions int64 = 128 + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettingsWithAltNames(cfg, LowerCaseInput, "text2multivec-jinaai", []string{"text2colbert-jinaai"}, nil)} +} + +func (cs *classSettings) Model() string { + return cs.BaseClassSettings.GetPropertyAsString("model", DefaultJinaAIModel) +} + +func (cs *classSettings) BaseURL() string { + return 
cs.BaseClassSettings.GetPropertyAsString("baseURL", DefaultBaseURL) +} + +func (cs *classSettings) Dimensions() *int64 { + return cs.BaseClassSettings.GetPropertyAsInt64("dimensions", &DefaultDimensions) +} + +func (cs *classSettings) Validate(class *models.Class) error { + if err := cs.BaseClassSettings.Validate(class); err != nil { + return err + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/module.go b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..4eb7d88143b56a605b80c1d8ddfa2ca774b898fa --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/module.go @@ -0,0 +1,169 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modjinaai + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2multivec-jinaai/clients" + "github.com/weaviate/weaviate/modules/text2multivec-jinaai/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" +) + +const ( + Name = "text2multivec-jinaai" + LegacyName = "text2colbert-jinaai" +) + +var batchSettings = batch.Settings{ + // the encoding is different than OpenAI, but the code is not available in Go and too complicated to port. 
+ // using 30% more than the OpenAI model is a rough estimate but seems to work + TokenMultiplier: 1.3, + MaxTimePerBatch: float64(10), + MaxObjectsPerBatch: 512, // Info from jina + // real limit is 8192, but the vectorization times go up by A LOT if the batches are larger + MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 2500 }, + HasTokenLimit: true, + ReturnsRateLimit: false, +} + +func New() *JinaAIModule { + return &JinaAIModule{} +} + +type JinaAIModule struct { + // This needs to be changed to [][]float32 but it can't be done right now bc this interface type change + // is not possible now with the current implementation. Will change that later in next PR's + vectorizer text2vecbase.TextVectorizerBatch[[][]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[][]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func (m *JinaAIModule) Name() string { + return Name +} + +func (m *JinaAIModule) AltNames() []string { + return []string{LegacyName} +} + +func (m *JinaAIModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2Multivec +} + +func (m *JinaAIModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init additional properties provider") + } + + return nil +} + +func (m *JinaAIModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && 
arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *JinaAIModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + jinaAIApiKey := os.Getenv("JINAAI_APIKEY") + + client := clients.New(jinaAIApiKey, timeout, logger) + + m.vectorizer = text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, batchSettings, logger, m.Name()), + batch.ReturnBatchTokenizer(batchSettings.TokenMultiplier, m.Name(), ent.LowerCaseInput), + ) + m.metaProvider = client + + return nil +} + +func (m *JinaAIModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m *JinaAIModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([][]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg, ent.NewClassSettings(cfg)) +} + +func (m *JinaAIModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, nil, nil +} + +func (m *JinaAIModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][][]float32, []models.AdditionalProperties, map[int]error) { + vecs, errs := m.vectorizer.ObjectBatch(ctx, objs, skipObject, cfg) + return vecs, nil, errs +} + +func (m *JinaAIModule) MetaInfo() (map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *JinaAIModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +func (m *JinaAIModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([][]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) 
+} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[][]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[][]float32](New()) + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.ModuleHasAltNames(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..fc8d172a2b8aaa6798b1fb2aa4865e14c4e3c081 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2multivec-jinaai/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modjinaai + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *JinaAIModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *JinaAIModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *JinaAIModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[][]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[][]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/clients/aws.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/clients/aws.go new file mode 100644 index 0000000000000000000000000000000000000000..7cfa66b475e288bde3c4939a11760fb8d8e76600 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/clients/aws.go @@ -0,0 +1,364 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "strings" + "time" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/credentials" + "github.com/aws/aws-sdk-go-v2/service/bedrockruntime" + "github.com/aws/aws-sdk-go-v2/service/sagemakerruntime" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2vec-aws/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents" +) + +type operationType string + +var ( + vectorizeObject operationType = "vectorize_object" + vectorizeQuery operationType = "vectorize_query" +) + +func buildBedrockUrl(service, region, model string) string { + serviceName := service + if strings.HasPrefix(model, "cohere") { + serviceName = fmt.Sprintf("%s-runtime", serviceName) + } + urlTemplate := "https://%s.%s.amazonaws.com/model/%s/invoke" + return fmt.Sprintf(urlTemplate, serviceName, region, model) +} + +func buildSagemakerUrl(service, region, endpoint string) string { + urlTemplate := "https://runtime.%s.%s.amazonaws.com/endpoints/%s/invocations" + return fmt.Sprintf(urlTemplate, service, region, endpoint) +} + +type awsClient struct { + awsAccessKey string + awsSecret string + awsSessionToken string + buildBedrockUrlFn func(service, region, model string) string + buildSagemakerUrlFn func(service, region, endpoint string) string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(awsAccessKey, awsSecret, awsSessionToken string, timeout time.Duration, logger logrus.FieldLogger) *awsClient { + return &awsClient{ + awsAccessKey: awsAccessKey, + awsSecret: awsSecret, + awsSessionToken: awsSessionToken, + httpClient: &http.Client{ + Timeout: timeout, + }, + buildBedrockUrlFn: buildBedrockUrl, + buildSagemakerUrlFn: buildSagemakerUrl, + logger: logger, + } +} + +func (v *awsClient) Vectorize(ctx context.Context, input []string, + config 
ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + return v.vectorize(ctx, input, vectorizeObject, config) +} + +func (v *awsClient) VectorizeQuery(ctx context.Context, input []string, + config ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + return v.vectorize(ctx, input, vectorizeQuery, config) +} + +func (v *awsClient) vectorize(ctx context.Context, input []string, operation operationType, config ent.VectorizationConfig) (*ent.VectorizationResult, error) { + accessKey, err := v.getAwsAccessKey(ctx) + if err != nil { + return nil, errors.Wrapf(err, "AWS Access Key") + } + secretKey, err := v.getAwsAccessSecret(ctx) + if err != nil { + return nil, errors.Wrapf(err, "AWS Secret Key") + } + awsSessionToken, err := v.getAwsSessionToken(ctx) + if err != nil { + return nil, err + } + maxRetries := 5 + + service := v.getService(config) + if v.isBedrock(service) { + return v.sendBedrockRequest(ctx, input, operation, maxRetries, accessKey, secretKey, awsSessionToken, config) + } else { + return v.sendSagemakerRequest(ctx, input, maxRetries, accessKey, secretKey, awsSessionToken, config) + } +} + +func (v *awsClient) getConfig(ctx context.Context, + awsKey, awsSecret, awsSessionToken string, + region string, + maxRetries int, +) (aws.Config, error) { + return config.LoadDefaultConfig(ctx, + config.WithRegion(region), + config.WithCredentialsProvider( + credentials.NewStaticCredentialsProvider(awsKey, awsSecret, awsSessionToken), + ), + config.WithRetryMaxAttempts(maxRetries), + ) +} + +func (v *awsClient) sendBedrockRequest(ctx context.Context, + input []string, + operation operationType, + maxRetries int, + awsKey, awsSecret, awsSessionToken string, + cfg ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + model := cfg.Model + region := cfg.Region + + req, err := createRequestBody(model, input, operation) + if err != nil { + return nil, fmt.Errorf("failed to create request for model %s: %w", model, err) + } + + body, err := 
json.Marshal(req) + if err != nil { + return nil, fmt.Errorf("failed to marshal request for model %s: %w", model, err) + } + + sdkConfig, err := v.getConfig(ctx, awsKey, awsSecret, awsSessionToken, region, maxRetries) + if err != nil { + return nil, fmt.Errorf("failed to load AWS configuration: %w", err) + } + + client := bedrockruntime.NewFromConfig(sdkConfig) + result, err := client.InvokeModel(ctx, &bedrockruntime.InvokeModelInput{ + ModelId: aws.String(model), + ContentType: aws.String("application/json"), + Body: body, + }) + if err != nil { + errMsg := err.Error() + if strings.Contains(errMsg, "no such host") { + return nil, fmt.Errorf("Bedrock service is not available in the selected region. " + + "Please double-check the service availability for your region at " + + "https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services/") + } else if strings.Contains(errMsg, "Could not resolve the foundation model") { + return nil, fmt.Errorf("could not resolve the foundation model from model identifier: \"%v\". "+ + "Please verify that the requested model exists and is accessible within the specified region", model) + } else { + return nil, fmt.Errorf("couldn't invoke %s model: %w", model, err) + } + } + + return v.parseBedrockResponse(result.Body, input) +} + +func (v *awsClient) parseBedrockResponse(bodyBytes []byte, input []string) (*ent.VectorizationResult, error) { + var resBody bedrockEmbeddingResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + if len(resBody.Embedding) == 0 && len(resBody.Embeddings) == 0 { + return nil, fmt.Errorf("could not obtain vector from AWS Bedrock") + } + + embedding := resBody.Embedding + if len(resBody.Embeddings) > 0 { + embedding = resBody.Embeddings[0] + } + + return &ent.VectorizationResult{ + Text: input[0], + Dimensions: len(embedding), + Vector: embedding, + }, nil +} + +func (v *awsClient) sendSagemakerRequest(ctx context.Context, + input []string, + maxRetries int, + awsKey, awsSecret, awsSessionToken string, + cfg ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + region := v.getRegion(cfg) + endpoint := v.getEndpoint(cfg) + + body, err := json.Marshal(sagemakerEmbeddingsRequest{ + Inputs: input, + }) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + sdkConfig, err := v.getConfig(ctx, awsKey, awsSecret, awsSessionToken, region, maxRetries) + if err != nil { + return nil, fmt.Errorf("failed to load AWS configuration: %w", err) + } + + svc := sagemakerruntime.NewFromConfig(sdkConfig) + result, err := svc.InvokeEndpoint(ctx, &sagemakerruntime.InvokeEndpointInput{ + EndpointName: aws.String(endpoint), + ContentType: aws.String("application/json"), + Body: body, + }) + if err != nil { + return nil, fmt.Errorf("invoke request: %w", err) + } + + return v.parseSagemakerResponse(result.Body, input) +} + +func (v *awsClient) parseSagemakerResponse(bodyBytes []byte, input []string) (*ent.VectorizationResult, error) { + var smEmbeddings [][]float32 + if err := json.Unmarshal(bodyBytes, &smEmbeddings); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if len(smEmbeddings) == 0 { + return nil, errors.Errorf("empty embeddings response") + } + + return &ent.VectorizationResult{ + Text: input[0], + Dimensions: len(smEmbeddings[0]), + Vector: smEmbeddings[0], + }, nil +} + +func (v *awsClient) isBedrock(service string) bool { + return service == "bedrock" +} + +func (v *awsClient) getAwsAccessKey(ctx context.Context) (string, error) { + if awsAccessKey := modulecomponents.GetValueFromContext(ctx, "X-Aws-Access-Key"); awsAccessKey != "" { + return awsAccessKey, nil + } + if v.awsAccessKey != "" { + return v.awsAccessKey, nil + } + return "", errors.New("no access key found " + + "neither in request header: X-AWS-Access-Key " + + "nor in environment variable under AWS_ACCESS_KEY_ID or AWS_ACCESS_KEY") +} + +func (v *awsClient) getAwsAccessSecret(ctx context.Context) (string, error) { + if awsSecret := modulecomponents.GetValueFromContext(ctx, "X-Aws-Secret-Key"); awsSecret != "" { + return awsSecret, nil + } + if v.awsSecret != "" { + return v.awsSecret, nil + } + return "", errors.New("no secret found " + + "neither in request header: X-AWS-Secret-Key " + + "nor in environment variable under AWS_SECRET_ACCESS_KEY or AWS_SECRET_KEY") +} + +func (v *awsClient) getAwsSessionToken(ctx context.Context) (string, error) { + if awsSessionToken := modulecomponents.GetValueFromContext(ctx, "X-Aws-Session-Token"); awsSessionToken != "" { + return awsSessionToken, nil + } + if v.awsSessionToken != "" { + return v.awsSessionToken, nil + } + return "", nil +} + +func (v *awsClient) getRegion(config ent.VectorizationConfig) string { + return config.Region +} + +func (v *awsClient) getService(config ent.VectorizationConfig) string { + return config.Service +} + +func (v *awsClient) getEndpoint(config ent.VectorizationConfig) string { + return config.Endpoint +} + +type bedrockEmbeddingsRequest struct { + InputText string `json:"inputText,omitempty"` +} + +type bedrockCohereEmbeddingRequest struct 
{ + Texts []string `json:"texts"` + InputType string `json:"input_type"` +} + +type sagemakerEmbeddingsRequest struct { + Inputs []string `json:"inputs,omitempty"` +} + +type bedrockEmbeddingResponse struct { + InputTextTokenCount int `json:"InputTextTokenCount,omitempty"` + Embedding []float32 `json:"embedding,omitempty"` + Embeddings [][]float32 `json:"embeddings,omitempty"` + Message *string `json:"message,omitempty"` +} +type sagemakerEmbeddingResponse struct { + Embedding [][]float32 `json:"embedding,omitempty"` + ErrorCode *string `json:"ErrorCode,omitempty"` + LogStreamArn *string `json:"LogStreamArn,omitempty"` + OriginalMessage *string `json:"OriginalMessage,omitempty"` + Message *string `json:"Message,omitempty"` + OriginalStatusCode *int `json:"OriginalStatusCode,omitempty"` +} + +func extractHostAndPath(endpointUrl string) (string, string, error) { + u, err := url.Parse(endpointUrl) + if err != nil { + return "", "", err + } + + if u.Host == "" || u.Path == "" { + return "", "", fmt.Errorf("invalid endpoint URL: %s", endpointUrl) + } + + return u.Host, u.Path, nil +} + +func createRequestBody(model string, texts []string, operation operationType) (interface{}, error) { + modelParts := strings.Split(model, ".") + if len(modelParts) == 0 { + return nil, fmt.Errorf("invalid model: %s", model) + } + + modelProvider := modelParts[0] + + switch modelProvider { + case "amazon": + return bedrockEmbeddingsRequest{ + InputText: texts[0], + }, nil + case "cohere": + inputType := "search_document" + if operation == vectorizeQuery { + inputType = "search_query" + } + return bedrockCohereEmbeddingRequest{ + Texts: texts, + InputType: inputType, + }, nil + default: + return nil, fmt.Errorf("unknown model provider: %s", modelProvider) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/clients/aws_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/clients/aws_test.go new file mode 100644 index 
0000000000000000000000000000000000000000..0d27559eff031017c4bc91c797ccd53c1793982e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/clients/aws_test.go @@ -0,0 +1,382 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/modules/text2vec-aws/ent" +) + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + t.Skip("Skipping this test for now") + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &awsClient{ + httpClient: &http.Client{}, + logger: nullLogger(), + awsAccessKey: "access_key", + awsSecret: "secret", + buildBedrockUrlFn: func(service, region, model string) string { + return server.URL + }, + buildSagemakerUrlFn: func(service, region, endpoint string) string { + return server.URL + }, + } + expected := &ent.VectorizationResult{ + Text: "This is my text", + Vector: []float32{0.1, 0.2, 0.3}, + Dimensions: 3, + } + res, err := c.Vectorize(context.Background(), []string{"This is my text"}, + ent.VectorizationConfig{ + Service: "bedrock", + Region: "region", + Model: "model", + }) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the server returns an error", func(t *testing.T) { + t.Skip("Skipping this test for now") + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + c := &awsClient{ 
+ httpClient: &http.Client{}, + logger: nullLogger(), + awsAccessKey: "access_key", + awsSecret: "secret", + buildBedrockUrlFn: func(service, region, model string) string { + return server.URL + }, + buildSagemakerUrlFn: func(service, region, endpoint string) string { + return server.URL + }, + } + _, err := c.Vectorize(context.Background(), []string{"This is my text"}, + ent.VectorizationConfig{ + Service: "bedrock", + }) + + require.NotNil(t, err) + assert.EqualError(t, err, "connection to AWS failed with status: 500 error: nope, not gonna happen") + }) + + t.Run("when AWS key is passed using X-Aws-Api-Key header", func(t *testing.T) { + t.Skip("Skipping this test for now") + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &awsClient{ + httpClient: &http.Client{}, + logger: nullLogger(), + awsAccessKey: "access_key", + awsSecret: "secret", + buildBedrockUrlFn: func(service, region, model string) string { + return server.URL + }, + buildSagemakerUrlFn: func(service, region, endpoint string) string { + return server.URL + }, + } + ctxWithValue := context.WithValue(context.Background(), + "X-Aws-Api-Key", []string{"some-key"}) + + expected := &ent.VectorizationResult{ + Text: "This is my text", + Vector: []float32{0.1, 0.2, 0.3}, + Dimensions: 3, + } + res, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, ent.VectorizationConfig{ + Service: "bedrock", + }) + + require.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when X-Aws-Access-Key header is passed but empty", func(t *testing.T) { + t.Skip("Skipping this test for now") + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &awsClient{ + httpClient: &http.Client{}, + logger: nullLogger(), + awsAccessKey: "", + awsSecret: "123", + buildBedrockUrlFn: func(service, region, model string) string { + return server.URL + }, + buildSagemakerUrlFn: func(service, region, endpoint string) string { + return server.URL + }, + } + ctxWithValue 
:= context.WithValue(context.Background(), + "X-Aws-Api-Key", []string{""}) + + _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, ent.VectorizationConfig{ + Service: "bedrock", + }) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "AWS Access Key: no access key found neither in request header: "+ + "X-Aws-Access-Key nor in environment variable under AWS_ACCESS_KEY_ID") + }) + + t.Run("when X-Aws-Secret-Key header is passed but empty", func(t *testing.T) { + t.Skip("Skipping this test for now") + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &awsClient{ + httpClient: &http.Client{}, + logger: nullLogger(), + awsAccessKey: "123", + awsSecret: "", + buildBedrockUrlFn: func(service, region, model string) string { + return server.URL + }, + buildSagemakerUrlFn: func(service, region, endpoint string) string { + return server.URL + }, + } + ctxWithValue := context.WithValue(context.Background(), + "X-Aws-Api-Key", []string{""}) + + _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, ent.VectorizationConfig{ + Service: "bedrock", + }) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "AWS Secret Key: no secret found neither in request header: "+ + "X-Aws-Access-Secret nor in environment variable under AWS_SECRET_ACCESS_KEY") + }) +} + +func TestBuildBedrockUrl(t *testing.T) { + service := "bedrock" + region := "us-east-1" + t.Run("when using a Cohere", func(t *testing.T) { + model := "cohere.embed-english-v3" + + expected := "https://bedrock-runtime.us-east-1.amazonaws.com/model/cohere.embed-english-v3/invoke" + result := buildBedrockUrl(service, region, model) + + if result != expected { + t.Errorf("Expected %s but got %s", expected, result) + } + }) + + t.Run("When using an AWS model", func(t *testing.T) { + model := "amazon.titan-e1t-medium" + + expected := "https://bedrock.us-east-1.amazonaws.com/model/amazon.titan-e1t-medium/invoke" + result := buildBedrockUrl(service, region, model) + + if 
result != expected { + t.Errorf("Expected %s but got %s", expected, result) + } + }) +} + +func TestCreateRequestBody(t *testing.T) { + input := []string{"Hello, world!"} + + t.Run("Create request for Amazon embedding model", func(t *testing.T) { + model := "amazon.titan-e1t-medium" + req, _ := createRequestBody(model, input, vectorizeObject) + _, ok := req.(bedrockEmbeddingsRequest) + if !ok { + t.Fatalf("Expected req to be a bedrockEmbeddingsRequest, got %T", req) + } + }) + + t.Run("Create request for Cohere embedding model", func(t *testing.T) { + model := "cohere.embed-english-v3" + req, _ := createRequestBody(model, input, vectorizeObject) + _, ok := req.(bedrockCohereEmbeddingRequest) + if !ok { + t.Fatalf("Expected req to be a bedrockCohereEmbeddingRequest, got %T", req) + } + }) + + t.Run("Create request for unknown embedding model", func(t *testing.T) { + model := "unknown.model" + _, err := createRequestBody(model, input, vectorizeObject) + if err == nil { + t.Errorf("Expected an error for unknown model, got nil") + } + }) +} + +func TestVectorize(t *testing.T) { + ctx := context.Background() + input := []string{"Hello, world!"} + + t.Run("Vectorize using an Amazon model", func(t *testing.T) { + t.Skip("Skipping because CI doesnt have the right credentials") + config := ent.VectorizationConfig{ + Model: "amazon.titan-e1t-medium", + Service: "bedrock", + Region: "us-east-1", + } + + awsAccessKeyID := os.Getenv("AWS_ACCESS_KEY_ID_AMAZON") + awsSecretAccessKey := os.Getenv("AWS_SECRET_ACCESS_KEY_AMAZON") + + aws := New(awsAccessKeyID, awsSecretAccessKey, "sessionToken", 60*time.Second, nil) + + _, err := aws.Vectorize(ctx, input, config) + if err != nil { + t.Errorf("Vectorize returned an error: %v", err) + } + }) + + t.Run("Vectorize using a Cohere model", func(t *testing.T) { + t.Skip("Skipping because CI doesnt have the right credentials") + config := ent.VectorizationConfig{ + Model: "cohere.embed-english-v3", + Service: "bedrock", + Region: 
"us-east-1", + } + + awsAccessKeyID := os.Getenv("AWS_ACCESS_KEY_ID_COHERE") + awsSecretAccessKey := os.Getenv("AWS_SECRET_ACCESS_KEY_COHERE") + + aws := New(awsAccessKeyID, awsSecretAccessKey, "sessionToken", 60*time.Second, nil) + + _, err := aws.Vectorize(ctx, input, config) + if err != nil { + t.Errorf("Vectorize returned an error: %v", err) + } + }) +} + +func TestExtractHostAndPath(t *testing.T) { + t.Run("valid URL", func(t *testing.T) { + endpointUrl := "https://service.region.amazonaws.com/model/model-name/invoke" + expectedHost := "service.region.amazonaws.com" + expectedPath := "/model/model-name/invoke" + + host, path, err := extractHostAndPath(endpointUrl) + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + if host != expectedHost { + t.Errorf("Expected host %s but got %s", expectedHost, host) + } + if path != expectedPath { + t.Errorf("Expected path %s but got %s", expectedPath, path) + } + }) + + t.Run("URL without host or path", func(t *testing.T) { + endpointUrl := "https://" + + _, _, err := extractHostAndPath(endpointUrl) + + if err == nil { + t.Error("Expected error but got nil") + } + }) +} + +type fakeHandler struct { + t *testing.T + serverError error +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + authHeader := r.Header["Authorization"][0] + if f.serverError != nil { + var outBytes []byte + var err error + + if strings.Contains(authHeader, "bedrock") { + embeddingResponse := &bedrockEmbeddingResponse{ + Message: ptString(f.serverError.Error()), + } + outBytes, err = json.Marshal(embeddingResponse) + } else { + embeddingResponse := &sagemakerEmbeddingResponse{ + Message: ptString(f.serverError.Error()), + } + outBytes, err = json.Marshal(embeddingResponse) + } + + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer 
r.Body.Close() + + var outBytes []byte + if strings.Contains(authHeader, "bedrock") { + var req bedrockEmbeddingsRequest + require.Nil(f.t, json.Unmarshal(bodyBytes, &req)) + + textInput := req.InputText + assert.Greater(f.t, len(textInput), 0) + embeddingResponse := &bedrockEmbeddingResponse{ + Embedding: []float32{0.1, 0.2, 0.3}, + } + outBytes, err = json.Marshal(embeddingResponse) + } else { + var req sagemakerEmbeddingsRequest + require.Nil(f.t, json.Unmarshal(bodyBytes, &req)) + + textInputs := req.Inputs + assert.Greater(f.t, len(textInputs), 0) + embeddingResponse := &sagemakerEmbeddingResponse{ + Embedding: [][]float32{{0.1, 0.2, 0.3}}, + } + outBytes, err = json.Marshal(embeddingResponse) + } + + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func ptString(in string) *string { + return &in +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..bea4f358b7b0a12a45d462aa82c52a0b641bca2e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *awsClient) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "AWS Module", + "documentationHref": "https://docs.aws.amazon.com/bedrock/latest/userguide/titan-embedding-models.html", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/config.go new file mode 100644 index 0000000000000000000000000000000000000000..8c098732e360e57a0d8d73394c852a6e6fdb1c94 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/config.go @@ -0,0 +1,48 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modaws + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2vec-aws/vectorizer" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *AwsModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": vectorizer.DefaultVectorizeClassName, + vectorizer.ServiceProperty: vectorizer.DefaultService, + } +} + +func (m *AwsModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !vectorizer.DefaultPropertyIndexed, + "vectorizePropertyName": vectorizer.DefaultVectorizePropertyName, + } +} + +func (m *AwsModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := vectorizer.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = 
modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/ent/vectorization_config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/ent/vectorization_config.go new file mode 100644 index 0000000000000000000000000000000000000000..14507ae5e27d58bf309a6575e73254c3b4703e70 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/ent/vectorization_config.go @@ -0,0 +1,21 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationConfig struct { + Service string + Region string + Model string + Endpoint string + TargetModel string + TargetVariant string +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/ent/vectorization_result.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/ent/vectorization_result.go new file mode 100644 index 0000000000000000000000000000000000000000..ba40dffb40d1f2d0bc1507ba7a839452c9485986 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/ent/vectorization_result.go @@ -0,0 +1,18 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationResult struct { + Text string + Dimensions int + Vector []float32 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/module.go new file mode 100644 index 0000000000000000000000000000000000000000..cd9ca9e9a5ee60698ac032bf5617da243ea98615 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/module.go @@ -0,0 +1,158 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modaws + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-aws/clients" + "github.com/weaviate/weaviate/modules/text2vec-aws/vectorizer" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" +) + +const Name = "text2vec-aws" + +func New() *AwsModule { + return &AwsModule{} +} + +type AwsModule struct { + vectorizer text2vecbase.TextVectorizer[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func (m *AwsModule) Name() string { + return "text2vec-aws" +} + +func (m *AwsModule) Type() 
modulecapabilities.ModuleType { + return modulecapabilities.Text2Vec +} + +func (m *AwsModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init additional properties provider") + } + + return nil +} + +func (m *AwsModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *AwsModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + awsAccessKey := m.getAWSAccessKey() + awsSecret := m.getAWSSecretAccessKey() + awsSessionToken := os.Getenv("AWS_SESSION_TOKEN") + client := clients.New(awsAccessKey, awsSecret, awsSessionToken, timeout, logger) + + m.vectorizer = vectorizer.New(client) + m.metaProvider = client + + return nil +} + +func (m *AwsModule) getAWSAccessKey() string { + if os.Getenv("AWS_ACCESS_KEY_ID") != "" { + return os.Getenv("AWS_ACCESS_KEY_ID") + } + return os.Getenv("AWS_ACCESS_KEY") +} + +func (m *AwsModule) getAWSSecretAccessKey() string { + if os.Getenv("AWS_SECRET_ACCESS_KEY") != "" { + return os.Getenv("AWS_SECRET_ACCESS_KEY") + } + return os.Getenv("AWS_SECRET_KEY") +} + +func (m *AwsModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m *AwsModule) VectorizeObject(ctx context.Context, + obj 
*models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg) +} + +func (m *AwsModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + return batch.VectorizeBatch(ctx, objs, skipObject, cfg, m.logger, m.vectorizer.Object) +} + +func (m *AwsModule) MetaInfo() (map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *AwsModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +func (m *AwsModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, nil, nil +} + +func (m *AwsModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[]float32](New()) + _ = modulecapabilities.GraphQLArguments(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..0c55c43a1a12af527d2411d3631244c6458e8047 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modaws + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *AwsModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *AwsModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *AwsModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..ed40c9ce98bef5450bf89e5ce210b5ef14bc5bcf --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/class_settings.go @@ -0,0 +1,195 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + ServiceProperty = "service" + regionProperty = "region" + modelProperty = "model" + endpointProperty = "endpoint" + targetModelProperty = "targetModel" + targetVariantProperty = "targetVariant" +) + +// Default values for service cannot be changed before we solve how old classes +// that have the defaults NOT set will handle the change +const ( + DefaultVectorizeClassName = false + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + DefaultService = "bedrock" +) + +var availableAWSServices = []string{ + "bedrock", + "sagemaker", +} + +var availableAWSBedrockModels = []string{ + "amazon.titan-embed-text-v1", + "amazon.titan-embed-text-v2:0", + "cohere.embed-english-v3", + "cohere.embed-multilingual-v3", +} + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, false)} +} + +func (ic *classSettings) Validate(class *models.Class) error { + var errorMessages []string + if err := ic.BaseClassSettings.Validate(class); err != nil { + errorMessages = append(errorMessages, err.Error()) + } + + service := ic.Service() + if service == "" || !ic.validatAvailableAWSSetting(service, availableAWSServices) { + errorMessages = append(errorMessages, fmt.Sprintf("wrong %s, available services are: %v", ServiceProperty, availableAWSServices)) + } + region := ic.Region() + if region == "" { + errorMessages = append(errorMessages, fmt.Sprintf("%s cannot be empty", regionProperty)) + } + + if 
isBedrock(service) { + model := ic.Model() + if model == "" || !ic.validatAvailableAWSSetting(model, availableAWSBedrockModels) { + errorMessages = append(errorMessages, fmt.Sprintf("wrong %s, available models are: %v", modelProperty, availableAWSBedrockModels)) + } + endpoint := ic.Endpoint() + if endpoint != "" { + errorMessages = append(errorMessages, fmt.Sprintf("wrong configuration: %s, not applicable to %s", endpoint, service)) + } + } + + if isSagemaker(service) { + endpoint := ic.Endpoint() + if endpoint == "" { + errorMessages = append(errorMessages, fmt.Sprintf("%s cannot be empty", endpointProperty)) + } + model := ic.Model() + if model != "" { + errorMessages = append(errorMessages, fmt.Sprintf("wrong configuration: %s, not applicable to %s. did you mean %s", modelProperty, service, targetModelProperty)) + } + } + + if len(errorMessages) > 0 { + return fmt.Errorf("%s", strings.Join(errorMessages, ", ")) + } + + err := ic.validateIndexState(class, ic) + if err != nil { + return err + } + + return nil +} + +func (ic *classSettings) validatAvailableAWSSetting(value string, availableValues []string) bool { + for i := range availableValues { + if value == availableValues[i] { + return true + } + } + return false +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) string { + return ic.BaseClassSettings.GetPropertyAsString(name, defaultValue) +} + +func (cv *classSettings) validateIndexState(class *models.Class, settings ClassSettings) error { + if settings.VectorizeClassName() { + // if the user chooses to vectorize the classname, vector-building will + // always be possible, no need to investigate further + + return nil + } + + // search if there is at least one indexed, string/text prop. 
If found pass + // validation + for _, prop := range class.Properties { + if len(prop.DataType) < 1 { + return errors.Errorf("property %s must have at least one datatype: "+ + "got %v", prop.Name, prop.DataType) + } + + if prop.DataType[0] != string(schema.DataTypeString) && + prop.DataType[0] != string(schema.DataTypeText) { + // we can only vectorize text-like props + continue + } + + if settings.PropertyIndexed(prop.Name) { + // found at least one, this is a valid schema + return nil + } + } + + return fmt.Errorf("invalid properties: didn't find a single property which is " + + "of type string or text and is not excluded from indexing. In addition the " + + "class name is excluded from vectorization as well, meaning that it cannot be " + + "used to determine the vector position. To fix this, set 'vectorizeClassName' " + + "to true if the class name is contextionary-valid. Alternatively add at least " + + "contextionary-valid text/string property which is not excluded from " + + "indexing") +} + +// Aws params +func (ic *classSettings) Service() string { + return ic.getStringProperty(ServiceProperty, DefaultService) +} + +func (ic *classSettings) Region() string { + return ic.getStringProperty(regionProperty, "") +} + +func (ic *classSettings) Model() string { + return ic.getStringProperty(modelProperty, "") +} + +func (ic *classSettings) Endpoint() string { + return ic.getStringProperty(endpointProperty, "") +} + +func (ic *classSettings) TargetModel() string { + return ic.getStringProperty(targetModelProperty, "") +} + +func (ic *classSettings) TargetVariant() string { + return ic.getStringProperty(targetVariantProperty, "") +} + +func isSagemaker(service string) bool { + return service == "sagemaker" +} + +func isBedrock(service string) bool { + return service == "bedrock" +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/class_settings_test.go 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5d0e484361b82d50317871d273f26d777510d975 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/class_settings_test.go @@ -0,0 +1,137 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer_test + +import ( + "testing" + + "github.com/weaviate/weaviate/modules/text2vec-aws/vectorizer" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantService string + wantRegion string + wantModel string + wantEndpoint string + wantTargetModel string + wantTargetVariant string + wantErr error + }{ + { + name: "happy flow - Bedrock", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": "bedrock", + "region": "us-east-1", + "model": "amazon.titan-embed-text-v1", + }, + }, + wantService: "bedrock", + wantRegion: "us-east-1", + wantModel: "amazon.titan-embed-text-v1", + wantErr: nil, + }, + { + name: "happy flow - SageMaker", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": "sagemaker", + "region": "us-east-1", + "endpoint": "my-sagemaker", + }, + }, + wantService: "sagemaker", + wantRegion: "us-east-1", + wantEndpoint: "my-sagemaker", + wantErr: nil, + }, + { + name: "empty service", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "region": "us-east-1", + "model": "amazon.titan-embed-text-v1", + }, + }, + wantService: "bedrock", + wantRegion: "us-east-1", + wantModel: 
"amazon.titan-embed-text-v1", + }, + { + name: "empty region - Bedrock", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": "bedrock", + "model": "amazon.titan-embed-text-v1", + }, + }, + wantErr: errors.Errorf("region cannot be empty"), + }, + { + name: "wrong model", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": "bedrock", + "region": "us-west-1", + "model": "wrong-model", + }, + }, + wantErr: errors.Errorf("wrong model, available models are: [amazon.titan-embed-text-v1 amazon.titan-embed-text-v2:0 cohere.embed-english-v3 cohere.embed-multilingual-v3]"), + }, + { + name: "all wrong", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": "", + "region": "", + "model": "", + }, + }, + wantErr: errors.Errorf("wrong service, available services are: [bedrock sagemaker], " + + "region cannot be empty"), + }, + { + name: "wrong properties", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "service": "bedrock", + "region": "us-west-1", + "model": "cohere.embed-multilingual-v3", + "properties": []interface{}{"prop1", 1111}, + }, + }, + wantErr: errors.Errorf("properties field value: 1111 must be a string"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := vectorizer.NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.EqualError(t, ic.Validate(nil), tt.wantErr.Error()) + } else { + assert.Equal(t, tt.wantService, ic.Service()) + assert.Equal(t, tt.wantRegion, ic.Region()) + assert.Equal(t, tt.wantModel, ic.Model()) + assert.Equal(t, tt.wantEndpoint, ic.Endpoint()) + assert.Equal(t, tt.wantTargetModel, ic.TargetModel()) + assert.Equal(t, tt.wantTargetVariant, ic.TargetVariant()) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/fakes_for_test.go new file mode 100644 index 
0000000000000000000000000000000000000000..96712c6c61ed78ff092fbabfe09392a728f01bf3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/fakes_for_test.go @@ -0,0 +1,119 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer_test + +import ( + "context" + + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/text2vec-aws/ent" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClient struct { + lastInput []string + lastConfig ent.VectorizationConfig +} + +func (c *fakeClient) Vectorize(ctx context.Context, + text []string, cfg ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + c.lastInput = text + c.lastConfig = cfg + return &ent.VectorizationResult{ + Vector: []float32{0, 1, 2, 3}, + Dimensions: 4, + Text: text[0], + }, nil +} + +func (c *fakeClient) VectorizeQuery(ctx context.Context, + text []string, cfg ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + c.lastInput = text + c.lastConfig = cfg + return &ent.VectorizationResult{ + Vector: []float32{0.1, 1.1, 2.1, 3.1}, + Dimensions: 4, + Text: text[0], + }, nil +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizeClassName bool + vectorizePropertyName bool + skippedProperty string + excludedProperty string + // module specific settings + service string + region string + model string + endpoint string + targetModel string + targetVariant string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + if len(f.classConfig) > 0 { + return f.classConfig + } + classSettings := map[string]interface{}{ + "vectorizeClassName": f.vectorizeClassName, + "service": f.service, + "region": f.region, + "model": f.model, + 
"endpoint": f.endpoint, + "targetModel": f.targetModel, + "targetVariant": f.targetVariant, + } + return classSettings +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/objects.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/objects.go new file mode 100644 index 0000000000000000000000000000000000000000..fe902669cff630c285f5aa0b99535c8a0f7eeaaf --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/objects.go @@ -0,0 +1,79 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-aws/ent" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" +) + +type Vectorizer struct { + client Client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +type Client interface { + Vectorize(ctx context.Context, input []string, + config ent.VectorizationConfig) (*ent.VectorizationResult, error) + VectorizeQuery(ctx context.Context, input []string, + config ent.VectorizationConfig) (*ent.VectorizationResult, error) +} + +// IndexCheck returns whether a property of a class should be indexed +type ClassSettings interface { + PropertyIndexed(property string) bool + VectorizePropertyName(propertyName string) bool + VectorizeClassName() bool + Service() string + Region() string + Model() string + Endpoint() string + TargetModel() string + TargetVariant() string +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, error) { + icheck := NewClassSettings(cfg) + text := v.objectVectorizer.Texts(ctx, object, icheck) + + res, err := v.client.Vectorize(ctx, []string{text}, ent.VectorizationConfig{ + Service: icheck.Service(), + Region: icheck.Region(), + Model: icheck.Model(), + Endpoint: icheck.Endpoint(), + TargetModel: icheck.TargetModel(), + TargetVariant: icheck.TargetVariant(), + }) + if err != nil { + return nil, err + } + + return res.Vector, nil +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/texts.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/texts.go new file mode 100644 index 0000000000000000000000000000000000000000..f83b27f053bba70dbf742705da0af4c604a7dba8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/texts.go @@ -0,0 +1,44 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-aws/ent" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + settings := NewClassSettings(cfg) + vectors := make([][]float32, len(inputs)) + for i := range inputs { + res, err := v.client.VectorizeQuery(ctx, []string{inputs[i]}, ent.VectorizationConfig{ + Service: settings.Service(), + Region: settings.Region(), + Model: settings.Model(), + Endpoint: settings.Endpoint(), + TargetModel: settings.TargetModel(), + TargetVariant: settings.TargetVariant(), + }) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + vectors[i] = res.Vector + } + + return libvectorizer.CombineVectors(vectors), nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/texts_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/texts_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e3003f7fb4e9daf0778a05cf63ca2fb2c4aa8990 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-aws/vectorizer/texts_test.go @@ -0,0 +1,102 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer_test + +import ( + "context" + "strings" + "testing" + + "github.com/weaviate/weaviate/modules/text2vec-aws/vectorizer" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// as used in the nearText searcher +func TestVectorizingTexts(t *testing.T) { + type testCase struct { + name string + input []string + expectedClientCall string + expectedService string + expectedRegion string + expectedModel string + } + + tests := []testCase{ + { + name: "single word", + input: []string{"hello"}, + expectedClientCall: "hello", + expectedService: "bedrock", + }, + { + name: "multiple words", + input: []string{"hello world, this is me!"}, + expectedClientCall: "hello world, this is me!", + expectedService: "bedrock", + }, + { + name: "multiple sentences (joined with a dot)", + input: []string{"this is sentence 1", "and here's number 2"}, + expectedClientCall: "and here's number 2", + expectedService: "bedrock", + }, + { + name: "multiple sentences already containing a dot", + input: []string{"this is sentence 1.", "and here's number 2"}, + expectedClientCall: "and here's number 2", + expectedService: "bedrock", + }, + { + name: "multiple sentences already containing a question mark", + input: []string{"this is sentence 1?", "and here's number 2"}, + expectedClientCall: "and here's number 2", + expectedService: "bedrock", + }, + { + name: "multiple sentences already containing an exclamation mark", + input: []string{"this is sentence 1!", "and here's number 2"}, + expectedClientCall: "and here's number 2", + expectedService: "bedrock", + }, 
+ { + name: "multiple sentences already containing comma", + input: []string{"this is sentence 1,", "and here's number 2"}, + expectedClientCall: "and here's number 2", + expectedService: "bedrock", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + client := &fakeClient{} + + v := vectorizer.New(client) + + settings := &fakeClassConfig{ + service: "bedrock", + region: "", + model: "", + } + vec, err := v.Texts(context.Background(), test.input, settings) + + require.Nil(t, err) + assert.Equal(t, []float32{0.1, 1.1, 2.1, 3.1}, vec) + assert.Equal(t, test.expectedClientCall, strings.Join(client.lastInput, ",")) + assert.Equal(t, test.expectedService, client.lastConfig.Service) + assert.Equal(t, test.expectedRegion, client.lastConfig.Region) + assert.Equal(t, test.expectedModel, client.lastConfig.Model) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-bigram/bigram.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-bigram/bigram.go new file mode 100644 index 0000000000000000000000000000000000000000..4588fee2aca16823671fe8bbf501f9ef5ff120fd --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-bigram/bigram.go @@ -0,0 +1,319 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package t2vbigram + +import ( + "context" + "fmt" + "os" + "regexp" + "strings" + "time" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +const Name = "text2vec-bigram" + +func New() *BigramModule { + return &BigramModule{} +} + +type BigramModule struct { + vectors map[string][]float32 + storageProvider moduletools.StorageProvider + GraphqlProvider modulecapabilities.GraphQLArguments + Searcher modulecapabilities.Searcher[[]float32] + NearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + AdditionalPropertiesProvider modulecapabilities.AdditionalProperties + activeVectoriser string +} + +func (m *BigramModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return map[string]modulecapabilities.GraphQLArgument{} +} + +func (m *BigramModule) Name() string { + return Name +} + +func (m *BigramModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2Vec +} + +func (m *BigramModule) Init(ctx context.Context, params moduletools.ModuleInitParams) error { + m.storageProvider = params.GetStorageProvider() + m.logger = params.GetLogger() + + switch strings.ToLower(os.Getenv("BIGRAM")) { + case "alphabet": + m.activeVectoriser = "alphabet" + case "trigram": + m.activeVectoriser = "trigram" + case "bytepairs": + m.activeVectoriser = "bytepairs" + default: + m.activeVectoriser = "mod26" + } + + return nil +} + +func (m *BigramModule) InitExtension(modules []modulecapabilities.Module) error { + return nil +} + +func (m *BigramModule) InitVectorizer(ctx 
context.Context, timeout time.Duration, logger logrus.FieldLogger) error { + return nil +} + +func (m *BigramModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, []string{}, nil +} + +func (m *BigramModule) InitAdditionalPropertiesProvider() error { + return nil +} + +func (m *BigramModule) VectorizeObject(ctx context.Context, obj *models.Object, cfg moduletools.ClassConfig) ([]float32, models.AdditionalProperties, error) { + var text string + for _, prop := range obj.Properties.(map[string]interface{}) { + text += fmt.Sprintf("%v", prop) + } + vector, error := m.VectorizeInput(ctx, text, cfg) + return vector, nil, error +} + +func (m *BigramModule) MetaInfo() (map[string]interface{}, error) { + return nil, nil +} + +func (m *BigramModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return additional.NewText2VecProvider().AdditionalProperties() +} + +func alphabetOrdinal(letter rune) int { + return int(letter - 'a') +} + +func ord(letter rune) int { + return int(letter) +} + +func stripNonAlphabets(input string) (string, error) { + reg, err := regexp.Compile("[^a-zA-Z]+") + if err != nil { + return "", err + } + return reg.ReplaceAllString(input, ""), nil +} + +func alphabet2Vector(input string) ([]float32, error) { + // Strip everything out of the in that is not a letter + // and convert to lower case + in, err := stripNonAlphabets(input) + if err != nil { + return nil, err + } + in = strings.ToLower(in) + vector := make([]float32, 26*26) + for i := 0; i < len(in)-1; i++ { + first := alphabetOrdinal(rune(in[i])) + second := alphabetOrdinal(rune(in[i+1])) + index := first*26 + second + vector[index] = vector[index] + 1 + } + var sum float32 + for _, v := range vector { + sum += v + } + + for i, v := range vector { + vector[i] = v / sum + } + return vector, nil +} + +// Maybe we should do this for bytes instead of letters? 
+func mod26Vector(input string) ([]float32, error) { + input = strings.ToLower(input) + vector := make([]float32, 26*26) + for i := 0; i < len(input)-1; i++ { + first := int(input[i]) % 26 + second := int(input[i+1]) % 26 + index := first*26 + second + vector[index] = vector[index] + 1 + } + + return normaliseVector(vector), nil +} + +func normaliseVector(vector []float32) []float32 { + var sum float32 + for _, v := range vector { + sum += v + } + + for i, v := range vector { + vector[i] = v / sum + } + return vector +} + +func trigramVector(input string) ([]float32, error) { + input = strings.ToLower(input) + vector := make([]float32, 26*26*26) + for i := 0; i < len(input)-2; i++ { + first := ord(rune(input[i])) % 26 + second := ord(rune(input[i+1])) % 26 + third := ord(rune(input[i+2])) % 26 + index := first*26*26 + second*26 + third + vector[index] = vector[index] + 1 + } + + return normaliseVector(vector), nil +} + +func bytePairs2Vector(input string) ([]float32, error) { + vector := make([]float32, 256*256) + for i := 0; i < len(input)-1; i++ { + bigram := input[i : i+2] + + index := int(bigram[0]) * int(bigram[1]) + vector[index] = vector[index] + 1 + } + + return normaliseVector(vector[1:]), nil // Max length is 16k-1 +} + +func text2vec(input, activeVectoriser string) ([]float32, error) { + switch activeVectoriser { + case "alphabet": + return alphabet2Vector(input) + case "trigram": + return trigramVector(input) + case "bytepairs": + return bytePairs2Vector(input) + case "mod26": + return mod26Vector(input) + default: + return nil, fmt.Errorf("unsupported vectoriser: %s", activeVectoriser) + } +} + +func (m *BigramModule) VectorizeInput(ctx context.Context, input string, cfg moduletools.ClassConfig) ([]float32, error) { + vector, err := text2vec(input, m.activeVectoriser) + return vector, err +} + +func (m *BigramModule) AddVector(text string, vector []float32) error { + if m.vectors == nil { + m.vectors = map[string][]float32{} + } + m.vectors[text] = 
vector + return nil +} + +func (m *BigramModule) VectorFromParams(ctx context.Context, params interface{}, className string, findVectorFn modulecapabilities.FindVectorFn[[]float32], cfg moduletools.ClassConfig) ([]float32, error) { + switch thing := params.(type) { + case *nearText.NearTextParams: + return m.Texts(ctx, params.(*nearText.NearTextParams).Values, cfg) + default: + return nil, fmt.Errorf("unsupported params type: %T, %v", params, thing) + } +} + +func (m *BigramModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearches := map[string]modulecapabilities.VectorForParams[[]float32]{} + + vectorSearches["nearText"] = &vectorForParams{m.VectorFromParams} + return vectorSearches +} + +func (m *BigramModule) Texts(ctx context.Context, inputs []string, cfg moduletools.ClassConfig) ([]float32, error) { + var vectors [][]float32 + for _, input := range inputs { + vector, err := text2vec(input, m.activeVectoriser) + if err != nil { + return nil, err + } + vectors = append(vectors, vector) + } + return libvectorizer.CombineVectors(vectors), nil +} + +func (m *BigramModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + var ( + vectors [][]float32 + errors = map[int]error{} + ) + for i, obj := range objs { + if skipObject[i] { + continue + } + vector, _, err := m.VectorizeObject(ctx, obj, cfg) + if err != nil { + errors[i] = err + } + vectors = append(vectors, vector) + } + return vectors, nil, errors +} + +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) + +func (m *BigramModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": true, + } +} + +func (m *BigramModule) PropertyConfigDefaults(dt 
*schema.DataType) map[string]interface{} { + return map[string]interface{}{ + "skip": false, + "vectorizePropertyName": true, + } +} + +func (m *BigramModule) ValidateClass(ctx context.Context, class *models.Class, cfg moduletools.ClassConfig) error { + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) + +type vectorForParams struct { + fn func(ctx context.Context, + params interface{}, className string, + findVectorFn modulecapabilities.FindVectorFn[[]float32], + cfg moduletools.ClassConfig, + ) ([]float32, error) +} + +func (v *vectorForParams) VectorForParams(ctx context.Context, params interface{}, className string, + findVectorFn modulecapabilities.FindVectorFn[[]float32], + cfg moduletools.ClassConfig, +) ([]float32, error) { + return v.fn(ctx, params, className, findVectorFn, cfg) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-bigram/bigram_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-bigram/bigram_test.go new file mode 100644 index 0000000000000000000000000000000000000000..4761a3e99e511eb61a555ccbbcc3490dbfaa19d2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-bigram/bigram_test.go @@ -0,0 +1,145 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package t2vbigram + +import ( + "context" + "testing" + + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modules" +) + +func TestBigramModule_Name(t *testing.T) { + mod := New() + assert.Equal(t, Name, mod.Name()) +} + +func TestBigramModule_Type(t *testing.T) { + mod := New() + assert.Equal(t, modulecapabilities.Text2Vec, mod.Type()) +} + +func TestBigramModule_Init(t *testing.T) { + t.Setenv("BIGRAM", "alphabet") + mod := New() + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetLogger().Return(logrus.New()) + params.EXPECT().GetStorageProvider().Return(&fakeStorageProvider{dataPath: t.TempDir()}) + err := mod.Init(context.Background(), params) + assert.NoError(t, err) + assert.Equal(t, "alphabet", mod.activeVectoriser) +} + +type fakeStorageProvider struct { + dataPath string +} + +func (f *fakeStorageProvider) Storage(name string) (moduletools.Storage, error) { + return nil, nil +} + +func (f *fakeStorageProvider) DataPath() string { + return f.dataPath +} + +func TestBigramModule_VectorizeInput(t *testing.T) { + mod := New() + mod.activeVectoriser = "alphabet" + input := "hello world" + expectedVector, _ := alphabet2Vector(input) + cfg := modules.NewClassBasedModuleConfig(&models.Class{}, mod.Name(), "", "", nil) + vector, err := mod.VectorizeInput(context.Background(), input, cfg) + assert.NoError(t, err) + assert.Equal(t, expectedVector, vector) +} + +func TestText2Vec(t *testing.T) { + input := "hello world" + activeVectoriser := "alphabet" + expectedVector, _ := alphabet2Vector(input) + vector, err := text2vec(input, activeVectoriser) + assert.NoError(t, err) + assert.Equal(t, expectedVector, vector) + + activeVectoriser = "trigram" 
+ expectedVector, _ = trigramVector(input) + vector, err = text2vec(input, activeVectoriser) + assert.NoError(t, err) + assert.Equal(t, expectedVector, vector) + + activeVectoriser = "bytepairs" + expectedVector, _ = bytePairs2Vector(input) + vector, err = text2vec(input, activeVectoriser) + assert.NoError(t, err) + assert.Equal(t, expectedVector, vector) + + activeVectoriser = "mod26" + expectedVector, _ = mod26Vector(input) + vector, err = text2vec(input, activeVectoriser) + assert.NoError(t, err) + assert.Equal(t, expectedVector, vector) +} + +func TestAlphabet2Vector(t *testing.T) { + input := "hello world" + vector, err := alphabet2Vector(input) + assert.NoError(t, err) + assert.NotNil(t, vector) + assert.Equal(t, 26*26, len(vector)) +} + +func TestMod26Vector(t *testing.T) { + input := "hello world" + vector, err := mod26Vector(input) + assert.NoError(t, err) + assert.NotNil(t, vector) + assert.Equal(t, 26*26, len(vector)) +} + +func TestTrigramVector(t *testing.T) { + input := "hello world" + vector, err := trigramVector(input) + assert.NoError(t, err) + assert.NotNil(t, vector) + assert.Equal(t, 26*26*26, len(vector)) +} + +func TestBytePairs2Vector(t *testing.T) { + input := "hello world" + vector, err := bytePairs2Vector(input) + assert.NoError(t, err) + assert.NotNil(t, vector) + assert.Equal(t, 256*256-1, len(vector)) +} + +func TestStripNonAlphabets(t *testing.T) { + input := "hello, world!" 
+ expected := "helloworld" + output, err := stripNonAlphabets(input) + require.NoError(t, err) + assert.Equal(t, expected, output) +} + +func TestAddVector(t *testing.T) { + mod := New() + vector := []float32{1, 2, 3} + err := mod.AddVector("hello", vector) + assert.NoError(t, err) + assert.Equal(t, vector, mod.vectors["hello"]) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/clients/cohere.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/clients/cohere.go new file mode 100644 index 0000000000000000000000000000000000000000..19ca6f83ff5f103475dc847168819932b3fb6476 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/clients/cohere.go @@ -0,0 +1,70 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/cohere" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2vec-cohere/ent" +) + +type vectorizer struct { + client *cohere.Client + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + client: cohere.New(apiKey, timeout, logger), + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + icheck := ent.NewClassSettings(cfg) + res, err := v.client.Vectorize(ctx, input, cohere.Settings{ + Model: icheck.Model(), + BaseURL: icheck.BaseURL(), + Truncate: 
icheck.Truncate(), + InputType: cohere.SearchDocument, + }) + return res, nil, 0, err +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + icheck := ent.NewClassSettings(cfg) + return v.client.Vectorize(ctx, input, cohere.Settings{ + Model: icheck.Model(), + BaseURL: icheck.BaseURL(), + Truncate: icheck.Truncate(), + InputType: cohere.SearchQuery, + }) +} + +func (v *vectorizer) GetApiKeyHash(ctx context.Context, config moduletools.ClassConfig) [32]byte { + return v.client.GetApiKeyHash(ctx, config) +} + +func (v *vectorizer) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + return v.client.GetVectorizerRateLimit(ctx, cfg) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/clients/cohere_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/clients/cohere_test.go new file mode 100644 index 0000000000000000000000000000000000000000..105da1f812727b4d54bb393336ca2b50642827e9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/clients/cohere_test.go @@ -0,0 +1,266 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
//
// CONTACT: hello@weaviate.io
//

package clients

import (
	"context"
	"encoding/json"
	"io"
	"net/http"
	"net/http/httptest"
	"testing"
	"time"

	"github.com/weaviate/weaviate/entities/schema"
	"github.com/weaviate/weaviate/usecases/config"
	"github.com/weaviate/weaviate/usecases/modulecomponents"
	"github.com/weaviate/weaviate/usecases/modulecomponents/clients/cohere"

	"github.com/pkg/errors"
	"github.com/sirupsen/logrus"
	"github.com/sirupsen/logrus/hooks/test"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// TestClient exercises the Cohere vectorizer against a local fake HTTP
// server: happy path, expired context, server-side errors, API-key plumbing
// via context values, and rate-limit header propagation.
func TestClient(t *testing.T) {
	// Happy path: the fake server returns one embedding; the result must
	// echo the input text, the vector and its dimensionality.
	t.Run("when all is fine", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &vectorizer{
			client: cohere.New("apiKey", 1*time.Minute, nullLogger()),
			logger: nullLogger(),
		}
		cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}}
		expected := &modulecomponents.VectorizationResult[[]float32]{
			Text:       []string{"This is my text"},
			Vector:     [][]float32{{0.1, 0.2, 0.3}},
			Dimensions: 3,
		}
		res, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, cfg)

		assert.Nil(t, err)
		assert.Equal(t, expected, res)
	})

	// A deadline set to "now" must surface as a context error, not a
	// successful call.
	t.Run("when the context is expired", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &vectorizer{
			client: cohere.New("apiKey", 1*time.Minute, nullLogger()),
			logger: nullLogger(),
		}
		cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}}
		ctx, cancel := context.WithDeadline(context.Background(), time.Now())
		defer cancel()

		_, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, cfg)

		require.NotNil(t, err)
		assert.Contains(t, err.Error(), "context deadline exceeded")
	})

	// A 500 from the server must be wrapped with the status and the
	// server-provided message.
	t.Run("when the server returns an error", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{
			t:           t,
			serverError: errors.Errorf("nope, not gonna happen"),
		})
		defer server.Close()
		cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}}
		c := &vectorizer{
			client: cohere.New("apiKey", 1*time.Minute, nullLogger()),
			logger: nullLogger(),
		}
		_, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, cfg)

		require.NotNil(t, err)
		assert.Equal(t, err.Error(), "connection to Cohere failed with status: 500 error: nope, not gonna happen")
	})

	// An API key supplied via the per-request context header must be
	// accepted even though the client was built with a different key.
	t.Run("when Cohere key is passed using X-Cohere-Api-Key header", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &vectorizer{
			client: cohere.New("apiKey", 1*time.Minute, nullLogger()),
			logger: nullLogger(),
		}
		cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}}

		ctxWithValue := context.WithValue(context.Background(),
			"X-Cohere-Api-Key", []string{"some-key"})

		expected := &modulecomponents.VectorizationResult[[]float32]{
			Text:       []string{"This is my text"},
			Vector:     [][]float32{{0.1, 0.2, 0.3}},
			Dimensions: 3,
		}
		res, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, cfg)

		require.Nil(t, err)
		assert.Equal(t, expected, res)
	})

	// No key anywhere (client or context) must produce the canonical
	// "no api key found" error.
	t.Run("when Cohere key is empty", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &vectorizer{
			client: cohere.New("", 1*time.Minute, nullLogger()),
			logger: nullLogger(),
		}
		cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}}
		ctx, cancel := context.WithDeadline(context.Background(), time.Now())
		defer cancel()

		_, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, cfg)

		require.NotNil(t, err)
		assert.Equal(t, err.Error(), "Cohere API Key: no api key found "+
			"neither in request header: X-Cohere-Api-Key "+
			"nor in environment variable under COHERE_APIKEY")
	})

	// An explicitly empty header value must behave the same as no key.
	t.Run("when X-Cohere-Api-Key header is passed but empty", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &vectorizer{
			client: cohere.New("", 1*time.Minute, nullLogger()),
			logger: nullLogger(),
		}
		cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}}

		ctxWithValue := context.WithValue(context.Background(),
			"X-Cohere-Api-Key", []string{""})

		_, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, cfg)

		require.NotNil(t, err)
		assert.Equal(t, err.Error(), "Cohere API Key: no api key found "+
			"neither in request header: X-Cohere-Api-Key "+
			"nor in environment variable under COHERE_APIKEY")
	})

	// Rate-limit values supplied via the context must be reflected in the
	// reported vectorizer rate limit.
	t.Run("pass rate limit headers requests", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &vectorizer{
			client: cohere.New("apiKey", 1*time.Minute, nullLogger()),
			logger: nullLogger(),
		}

		ctxWithValue := context.WithValue(context.Background(),
			"X-Cohere-Ratelimit-RequestPM-Embedding", []string{"50"})

		rl := c.GetVectorizerRateLimit(ctxWithValue, fakeClassConfig{classConfig: map[string]interface{}{}})
		assert.Equal(t, 50, rl.LimitRequests)
		assert.Equal(t, 50, rl.RemainingRequests)
	})
}

// fakeHandler emulates the Cohere embed endpoint. When serverError is set it
// answers 500 with that message; otherwise it checks the request shape and
// returns a fixed single embedding.
type fakeHandler struct {
	t           *testing.T
	serverError error
}

func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	assert.Equal(f.t, http.MethodPost, r.Method)

	if f.serverError != nil {
		embeddingError := map[string]interface{}{
			"message": f.serverError.Error(),
			"type":    "invalid_request_error",
		}
		embeddingResponse := map[string]interface{}{
			"message": embeddingError["message"],
		}
		outBytes, err := json.Marshal(embeddingResponse)
		require.Nil(f.t, err)

		w.WriteHeader(http.StatusInternalServerError)
		w.Write(outBytes)
		return
	}

	bodyBytes, err := io.ReadAll(r.Body)
	require.Nil(f.t, err)
	defer r.Body.Close()

	var b map[string]interface{}
	require.Nil(f.t, json.Unmarshal(bodyBytes, &b))

	// The request payload must carry at least one text under "texts".
	textInput := b["texts"].([]interface{})
	assert.Greater(f.t, len(textInput), 0)

	embeddingResponse := map[string]interface{}{
		"embeddings": map[string]interface{}{
			"float": [][]float32{{0.1, 0.2, 0.3}},
		},
	}
	outBytes, err := json.Marshal(embeddingResponse)
	require.Nil(f.t, err)

	w.Write(outBytes)
}

// nullLogger returns a logger whose output is discarded.
func nullLogger() logrus.FieldLogger {
	l, _ := test.NewNullLogger()
	return l
}

// fakeClassConfig is a minimal moduletools.ClassConfig stub for these tests;
// skipped/excluded property names toggle the per-property flags.
type fakeClassConfig struct {
	classConfig           map[string]interface{}
	vectorizePropertyName bool
	skippedProperty       string
	excludedProperty      string
}

func (f fakeClassConfig) Class() map[string]interface{} {
	return f.classConfig
}

func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} {
	return f.classConfig
}

func (f fakeClassConfig) Property(propName string) map[string]interface{} {
	if propName == f.skippedProperty {
		return map[string]interface{}{
			"skip": true,
		}
	}
	if propName == f.excludedProperty {
		return map[string]interface{}{
			"vectorizePropertyName": false,
		}
	}
	if f.vectorizePropertyName {
		return map[string]interface{}{
			"vectorizePropertyName": true,
		}
	}
	return nil
}

func (f fakeClassConfig) Tenant() string {
	return ""
}

func (f fakeClassConfig) TargetVector() string {
	return ""
}

func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType {
	return nil
}

func (f fakeClassConfig) Config() *config.Config {
	return nil
}
_` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Cohere Module", + "documentationHref": "https://docs.cohere.ai/embedding-wiki/", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/config.go new file mode 100644 index 0000000000000000000000000000000000000000..f01b59e07c901191129794df355555780736e30a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/config.go @@ -0,0 +1,50 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modcohere + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2vec-cohere/ent" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *CohereModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": ent.DefaultVectorizeClassName, + "model": ent.DefaultCohereModel, + "truncate": ent.DefaultTruncate, + "baseUrl": ent.DefaultBaseURL, + } +} + +func (m *CohereModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !ent.DefaultPropertyIndexed, + "vectorizePropertyName": ent.DefaultVectorizePropertyName, + } +} + +func (m *CohereModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := ent.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..c1957771a4a7516883085fe05fa6adf55f2fd8ee --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/ent/class_settings.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + // Default values for model, baseURL and truncate cannot be changed before we solve how old classes + // that have the defaults NOT set will handle the change + DefaultBaseURL = "https://api.cohere.ai" + DefaultCohereModel = "embed-multilingual-v3.0" + DefaultTruncate = "END" + DefaultVectorizeClassName = true + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + LowerCaseInput = false +) + +var availableTruncates = []string{"NONE", "START", "END", "LEFT", "RIGHT"} + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, LowerCaseInput)} +} + +func (cs *classSettings) Model() string { + return cs.BaseClassSettings.GetPropertyAsString("model", DefaultCohereModel) +} + +func (cs *classSettings) Truncate() string { + return cs.BaseClassSettings.GetPropertyAsString("truncate", DefaultTruncate) +} + +func (cs *classSettings) BaseURL() string { + return cs.BaseClassSettings.GetPropertyAsString("baseURL", DefaultBaseURL) +} + +func (cs *classSettings) Validate(class *models.Class) error { + if err := cs.BaseClassSettings.Validate(class); err != nil { + return err + } + + truncate := cs.Truncate() + if !basesettings.ValidateSetting(truncate, availableTruncates) { + return errors.Errorf("wrong truncate type, available types are: %v", availableTruncates) + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/module.go new file mode 100644 index 
// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io
//

// Package modcohere wires the text2vec-cohere module: client construction,
// batching configuration, and the capability interfaces Weaviate discovers
// at startup.
package modcohere

import (
	"context"
	"os"
	"time"

	"github.com/pkg/errors"
	"github.com/sirupsen/logrus"

	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/modulecapabilities"
	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/modules/text2vec-cohere/clients"
	"github.com/weaviate/weaviate/modules/text2vec-cohere/ent"
	"github.com/weaviate/weaviate/usecases/modulecomponents/additional"
	"github.com/weaviate/weaviate/usecases/modulecomponents/batch"
	"github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase"
)

const Name = "text2vec-cohere"

// batchSettings caps batches at Cohere's documented 96 objects per request;
// token limits are effectively disabled since the API does not enforce one.
var batchSettings = batch.Settings{
	TokenMultiplier:    0,           // no token limit
	MaxTimePerBatch:    float64(10),
	MaxObjectsPerBatch: 96, // https://docs.cohere.com/reference/embed
	MaxTokensPerBatch:  func(cfg moduletools.ClassConfig) int { return 500000 }, // there does not seem to be a limit
	HasTokenLimit:      false,
	ReturnsRateLimit:   false,
}

func New() *CohereModule {
	return &CohereModule{}
}

// CohereModule bundles the vectorizer, meta provider and GraphQL plumbing;
// fields are populated during Init / InitExtension.
type CohereModule struct {
	vectorizer                   text2vecbase.TextVectorizerBatch[[]float32]
	metaProvider                 text2vecbase.MetaProvider
	graphqlProvider              modulecapabilities.GraphQLArguments
	searcher                     modulecapabilities.Searcher[[]float32]
	nearTextTransformer          modulecapabilities.TextTransform
	logger                       logrus.FieldLogger
	additionalPropertiesProvider modulecapabilities.AdditionalProperties
}

func (m *CohereModule) Name() string {
	return Name
}

// Type reports the module's capability type.
// NOTE(review): this returns Text2ManyVec rather than Text2Vec (which the
// other text2vec modules in this tree return) — confirm this is intended.
func (m *CohereModule) Type() modulecapabilities.ModuleType {
	return modulecapabilities.Text2ManyVec
}

// Init builds the vectorizer (using the configured module HTTP timeout) and
// the additional-properties provider.
func (m *CohereModule) Init(ctx context.Context,
	params moduletools.ModuleInitParams,
) error {
	m.logger = params.GetLogger()

	if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil {
		return errors.Wrap(err, "init vectorizer")
	}

	if err := m.initAdditionalPropertiesProvider(); err != nil {
		return errors.Wrap(err, "init additional properties provider")
	}

	return nil
}

// InitExtension picks up a nearText text transformer from any other loaded
// module that provides one, then wires the nearText GraphQL argument.
func (m *CohereModule) InitExtension(modules []modulecapabilities.Module) error {
	for _, module := range modules {
		if module.Name() == m.Name() {
			continue
		}
		if arg, ok := module.(modulecapabilities.TextTransformers); ok {
			if arg != nil && arg.TextTransformers() != nil {
				m.nearTextTransformer = arg.TextTransformers()["nearText"]
			}
		}
	}

	if err := m.initNearText(); err != nil {
		return errors.Wrap(err, "init graphql provider")
	}
	return nil
}

// initVectorizer creates the Cohere client (API key from COHERE_APIKEY) and
// wraps it in the shared batch vectorizer with a fixed 50s batch timeout.
func (m *CohereModule) initVectorizer(ctx context.Context, timeout time.Duration,
	logger logrus.FieldLogger,
) error {
	apiKey := os.Getenv("COHERE_APIKEY")
	client := clients.New(apiKey, timeout, logger)

	m.vectorizer = text2vecbase.New(client,
		batch.NewBatchVectorizer(client, 50*time.Second, batchSettings, logger, m.Name()),
		batch.ReturnBatchTokenizer(batchSettings.TokenMultiplier, m.Name(), ent.LowerCaseInput),
	)
	m.metaProvider = client

	return nil
}

func (m *CohereModule) initAdditionalPropertiesProvider() error {
	m.additionalPropertiesProvider = additional.NewText2VecProvider()
	return nil
}

// VectorizeObject embeds a single object using the class's Cohere settings.
func (m *CohereModule) VectorizeObject(ctx context.Context,
	obj *models.Object, cfg moduletools.ClassConfig,
) ([]float32, models.AdditionalProperties, error) {
	return m.vectorizer.Object(ctx, obj, cfg, ent.NewClassSettings(cfg))
}

// VectorizeBatch embeds many objects at once via the batch vectorizer.
func (m *CohereModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) {
	vecs, errs := m.vectorizer.ObjectBatch(ctx, objs, skipObject, cfg)

	return vecs, nil, errs
}

func (m *CohereModule) MetaInfo() (map[string]interface{}, error) {
	return m.metaProvider.MetaInfo()
}

func (m *CohereModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) {
	return true, nil, nil
}

// VectorizeInput embeds a single free-text input (e.g. a search query).
func (m *CohereModule) VectorizeInput(ctx context.Context,
	input string, cfg moduletools.ClassConfig,
) ([]float32, error) {
	return m.vectorizer.Texts(ctx, []string{input}, cfg)
}

func (m *CohereModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty {
	return m.additionalPropertiesProvider.AdditionalProperties()
}

// verify we implement the modules.Module interface
var (
	_ = modulecapabilities.Module(New())
	_ = modulecapabilities.Vectorizer[[]float32](New())
	_ = modulecapabilities.MetaProvider(New())
	_ = modulecapabilities.Searcher[[]float32](New())
	_ = modulecapabilities.GraphQLArguments(New())
	_ = modulecapabilities.InputVectorizer[[]float32](New())
)
+// +// CONTACT: hello@weaviate.io +// + +package modcohere + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *CohereModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *CohereModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *CohereModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/vectorizer/batch_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/vectorizer/batch_test.go new file mode 100644 index 0000000000000000000000000000000000000000..cebb62a63ffa754afc5e9e10abf8da4ef772084a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/vectorizer/batch_test.go @@ -0,0 +1,105 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +func TestBatch(t *testing.T) { + client := &fakeBatchClient{} + cfg := &fakeClassConfig{vectorizePropertyName: false, classConfig: map[string]interface{}{"vectorizeClassName": false}} + logger, _ := test.NewNullLogger() + cases := []struct { + name string + objects []*models.Object + skip []bool + wantErrors map[int]error + deadline time.Duration + }{ + {name: "skip all", objects: []*models.Object{{Class: "Car"}}, skip: []bool{true}}, + {name: "skip first", objects: []*models.Object{{Class: "Car"}, {Class: "Car", Properties: map[string]interface{}{"test": "test"}}}, skip: []bool{true, false}}, + {name: "one object errors", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "test"}}, {Class: "Car", Properties: map[string]interface{}{"test": "error something"}}}, skip: []bool{false, false}, wantErrors: map[int]error{1: fmt.Errorf("something")}}, + {name: "first object errors", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "error something"}}, {Class: "Car", Properties: map[string]interface{}{"test": "test"}}}, skip: []bool{false, false}, wantErrors: map[int]error{0: fmt.Errorf("something")}}, + {name: "vectorize all", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "test"}}, {Class: "Car", Properties: map[string]interface{}{"test": "something"}}}, skip: []bool{false, false}}, + {name: "multiple vectorizer batches", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 25"}}, // set 
limit so next 3 objects are one batch + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "first object second batch"}}, // rate is 100 again + {Class: "Car", Properties: map[string]interface{}{"test": "second object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "fourth object second batch"}}, + }, skip: []bool{false, false, false, false, false, false, false, false}}, + {name: "multiple vectorizer batches with skips and errors", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 25"}}, // set limit so next 3 objects are one batch + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "error something"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "first object second batch"}}, // rate is 100 again + {Class: "Car", Properties: map[string]interface{}{"test": "second object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "fourth object second batch"}}, + }, skip: []bool{false, true, false, false, false, true, false, false}, wantErrors: map[int]error{3: fmt.Errorf("something")}}, + {name: "skip last item", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "fir test object"}}, // set limit + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + 
{Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + }, skip: []bool{false, false, true}}, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + v := text2vecbase.New(client, + batch.NewBatchVectorizer( + client, 50*time.Second, + batch.Settings{MaxObjectsPerBatch: 100, MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 500000 }, MaxTimePerBatch: 10}, + logger, "test"), + batch.ReturnBatchTokenizer(0, "", false), + ) + deadline := time.Now().Add(10 * time.Second) + if tt.deadline != 0 { + deadline = time.Now().Add(tt.deadline) + } + + ctx, cancl := context.WithDeadline(context.Background(), deadline) + vecs, errs := v.ObjectBatch( + ctx, tt.objects, tt.skip, cfg, + ) + + require.Len(t, errs, len(tt.wantErrors)) + require.Len(t, vecs, len(tt.objects)) + + for i := range tt.objects { + if tt.wantErrors[i] != nil { + require.Equal(t, tt.wantErrors[i], errs[i]) + } else if tt.skip[i] { + require.Nil(t, vecs[i]) + } else { + require.NotNil(t, vecs[i]) + } + } + cancl() + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..0d694db0190d5a74e574882de8b1e00770891b27 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-cohere/vectorizer/fakes_for_test.go @@ -0,0 +1,163 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +type fakeBatchClient struct { + defaultResetRate int +} + +func (c *fakeBatchClient) Vectorize(ctx context.Context, + text []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + if c.defaultResetRate == 0 { + c.defaultResetRate = 60 + } + + vectors := make([][]float32, len(text)) + errors := make([]error, len(text)) + rateLimit := &modulecomponents.RateLimits{RemainingTokens: 100, RemainingRequests: 100, LimitTokens: 200, ResetTokens: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second), ResetRequests: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second)} + for i := range text { + if len(text[i]) >= len("error ") && text[i][:6] == "error " { + errors[i] = fmt.Errorf("%s", text[i][6:]) + continue + } + + req := len("requests ") + if len(text[i]) >= req && text[i][:req] == "requests " { + reqs, _ := strconv.Atoi(strings.Split(text[i][req:], " ")[0]) + rateLimit.RemainingRequests = reqs + rateLimit.LimitRequests = 2 * reqs + } + + if len(text[i]) >= len("wait ") && text[i][:5] == "wait " { + wait, _ := strconv.Atoi(text[i][5:]) + time.Sleep(time.Duration(wait) * time.Millisecond) + } + vectors[i] = []float32{0, 1, 2, 3} + } + + return &modulecomponents.VectorizationResult[[]float32]{ + Vector: vectors, + Dimensions: 4, + Text: text, + Errors: errors, + }, rateLimit, 0, nil +} + +func (c *fakeBatchClient) VectorizeQuery(ctx context.Context, + text []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + return &modulecomponents.VectorizationResult[[]float32]{ + Vector: 
[][]float32{{0.1, 1.1, 2.1, 3.1}}, + Dimensions: 4, + Text: text, + }, nil +} + +func (c *fakeBatchClient) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + return &modulecomponents.RateLimits{RemainingTokens: 100, RemainingRequests: 100, LimitTokens: 200, ResetTokens: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second), ResetRequests: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second)} +} + +func (c *fakeBatchClient) GetApiKeyHash(ctx context.Context, cfg moduletools.ClassConfig) [32]byte { + return [32]byte{} +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizeClassName bool + vectorizePropertyName bool + skippedProperty string + excludedProperty string + // module specific settings + cohereModel string + truncateType string + baseURL string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + classSettings := map[string]interface{}{ + "vectorizeClassName": f.vectorizeClassName, + "model": f.cohereModel, + "truncate": f.truncateType, + "baseURL": f.baseURL, + } + return classSettings +} + +func (f fakeClassConfig) PropertyIndexed(property string) bool { + return !((property == f.skippedProperty) || (property == f.excludedProperty)) +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) VectorizeClassName() bool { + return 
f.classConfig["vectorizeClassName"].(bool) +} + +func (f fakeClassConfig) VectorizePropertyName(propertyName string) bool { + return f.vectorizePropertyName +} + +func (f fakeClassConfig) Properties() []string { + return nil +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/additional_arguments.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/additional_arguments.go new file mode 100644 index 0000000000000000000000000000000000000000..3c2f48dcaf20ec756dbe9da6cc97177d6f8bad3f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/additional_arguments.go @@ -0,0 +1,106 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package additional + +import ( + "fmt" + + "github.com/tailor-inc/graphql" +) + +func additionalNearestNeighborsField(classname string) *graphql.Field { + return &graphql.Field{ + Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalNearestNeighbors", classname), + Fields: graphql.Fields{ + "neighbors": &graphql.Field{Type: graphql.NewList(graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalNearestNeighborsNeighbor", classname), + Fields: graphql.Fields{ + "concept": &graphql.Field{Type: graphql.String}, + "distance": &graphql.Field{Type: graphql.Float}, + }, + }))}, + }, + }), + } +} + +func additionalFeatureProjectionField(classname string) *graphql.Field { + return &graphql.Field{ + Args: graphql.FieldConfigArgument{ + "algorithm": &graphql.ArgumentConfig{ + Type: graphql.String, + DefaultValue: nil, + }, + "dimensions": &graphql.ArgumentConfig{ + Type: graphql.Int, + DefaultValue: nil, + }, + "learningRate": &graphql.ArgumentConfig{ + Type: graphql.Int, + DefaultValue: nil, + }, + "iterations": &graphql.ArgumentConfig{ + Type: graphql.Int, + DefaultValue: nil, + }, + "perplexity": &graphql.ArgumentConfig{ + Type: graphql.Int, + DefaultValue: nil, + }, + }, + Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalFeatureProjection", classname), + Fields: graphql.Fields{ + "vector": &graphql.Field{Type: graphql.NewList(graphql.Float)}, + }, + }), + } +} + +func additionalSemanticPathField(classname string) *graphql.Field { + return &graphql.Field{ + Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalSemanticPath", classname), + Fields: graphql.Fields{ + "path": &graphql.Field{Type: graphql.NewList(graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalSemanticPathElement", classname), + Fields: graphql.Fields{ + "concept": &graphql.Field{Type: graphql.String}, + "distanceToQuery": &graphql.Field{Type: 
graphql.Float}, + "distanceToResult": &graphql.Field{Type: graphql.Float}, + "distanceToNext": &graphql.Field{Type: graphql.Float}, + "distanceToPrevious": &graphql.Field{Type: graphql.Float}, + }, + }))}, + }, + }), + } +} + +func additionalInterpretationField(classname string) *graphql.Field { + return &graphql.Field{ + Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalInterpretation", classname), + Fields: graphql.Fields{ + "source": &graphql.Field{Type: graphql.NewList(graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalInterpretationSource", classname), + Fields: graphql.Fields{ + "concept": &graphql.Field{Type: graphql.String}, + "weight": &graphql.Field{Type: graphql.Float}, + "occurrence": &graphql.Field{Type: graphql.Int}, + }, + }))}, + }, + }), + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/additional_arguments_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/additional_arguments_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a06a67e8b0573e0cc48df09c90c7d8f2a86c242c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/additional_arguments_test.go @@ -0,0 +1,163 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package additional + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/tailor-inc/graphql" +) + +func TestNearestNeighborsField(t *testing.T) { + t.Run("should generate nearestNeighbors argument properly", func(t *testing.T) { + // given + classname := "Class" + + // when + nearestNeighbors := additionalNearestNeighborsField(classname) + + // then + // the built graphQL field needs to support this structure: + // Type: { + // neighbors: { + // concept: "c1", + // distance: 0.8 + // } + // } + assert.NotNil(t, nearestNeighbors) + assert.Equal(t, "ClassAdditionalNearestNeighbors", nearestNeighbors.Type.Name()) + assert.NotNil(t, nearestNeighbors.Type) + nearestNeighborsObject, nearestNeighborsObjectOK := nearestNeighbors.Type.(*graphql.Object) + assert.True(t, nearestNeighborsObjectOK) + assert.Equal(t, 1, len(nearestNeighborsObject.Fields())) + neighbors, neighborsOK := nearestNeighborsObject.Fields()["neighbors"] + assert.True(t, neighborsOK) + neighborsList, neighborsListOK := neighbors.Type.(*graphql.List) + assert.True(t, neighborsListOK) + neighborsListObjects, neighborsListObjectsOK := neighborsList.OfType.(*graphql.Object) + assert.True(t, neighborsListObjectsOK) + assert.Equal(t, 2, len(neighborsListObjects.Fields())) + assert.NotNil(t, neighborsListObjects.Fields()["concept"]) + assert.NotNil(t, neighborsListObjects.Fields()["distance"]) + }) +} + +func TestFeatureProjectionField(t *testing.T) { + t.Run("should generate featureProjection argument properly", func(t *testing.T) { + // given + classname := "Class" + + // when + featureProjection := additionalFeatureProjectionField(classname) + + // then + // the built graphQL field needs to support this structure: + // Args: { + // algorithm: "a", + // dimensions: 1, + // learningRate: 2, + // iterations: 3, + // perplexity: 4 + // } + // Type: { + // vector: [0, 1] + // } + assert.NotNil(t, featureProjection) + assert.Equal(t, 
"ClassAdditionalFeatureProjection", featureProjection.Type.Name()) + assert.NotNil(t, featureProjection.Args) + assert.Equal(t, 5, len(featureProjection.Args)) + assert.NotNil(t, featureProjection.Args["algorithm"]) + assert.NotNil(t, featureProjection.Args["dimensions"]) + assert.NotNil(t, featureProjection.Args["learningRate"]) + assert.NotNil(t, featureProjection.Args["iterations"]) + assert.NotNil(t, featureProjection.Args["perplexity"]) + featureProjectionObject, featureProjectionObjectOK := featureProjection.Type.(*graphql.Object) + assert.True(t, featureProjectionObjectOK) + assert.Equal(t, 1, len(featureProjectionObject.Fields())) + assert.NotNil(t, featureProjectionObject.Fields()["vector"]) + }) +} + +func TestSemanticPathField(t *testing.T) { + t.Run("should generate semanticPath argument properly", func(t *testing.T) { + // given + classname := "Class" + + // when + semanticPath := additionalSemanticPathField(classname) + + // then + // the built graphQL field needs to support this structure: + // Type: { + // path: [ + // { + // concept: "c1", + // distanceToQuery: 0.1, + // distanceToResult: 0.2, + // distanceToNext: 0.3, + // distanceToPrevious: 0.4, + // } + // } + assert.NotNil(t, semanticPath) + assert.Equal(t, "ClassAdditionalSemanticPath", semanticPath.Type.Name()) + semanticPathObject, semanticPathObjectOK := semanticPath.Type.(*graphql.Object) + assert.True(t, semanticPathObjectOK) + assert.Equal(t, 1, len(semanticPathObject.Fields())) + assert.NotNil(t, semanticPathObject.Fields()["path"]) + semanticPathObjectList, semanticPathObjectListOK := semanticPathObject.Fields()["path"].Type.(*graphql.List) + assert.True(t, semanticPathObjectListOK) + semanticPathObjectListObjects, semanticPathObjectListOK := semanticPathObjectList.OfType.(*graphql.Object) + assert.True(t, semanticPathObjectListOK) + assert.Equal(t, 5, len(semanticPathObjectListObjects.Fields())) + assert.NotNil(t, semanticPathObjectListObjects.Fields()["concept"]) + assert.NotNil(t, 
semanticPathObjectListObjects.Fields()["distanceToQuery"]) + assert.NotNil(t, semanticPathObjectListObjects.Fields()["distanceToResult"]) + assert.NotNil(t, semanticPathObjectListObjects.Fields()["distanceToNext"]) + assert.NotNil(t, semanticPathObjectListObjects.Fields()["distanceToPrevious"]) + }) +} + +func TestNearestInterpretationField(t *testing.T) { + t.Run("should generate interpretation argument properly", func(t *testing.T) { + // given + classname := "Class" + + // when + interpretation := additionalInterpretationField(classname) + + // then + // the built graphQL field needs to support this structure: + // Type: { + // source: [ + // { + // concept: "c1", + // weight: 0.1, + // occurrence: 0.2, + // } + // } + assert.NotNil(t, interpretation) + assert.Equal(t, "ClassAdditionalInterpretation", interpretation.Type.Name()) + interpretationObject, interpretationObjectOK := interpretation.Type.(*graphql.Object) + assert.True(t, interpretationObjectOK) + assert.Equal(t, 1, len(interpretationObject.Fields())) + assert.NotNil(t, interpretationObject.Fields()["source"]) + interpretationObjectList, interpretationObjectListOK := interpretationObject.Fields()["source"].Type.(*graphql.List) + assert.True(t, interpretationObjectListOK) + interpretationObjectListObjects, interpretationObjectListObjectsOK := interpretationObjectList.OfType.(*graphql.Object) + assert.True(t, interpretationObjectListObjectsOK) + assert.Equal(t, 3, len(interpretationObjectListObjects.Fields())) + assert.NotNil(t, interpretationObjectListObjects.Fields()["concept"]) + assert.NotNil(t, interpretationObjectListObjects.Fields()["weight"]) + assert.NotNil(t, interpretationObjectListObjects.Fields()["occurrence"]) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/interpretation/interpretation.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/interpretation/interpretation.go new file mode 100644 index 
0000000000000000000000000000000000000000..4783cf12776c7c6d4572e8ab0adec0e922516990 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/interpretation/interpretation.go @@ -0,0 +1,48 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package interpretation + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/search" +) + +type Interpretation struct{} + +func New() *Interpretation { + return &Interpretation{} +} + +func (e *Interpretation) AdditionalPropertyDefaultValue() interface{} { + return true +} + +func (e *Interpretation) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + // this is a special case additional value + // this value is being added to storage object in vectorization process + // interpretation is being saved in DB when making vectorization + // interpretation is being extracted and added to the result + // when it's being read from DB (see storage_object.go) + return in, nil +} + +func (e *Interpretation) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return true +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/models/models.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/models/models.go new file mode 100644 index 0000000000000000000000000000000000000000..3f13df1f8d71d26d6d0fb7bf10e9afa4d77f46d7 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/models/models.go @@ -0,0 +1,48 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package models + +type FeatureProjection struct { + Vector []float32 `json:"vector"` +} + +type NearestNeighbors struct { + Neighbors []*NearestNeighbor `json:"neighbors"` +} + +type NearestNeighbor struct { + Concept string `json:"concept,omitempty"` + Distance float32 `json:"distance,omitempty"` + Vector []float32 `json:"vector"` +} + +type SemanticPath struct { + Path []*SemanticPathElement `json:"path"` +} + +type SemanticPathElement struct { + Concept string `json:"concept,omitempty"` + DistanceToNext *float32 `json:"distanceToNext,omitempty"` + DistanceToPrevious *float32 `json:"distanceToPrevious,omitempty"` + DistanceToQuery float32 `json:"distanceToQuery,omitempty"` + DistanceToResult float32 `json:"distanceToResult,omitempty"` +} + +type Interpretation struct { + Source []*InterpretationSource `json:"source"` +} + +type InterpretationSource struct { + Concept string `json:"concept,omitempty"` + Occurrence uint64 `json:"occurrence,omitempty"` + Weight float64 `json:"weight,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/nearestneighbors/extender.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/nearestneighbors/extender.go new file mode 100644 index 0000000000000000000000000000000000000000..63bf5d4cb80f3ad9a95a2ff6d59ad05ab3be38a3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/nearestneighbors/extender.go @@ -0,0 +1,130 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// 
\ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package nearestneighbors + +import ( + "context" + "fmt" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/pkg/errors" + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/search" + txt2vecmodels "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/models" +) + +const ( + DefaultLimit = 10 + DefaultK = 32 +) + +type Extender struct { + searcher contextionary +} + +type contextionary interface { + MultiNearestWordsByVector(ctx context.Context, vectors [][]float32, k, n int) ([]*txt2vecmodels.NearestNeighbors, error) +} + +func (e *Extender) AdditionalPropertyDefaultValue() interface{} { + return true +} + +func (e *Extender) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + return e.Multi(ctx, in, limit) +} + +func (e *Extender) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return true +} + +func (e *Extender) Single(ctx context.Context, in *search.Result, limit *int) (*search.Result, error) { + if in == nil { + return nil, nil + } + + multiRes, err := e.Multi(ctx, []search.Result{*in}, limit) // safe to deref, as we did a nil check before + if err != nil { + return nil, err + } + + return &multiRes[0], nil +} + +func (e *Extender) Multi(ctx context.Context, in []search.Result, limit *int) ([]search.Result, error) { + if in == nil { + return nil, nil + } + + vectors := make([][]float32, len(in)) + for i, res := range in { + if len(res.Vector) == 0 { + return nil, fmt.Errorf("item %d has no vector", i) + } + vectors[i] = res.Vector + } + + neighbors, err := 
e.searcher.MultiNearestWordsByVector(ctx, vectors, DefaultK, limitOrDefault(limit)) + if err != nil { + return nil, errors.Wrap(err, "get neighbors for search results") + } + + if len(neighbors) != len(in) { + return nil, fmt.Errorf("inconsistent results: input=%d neighbors=%d", len(in), len(neighbors)) + } + + for i, res := range in { + up := res.AdditionalProperties + if up == nil { + up = models.AdditionalProperties{} + } + + up["nearestNeighbors"] = removeDollarElements(neighbors[i]) + in[i].AdditionalProperties = up + } + + return in, nil +} + +func NewExtender(searcher contextionary) *Extender { + return &Extender{searcher: searcher} +} + +func limitOrDefault(user *int) int { + if user == nil || *user == 0 { + return DefaultLimit + } + + return *user +} + +func removeDollarElements(in *txt2vecmodels.NearestNeighbors) *txt2vecmodels.NearestNeighbors { + neighbors := make([]*txt2vecmodels.NearestNeighbor, len(in.Neighbors)) + i := 0 + for _, elem := range in.Neighbors { + if elem.Concept[0] == '$' { + continue + } + + neighbors[i] = elem + i++ + } + + return &txt2vecmodels.NearestNeighbors{ + Neighbors: neighbors[:i], + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/nearestneighbors/extender_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/nearestneighbors/extender_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d1e0bfaa48370cb5528a2298a566959c5e783dc4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/nearestneighbors/extender_test.go @@ -0,0 +1,261 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
//
//  CONTACT: hello@weaviate.io
//

package nearestneighbors

import (
	"context"
	"testing"

	"github.com/go-openapi/strfmt"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/weaviate/weaviate/entities/additional"
	"github.com/weaviate/weaviate/entities/search"
	txt2vecmodels "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/models"
)

// TestExtender verifies that Single/Multi attach "nearestNeighbors" to each
// result, keep pre-existing additional properties, strip "$..." concepts
// (see the fake's "$THING[abc]" entry), and pass the input vectors through
// to the contextionary searcher.
func TestExtender(t *testing.T) {
	f := &fakeContextionary{}
	e := NewExtender(f)

	t.Run("with empty results", func(t *testing.T) {
		testData := []search.Result(nil)
		expectedResults := []search.Result(nil)

		res, err := e.Multi(context.Background(), testData, nil)
		require.Nil(t, err)
		assert.Equal(t, expectedResults, res)
	})

	t.Run("with a single result", func(t *testing.T) {
		testData := &search.Result{
			Schema: map[string]interface{}{"name": "item1"},
			Vector: []float32{0.1, 0.3, 0.5},
			AdditionalProperties: map[string]interface{}{
				"classification": &additional.Classification{ // verify it doesn't remove existing additional props
					ID: strfmt.UUID("123"),
				},
			},
		}

		expectedResult := &search.Result{
			Schema: map[string]interface{}{"name": "item1"},
			Vector: []float32{0.1, 0.3, 0.5},
			AdditionalProperties: map[string]interface{}{
				"classification": &additional.Classification{ // verify it doesn't remove existing additional props
					ID: strfmt.UUID("123"),
				},
				// "$THING[abc]" from the fake is filtered out by removeDollarElements
				"nearestNeighbors": &txt2vecmodels.NearestNeighbors{
					Neighbors: []*txt2vecmodels.NearestNeighbor{
						{
							Concept:  "word1",
							Distance: 1,
						},
						{
							Concept:  "word2",
							Distance: 2,
						},
						{
							Concept:  "word3",
							Distance: 3,
						},
					},
				},
			},
		}

		res, err := e.Single(context.Background(), testData, nil)
		require.Nil(t, err)
		assert.Equal(t, expectedResult, res)
	})

	t.Run("with multiple results", func(t *testing.T) {
		vectors := [][]float32{
			{0.1, 0.2, 0.3},
			{0.11, 0.22, 0.33},
			{0.111, 0.222, 0.333},
		}

		testData := []search.Result{
			{
				Schema: map[string]interface{}{"name": "item1"},
				Vector: vectors[0],
			},
			{
				Schema: map[string]interface{}{"name": "item2"},
				Vector: vectors[1],
			},
			{
				Schema: map[string]interface{}{"name": "item3"},
				Vector: vectors[2],
				AdditionalProperties: map[string]interface{}{
					"classification": &additional.Classification{ // verify it doesn't remove existing additional props
						ID: strfmt.UUID("123"),
					},
				},
			},
		}

		expectedResults := []search.Result{
			{
				Schema: map[string]interface{}{"name": "item1"},
				Vector: vectors[0],
				AdditionalProperties: map[string]interface{}{
					"nearestNeighbors": &txt2vecmodels.NearestNeighbors{
						Neighbors: []*txt2vecmodels.NearestNeighbor{
							{
								Concept:  "word1",
								Distance: 1,
							},
							{
								Concept:  "word2",
								Distance: 2,
							},
							{
								Concept:  "word3",
								Distance: 3,
							},
						},
					},
				},
			},
			{
				Schema: map[string]interface{}{"name": "item2"},
				Vector: vectors[1],
				AdditionalProperties: map[string]interface{}{
					"nearestNeighbors": &txt2vecmodels.NearestNeighbors{
						Neighbors: []*txt2vecmodels.NearestNeighbor{
							{
								Concept:  "word4",
								Distance: 0.1,
							},
							{
								Concept:  "word5",
								Distance: 0.2,
							},
							{
								Concept:  "word6",
								Distance: 0.3,
							},
						},
					},
				},
			},
			{
				Schema: map[string]interface{}{"name": "item3"},
				Vector: vectors[2],
				AdditionalProperties: map[string]interface{}{
					"classification": &additional.Classification{ // verify it doesn't remove existing additional props
						ID: strfmt.UUID("123"),
					},
					"nearestNeighbors": &txt2vecmodels.NearestNeighbors{
						Neighbors: []*txt2vecmodels.NearestNeighbor{
							{
								Concept:  "word7",
								Distance: 1.1,
							},
							{
								Concept:  "word8",
								Distance: 2.2,
							},
							{
								Concept:  "word9",
								Distance: 3.3,
							},
						},
					},
				},
			},
		}

		res, err := e.Multi(context.Background(), testData, nil)
		require.Nil(t, err)
		assert.Equal(t, expectedResults, res)
		assert.Equal(t, f.calledWithVectors, vectors)
	})
}

// fakeContextionary records the vectors it was queried with and returns
// canned neighbor sets (including one "$..." concept to test filtering).
type fakeContextionary struct {
	calledWithVectors [][]float32
}

func (f *fakeContextionary) MultiNearestWordsByVector(ctx context.Context, vectors [][]float32, k, n int) ([]*txt2vecmodels.NearestNeighbors, error) {
	f.calledWithVectors = vectors
	out := []*txt2vecmodels.NearestNeighbors{
		{
			Neighbors: []*txt2vecmodels.NearestNeighbor{
				{
					Concept:  "word1",
					Distance: 1.0,
					Vector:   nil,
				},
				{
					Concept:  "word2",
					Distance: 2.0,
					Vector:   nil,
				},
				{
					Concept:  "$THING[abc]",
					Distance: 9.99,
					Vector:   nil,
				},
				{
					Concept:  "word3",
					Distance: 3.0,
					Vector:   nil,
				},
			},
		},

		{
			Neighbors: []*txt2vecmodels.NearestNeighbor{
				{
					Concept:  "word4",
					Distance: 0.1,
					Vector:   nil,
				},
				{
					Concept:  "word5",
					Distance: 0.2,
					Vector:   nil,
				},
				{
					Concept:  "word6",
					Distance: 0.3,
					Vector:   nil,
				},
			},
		},

		{
			Neighbors: []*txt2vecmodels.NearestNeighbor{
				{
					Concept:  "word7",
					Distance: 1.1,
					Vector:   nil,
				},
				{
					Concept:  "word8",
					Distance: 2.2,
					Vector:   nil,
				},
				{
					Concept:  "word9",
					Distance: 3.3,
					Vector:   nil,
				},
			},
		},
	}

	return out[:len(vectors)], nil // return up to three results, but fewer if the input is shorter
}
+// +// CONTACT: hello@weaviate.io +// + +package additional + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/search" +) + +type AdditionalProperty interface { + AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig) ([]search.Result, error) + ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} + AdditionalPropertyDefaultValue() interface{} +} + +type GraphQLAdditionalArgumentsProvider struct { + nnExtender AdditionalProperty + projector AdditionalProperty + sempathBuilder AdditionalProperty + interpretation AdditionalProperty +} + +func New(nnExtender, projector, sempath, interpretation AdditionalProperty) *GraphQLAdditionalArgumentsProvider { + return &GraphQLAdditionalArgumentsProvider{nnExtender, projector, sempath, interpretation} +} + +func (p *GraphQLAdditionalArgumentsProvider) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + additionalProperties := map[string]modulecapabilities.AdditionalProperty{} + additionalProperties["nearestNeighbors"] = p.getNearestNeighbors() + additionalProperties["featureProjection"] = p.getFeatureProjection() + additionalProperties["semanticPath"] = p.getSemanticPath() + additionalProperties["interpretation"] = p.getInterpretation() + return additionalProperties +} + +func (p *GraphQLAdditionalArgumentsProvider) getNearestNeighbors() modulecapabilities.AdditionalProperty { + return modulecapabilities.AdditionalProperty{ + RestNames: []string{ + "nearestNeighbors", + "nearestneighbors", + "nearest-neighbors", + "nearest_neighbors", + }, + DefaultValue: p.nnExtender.AdditionalPropertyDefaultValue(), + GraphQLNames: 
[]string{"nearestNeighbors"}, + GraphQLFieldFunction: additionalNearestNeighborsField, + GraphQLExtractFunction: p.nnExtender.ExtractAdditionalFn, + SearchFunctions: modulecapabilities.AdditionalSearch{ + ObjectGet: p.nnExtender.AdditionalPropertyFn, + ObjectList: p.nnExtender.AdditionalPropertyFn, + ExploreGet: p.nnExtender.AdditionalPropertyFn, + ExploreList: p.nnExtender.AdditionalPropertyFn, + }, + } +} + +func (p *GraphQLAdditionalArgumentsProvider) getFeatureProjection() modulecapabilities.AdditionalProperty { + return modulecapabilities.AdditionalProperty{ + RestNames: []string{ + "featureProjection", + "featureprojection", + "feature-projection", + "feature_projection", + }, + DefaultValue: p.projector.AdditionalPropertyDefaultValue(), + GraphQLNames: []string{"featureProjection"}, + GraphQLFieldFunction: additionalFeatureProjectionField, + GraphQLExtractFunction: p.projector.ExtractAdditionalFn, + SearchFunctions: modulecapabilities.AdditionalSearch{ + ObjectList: p.projector.AdditionalPropertyFn, + ExploreGet: p.projector.AdditionalPropertyFn, + ExploreList: p.projector.AdditionalPropertyFn, + }, + } +} + +func (p *GraphQLAdditionalArgumentsProvider) getSemanticPath() modulecapabilities.AdditionalProperty { + return modulecapabilities.AdditionalProperty{ + DefaultValue: p.sempathBuilder.AdditionalPropertyDefaultValue(), + GraphQLNames: []string{"semanticPath"}, + GraphQLFieldFunction: additionalSemanticPathField, + GraphQLExtractFunction: p.sempathBuilder.ExtractAdditionalFn, + SearchFunctions: modulecapabilities.AdditionalSearch{ + ExploreGet: p.sempathBuilder.AdditionalPropertyFn, + }, + } +} + +func (p *GraphQLAdditionalArgumentsProvider) getInterpretation() modulecapabilities.AdditionalProperty { + return modulecapabilities.AdditionalProperty{ + RestNames: []string{ + "interpretation", + }, + DefaultValue: p.interpretation.AdditionalPropertyDefaultValue(), + GraphQLNames: []string{"interpretation"}, + GraphQLFieldFunction: 
additionalInterpretationField, + GraphQLExtractFunction: p.interpretation.ExtractAdditionalFn, + SearchFunctions: modulecapabilities.AdditionalSearch{ + ObjectGet: p.interpretation.AdditionalPropertyFn, + ObjectList: p.interpretation.AdditionalPropertyFn, + ExploreGet: p.interpretation.AdditionalPropertyFn, + ExploreList: p.interpretation.AdditionalPropertyFn, + }, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/sempath/builder.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/sempath/builder.go new file mode 100644 index 0000000000000000000000000000000000000000..e72b1cfb8ffe175d70a5a89e52d7804e66bcb11b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/sempath/builder.go @@ -0,0 +1,400 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package sempath + +import ( + "context" + "fmt" + "math" + "sort" + "time" + + "github.com/danaugrs/go-tsne/tsne" + "github.com/pkg/errors" + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/search" + txt2vecmodels "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/models" + "gonum.org/v1/gonum/mat" +) + +func New(c11y Remote) *PathBuilder { + return &PathBuilder{ + fixedSeed: time.Now().UnixNano(), + c11y: c11y, + } +} + +type PathBuilder struct { + fixedSeed int64 + c11y Remote +} + +type Remote interface { + MultiNearestWordsByVector(ctx context.Context, vectors [][]float32, k, n int) ([]*txt2vecmodels.NearestNeighbors, error) +} + +func (pb *PathBuilder) AdditionalPropertyDefaultValue() interface{} { + return &Params{} +} + +func (pb *PathBuilder) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + if parameters, ok := params.(*Params); ok { + return pb.CalculatePath(in, parameters) + } + return nil, errors.New("unknown params") +} + +func (pb *PathBuilder) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return &Params{} +} + +func (pb *PathBuilder) CalculatePath(in []search.Result, params *Params) ([]search.Result, error) { + if len(in) == 0 { + return nil, nil + } + + if params == nil { + return nil, fmt.Errorf("no params provided") + } + + dims := len(in[0].Vector) + if err := params.SetDefaultsAndValidate(len(in), dims); err != nil { + return nil, errors.Wrap(err, "invalid params") + } + + searchNeighbors, err := pb.addSearchNeighbors(params) + if err != nil { + return nil, err + } + + for i, obj := range in { + path, err := pb.calculatePathPerObject(obj, in, params, 
searchNeighbors) + if err != nil { + return nil, fmt.Errorf("object %d: %w", i, err) + } + + if in[i].AdditionalProperties == nil { + in[i].AdditionalProperties = models.AdditionalProperties{} + } + + in[i].AdditionalProperties["semanticPath"] = path + } + + return in, nil +} + +func (pb *PathBuilder) calculatePathPerObject(obj search.Result, allObjects []search.Result, params *Params, + searchNeighbors []*txt2vecmodels.NearestNeighbor, +) (*txt2vecmodels.SemanticPath, error) { + dims := len(obj.Vector) + matrix, neighbors, err := pb.vectorsToMatrix(obj, allObjects, dims, params, searchNeighbors) + if err != nil { + return nil, err + } + + inputRows := matrix.RawMatrix().Rows + t := tsne.NewTSNE(2, float64(inputRows/2), 100, 100, false) + res := t.EmbedData(matrix, nil) + rows, cols := res.Dims() + if rows != inputRows { + return nil, fmt.Errorf("have different output results than input %d != %d", inputRows, rows) + } + + // create an explicit copy of the neighbors, so we don't mutate them. 
+ // Otherwise the 2nd round will have been influenced by the first + projectedNeighbors := copyNeighbors(neighbors) + var projectedSearchVector []float32 + var projectedTargetVector []float32 + for i := 0; i < rows; i++ { + vector := make([]float32, cols) + for j := range vector { + vector[j] = float32(res.At(i, j)) + } + if i == 0 { // the input object + projectedTargetVector = vector + } else if i < 1+len(neighbors) { + // these must be neighbor props + projectedNeighbors[i-1].Vector = vector + } else { + // is now the very last element which is the search vector + projectedSearchVector = vector + } + } + + path := pb.buildPath(projectedNeighbors, projectedSearchVector, projectedTargetVector) + return pb.addDistancesToPath(path, neighbors, params.SearchVector, obj.Vector) +} + +func (pb *PathBuilder) addSearchNeighbors(params *Params) ([]*txt2vecmodels.NearestNeighbor, error) { + nn, err := pb.c11y.MultiNearestWordsByVector(context.TODO(), [][]float32{params.SearchVector}, 36, 50) + if err != nil { + return nil, err + } + + return nn[0].Neighbors, nil +} + +// TODO: document behavior if it actually stays like this +func (pb *PathBuilder) vectorsToMatrix(obj search.Result, allObjects []search.Result, dims int, + params *Params, searchNeighbors []*txt2vecmodels.NearestNeighbor, +) (*mat.Dense, []*txt2vecmodels.NearestNeighbor, error) { + items := 1 // the initial object + var neighbors []*txt2vecmodels.NearestNeighbor + neighbors = pb.extractNeighbors(allObjects) + neighbors = append(neighbors, searchNeighbors...) 
+ neighbors = pb.removeDuplicateNeighborsAndDollarNeighbors(neighbors) + items += len(neighbors) + 1 // The +1 is for the search vector which we append last + + // concat all vectors to build gonum dense matrix + mergedVectors := make([]float64, items*dims) + if l := len(obj.Vector); l != dims { + return nil, nil, fmt.Errorf("object: inconsistent vector lengths found: dimensions=%d and object=%d", dims, l) + } + + for j, dim := range obj.Vector { + mergedVectors[j] = float64(dim) + } + + withoutNeighbors := 1 * dims + for i, neighbor := range neighbors { + neighborVector := neighbor.Vector + + if l := len(neighborVector); l != dims { + return nil, nil, fmt.Errorf("neighbor: inconsistent vector lengths found: dimensions=%d and object=%d", dims, l) + } + + for j, dim := range neighborVector { + mergedVectors[withoutNeighbors+i*dims+j] = float64(dim) + } + } + + for i, dim := range params.SearchVector { + mergedVectors[len(mergedVectors)-dims+i] = float64(dim) + } + + return mat.NewDense(items, dims, mergedVectors), neighbors, nil +} + +func (pb *PathBuilder) extractNeighbors(in []search.Result) []*txt2vecmodels.NearestNeighbor { + var out []*txt2vecmodels.NearestNeighbor + + for _, obj := range in { + if obj.AdditionalProperties == nil || obj.AdditionalProperties["nearestNeighbors"] == nil { + continue + } + + if neighbors, ok := obj.AdditionalProperties["nearestNeighbors"]; ok { + if nearestNeighbors, ok := neighbors.(*txt2vecmodels.NearestNeighbors); ok { + out = append(out, nearestNeighbors.Neighbors...) 
+ } + } + } + + return out +} + +func (pb *PathBuilder) removeDuplicateNeighborsAndDollarNeighbors(in []*txt2vecmodels.NearestNeighbor) []*txt2vecmodels.NearestNeighbor { + seen := map[string]struct{}{} + out := make([]*txt2vecmodels.NearestNeighbor, len(in)) + + i := 0 + for _, candidate := range in { + if _, ok := seen[candidate.Concept]; ok { + continue + } + + if candidate.Concept[0] == '$' { + continue + } + + out[i] = candidate + i++ + seen[candidate.Concept] = struct{}{} + } + + return out[:i] +} + +func (pb *PathBuilder) buildPath(neighbors []*txt2vecmodels.NearestNeighbor, searchVector []float32, + target []float32, +) *txt2vecmodels.SemanticPath { + var path []*txt2vecmodels.SemanticPathElement + + minDist := float32(math.MaxFloat32) + + current := searchVector // initial search point + + for { + nn := pb.nearestNeighbors(current, neighbors, 10) + nn = pb.discardFurtherThan(nn, minDist, target) + if len(nn) == 0 { + break + } + nn = pb.nearestNeighbors(current, nn, 1) + current = nn[0].Vector + minDist = pb.distance(current, target) + + path = append(path, &txt2vecmodels.SemanticPathElement{ + Concept: nn[0].Concept, + }) + } + + return &txt2vecmodels.SemanticPath{ + Path: path, + } +} + +func (pb *PathBuilder) nearestNeighbors(search []float32, candidates []*txt2vecmodels.NearestNeighbor, length int) []*txt2vecmodels.NearestNeighbor { + sort.Slice(candidates, func(a, b int) bool { + return pb.distance(candidates[a].Vector, search) < pb.distance(candidates[b].Vector, search) + }) + return candidates[:length] +} + +func (pb *PathBuilder) distance(a, b []float32) float32 { + var sums float32 + for i := range a { + sums += (a[i] - b[i]) * (a[i] - b[i]) + } + + return float32(math.Sqrt(float64(sums))) +} + +func (pb *PathBuilder) discardFurtherThan(candidates []*txt2vecmodels.NearestNeighbor, threshold float32, target []float32) []*txt2vecmodels.NearestNeighbor { + out := make([]*txt2vecmodels.NearestNeighbor, len(candidates)) + i := 0 + for _, c := range 
candidates { + if pb.distance(c.Vector, target) >= threshold { + continue + } + + out[i] = c + i++ + } + + return out[:i] +} + +// create an explicit deep copy that does not keep any references +func copyNeighbors(in []*txt2vecmodels.NearestNeighbor) []*txt2vecmodels.NearestNeighbor { + out := make([]*txt2vecmodels.NearestNeighbor, len(in)) + for i, n := range in { + out[i] = &txt2vecmodels.NearestNeighbor{ + Concept: n.Concept, + Distance: n.Distance, + Vector: n.Vector, + } + } + + return out +} + +func (pb *PathBuilder) addDistancesToPath(path *txt2vecmodels.SemanticPath, neighbors []*txt2vecmodels.NearestNeighbor, + searchVector, targetVector []float32, +) (*txt2vecmodels.SemanticPath, error) { + for i, elem := range path.Path { + vec, ok := neighborVecByConcept(neighbors, elem.Concept) + if !ok { + return nil, fmt.Errorf("no vector present for concept: %s", elem.Concept) + } + + if i != 0 { + // include previous + previousVec, ok := neighborVecByConcept(neighbors, path.Path[i-1].Concept) + if !ok { + return nil, fmt.Errorf("no vector present for previous concept: %s", path.Path[i-1].Concept) + } + + d, err := cosineDist(vec, previousVec) + if err != nil { + return nil, errors.Wrap(err, "calculate distance between current path and previous element") + } + + path.Path[i].DistanceToPrevious = &d + } + + // target + d, err := cosineDist(vec, targetVector) + if err != nil { + return nil, errors.Wrap(err, "calculate distance between current path and result element") + } + path.Path[i].DistanceToResult = d + + // query + d, err = cosineDist(vec, searchVector) + if err != nil { + return nil, errors.Wrap(err, "calculate distance between current path and query element") + } + path.Path[i].DistanceToQuery = d + + if i != len(path.Path)-1 { + // include next + nextVec, ok := neighborVecByConcept(neighbors, path.Path[i+1].Concept) + if !ok { + return nil, fmt.Errorf("no vector present for next concept: %s", path.Path[i+1].Concept) + } + + d, err := cosineDist(vec, nextVec) 
+ if err != nil { + return nil, errors.Wrap(err, "calculate distance between current path and next element") + } + + path.Path[i].DistanceToNext = &d + } + } + + return path, nil +} + +func neighborVecByConcept(neighbors []*txt2vecmodels.NearestNeighbor, concept string) ([]float32, bool) { + for _, n := range neighbors { + if n.Concept == concept { + return n.Vector, true + } + } + + return nil, false +} + +func cosineSim(a, b []float32) (float32, error) { + if len(a) != len(b) { + return 0, fmt.Errorf("vectors have different dimensions") + } + + var ( + sumProduct float64 + sumASquare float64 + sumBSquare float64 + ) + + for i := range a { + sumProduct += float64(a[i] * b[i]) + sumASquare += float64(a[i] * a[i]) + sumBSquare += float64(b[i] * b[i]) + } + + return float32(sumProduct / (math.Sqrt(sumASquare) * math.Sqrt(sumBSquare))), nil +} + +func cosineDist(a, b []float32) (float32, error) { + sim, err := cosineSim(a, b) + if err != nil { + return 0, err + } + + return 1 - sim, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/sempath/builder_params.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/sempath/builder_params.go new file mode 100644 index 0000000000000000000000000000000000000000..57b5c5052ccfa4a10e72bb133a2b8eb7e1afc48a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/sempath/builder_params.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package sempath + +import "github.com/weaviate/weaviate/entities/errorcompounder" + +type Params struct { + SearchVector []float32 +} + +func (p *Params) SetSearchVector(vector []float32) { + p.SearchVector = vector +} + +func (p *Params) SetDefaultsAndValidate(inputSize, dims int) error { + return p.validate(inputSize, dims) +} + +func (p *Params) validate(inputSize, dims int) error { + ec := errorcompounder.New() + if inputSize > 25 { + ec.Addf("result length %d is larger than 25 items: semantic path calculation is only suported up to 25 items, set a limit to <= 25", inputSize) + } + + if len(p.SearchVector) == 0 { + ec.Addf("no valid search vector present, got: %v", p.SearchVector) + } + + return ec.ToError() +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/sempath/builder_params_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/sempath/builder_params_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9634e1712d8a99668c6dcbd684d11a2d0d635a8f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/sempath/builder_params_test.go @@ -0,0 +1,105 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package sempath + +import "testing" + +func TestParams_validate(t *testing.T) { + type fields struct { + SearchVector []float32 + } + type args struct { + inputSize int + dims int + } + tests := []struct { + name string + fields fields + args args + wantErr bool + }{ + { + name: "Should validate", + fields: fields{ + SearchVector: []float32{1.0}, + }, + args: args{ + inputSize: 25, + dims: 0, + }, + wantErr: false, + }, + { + name: "Should error with empty SearchVector", + fields: fields{ + SearchVector: []float32{}, + }, + args: args{ + inputSize: 25, + dims: 0, + }, + wantErr: true, + }, + { + name: "Should error with nil SearchVector", + fields: fields{}, + args: args{ + inputSize: 25, + dims: 0, + }, + wantErr: true, + }, + { + name: "Should error with with inputSize greater then 25", + fields: fields{ + SearchVector: []float32{1.0}, + }, + args: args{ + inputSize: 26, + dims: 0, + }, + wantErr: true, + }, + { + name: "Should error with with inputSize greater then 25 and nil SearchVector", + fields: fields{ + SearchVector: nil, + }, + args: args{ + inputSize: 26, + dims: 0, + }, + wantErr: true, + }, + { + name: "Should error with with inputSize greater then 25 and empty SearchVector", + fields: fields{ + SearchVector: []float32{}, + }, + args: args{ + inputSize: 26, + dims: 0, + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + p := &Params{ + SearchVector: tt.fields.SearchVector, + } + if err := p.validate(tt.args.inputSize, tt.args.dims); (err != nil) != tt.wantErr { + t.Errorf("Params.validate() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/sempath/builder_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/sempath/builder_test.go new file mode 100644 index 
0000000000000000000000000000000000000000..a729e4b770399cc523122eab49527e598bf71f73 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/additional/sempath/builder_test.go @@ -0,0 +1,162 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package sempath + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/search" + txt2vecmodels "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/models" +) + +func TestSemanticPathBuilder(t *testing.T) { + t.Skip("go1.20 change") + c11y := &fakeC11y{} + b := New(c11y) + + b.fixedSeed = 1000 // control randomness in unit test + + input := []search.Result{ + { + ID: "7fe919ed-2ef6-4087-856c-a307046bf895", + ClassName: "Foo", + Vector: []float32{1, 0.1}, + }, + } + searchVector := []float32{0.3, 0.3} + + c11y.neighbors = []*txt2vecmodels.NearestNeighbors{ + { + Neighbors: []*txt2vecmodels.NearestNeighbor{ + { + Concept: "good1", + Vector: []float32{0.5, 0.1}, + }, + { + Concept: "good2", + Vector: []float32{0.7, 0.2}, + }, + { + Concept: "good3", + Vector: []float32{0.9, 0.1}, + }, + { + Concept: "good4", + Vector: []float32{0.55, 0.1}, + }, + { + Concept: "good5", + Vector: []float32{0.77, 0.2}, + }, + { + Concept: "good6", + Vector: []float32{0.99, 0.1}, + }, + { + Concept: "bad1", + Vector: []float32{-0.1, -3}, + }, + { + Concept: "bad2", + Vector: []float32{-0.15, -2.75}, + }, + { + Concept: "bad3", + Vector: []float32{-0.22, -2.35}, + }, + { + Concept: "bad4", + Vector: []float32{0.1, -3.3}, + }, + { + Concept: "bad5", + Vector: []float32{0.15, -2.5}, + }, + { + Concept: "bad6", + Vector: []float32{-0.4, -2.25}, + }, + }, + }, + } + + 
res, err := b.CalculatePath(input, &Params{SearchVector: searchVector}) + require.Nil(t, err) + + expectedPath := &txt2vecmodels.SemanticPath{ + Path: []*txt2vecmodels.SemanticPathElement{ + { + Concept: "good5", + DistanceToNext: ptFloat32(0.00029218197), + DistanceToQuery: 0.13783735, + DistanceToResult: 0.011904657, + }, + { + Concept: "good2", + DistanceToNext: ptFloat32(0.014019072), + DistanceToPrevious: ptFloat32(0.00029218197), + DistanceToQuery: 0.12584269, + DistanceToResult: 0.015912116, + }, + { + Concept: "good3", + DistanceToNext: ptFloat32(4.9889088e-05), + DistanceToPrevious: ptFloat32(0.014019072), + DistanceToQuery: 0.21913117, + DistanceToResult: 6.0379505e-05, + }, + { + Concept: "good6", + DistanceToNext: ptFloat32(0.0046744347), + DistanceToPrevious: ptFloat32(4.9889088e-05), + DistanceToQuery: 0.2254098, + DistanceToResult: 5.364418e-07, + }, + { + Concept: "good1", + DistanceToNext: ptFloat32(0.00015383959), + DistanceToPrevious: ptFloat32(0.0046744347), + DistanceToQuery: 0.16794968, + DistanceToResult: 0.004771471, + }, + { + Concept: "good4", + DistanceToPrevious: ptFloat32(0.00015383959), + DistanceToQuery: 0.17780781, + DistanceToResult: 0.003213048, + }, + }, + } + + require.Len(t, res, 1) + require.NotNil(t, res[0].AdditionalProperties) + semanticPath, semanticPathOK := res[0].AdditionalProperties["semanticPath"] + assert.True(t, semanticPathOK) + semanticPathElement, semanticPathElementOK := semanticPath.(*txt2vecmodels.SemanticPath) + assert.True(t, semanticPathElementOK) + assert.Equal(t, expectedPath, semanticPathElement) +} + +type fakeC11y struct { + neighbors []*txt2vecmodels.NearestNeighbors +} + +func (f *fakeC11y) MultiNearestWordsByVector(ctx context.Context, vectors [][]float32, k, n int) ([]*txt2vecmodels.NearestNeighbors, error) { + return f.neighbors, nil +} + +func ptFloat32(in float32) *float32 { + return &in +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/classifier.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/classifier.go new file mode 100644 index 0000000000000000000000000000000000000000..6a7f9d1f2ca7665182f137c2c595927ea38b1156 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/classifier.go @@ -0,0 +1,122 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package classification + +import ( + "context" + "encoding/json" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" +) + +type vectorizer interface { + // MultiVectorForWord must keep order, if an item cannot be vectorized, the + // element should be explicit nil, not skipped + MultiVectorForWord(ctx context.Context, words []string) ([][]float32, error) + VectorOnlyForCorpi(ctx context.Context, corpi []string, overrides map[string]string) ([]float32, error) +} + +type Classifier struct { + vectorizer vectorizer +} + +func New(vectorizer vectorizer) modulecapabilities.Classifier { + return &Classifier{vectorizer: vectorizer} +} + +func (c *Classifier) Name() string { + return "text2vec-contextionary-contextual" +} + +func (c *Classifier) ClassifyFn(params modulecapabilities.ClassifyParams) (modulecapabilities.ClassifyItemFn, error) { + if c.vectorizer == nil { + return nil, errors.Errorf("cannot use text2vec-contextionary-contextual " + + "without the respective module") + } + + // 1. 
do preparation here once + preparedContext, err := c.prepareContextualClassification(params.GetClass, params.VectorRepo, + params.Params, params.Filters, params.UnclassifiedItems) + if err != nil { + return nil, errors.Wrap(err, "prepare context for text2vec-contextionary-contextual classification") + } + + // 2. use higher order function to inject preparation data so it is then present for each single run + return c.makeClassifyItemContextual(preparedContext), nil +} + +func (c *Classifier) ParseClassifierSettings(params *models.Classification) error { + raw := params.Settings + settings := &ParamsContextual{} + if raw == nil { + settings.SetDefaults() + params.Settings = settings + return nil + } + + asMap, ok := raw.(map[string]interface{}) + if !ok { + return errors.Errorf("settings must be an object got %T", raw) + } + + v, err := c.extractNumberFromMap(asMap, "minimumUsableWords") + if err != nil { + return err + } + settings.MinimumUsableWords = v + + v, err = c.extractNumberFromMap(asMap, "informationGainCutoffPercentile") + if err != nil { + return err + } + settings.InformationGainCutoffPercentile = v + + v, err = c.extractNumberFromMap(asMap, "informationGainMaximumBoost") + if err != nil { + return err + } + settings.InformationGainMaximumBoost = v + + v, err = c.extractNumberFromMap(asMap, "tfidfCutoffPercentile") + if err != nil { + return err + } + settings.TfidfCutoffPercentile = v + + settings.SetDefaults() + params.Settings = settings + + return nil +} + +func (c *Classifier) extractNumberFromMap(in map[string]interface{}, field string) (*int32, error) { + unparsed, present := in[field] + if present { + parsed, ok := unparsed.(json.Number) + if !ok { + return nil, errors.Errorf("settings.%s must be number, got %T", + field, unparsed) + } + + asInt64, err := parsed.Int64() + if err != nil { + return nil, errors.Wrapf(err, "settings.%s", field) + } + + asInt32 := int32(asInt64) + return &asInt32, nil + } + + return nil, nil +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/classifier_misc.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/classifier_misc.go new file mode 100644 index 0000000000000000000000000000000000000000..9811e316d8a2d649c69e5775ae974ea59b37895d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/classifier_misc.go @@ -0,0 +1,21 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package classification + +import ( + "context" + "time" +) + +func contextWithTimeout(d time.Duration) (context.Context, context.CancelFunc) { + return context.WithTimeout(context.Background(), d) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/classifier_params.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/classifier_params.go new file mode 100644 index 0000000000000000000000000000000000000000..ac1e8ed15b3dc9996895ec69e6efb3b393f2f3b3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/classifier_params.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
// ParamsContextual holds the tunable settings of a
// text2vec-contextionary-contextual classification. Every field is an
// optional pointer: nil means "not set by the user" and is later replaced
// with a default value by SetDefaults.
type ParamsContextual struct {
	MinimumUsableWords              *int32 `json:"minimumUsableWords"`
	InformationGainCutoffPercentile *int32 `json:"informationGainCutoffPercentile"`
	InformationGainMaximumBoost     *int32 `json:"informationGainMaximumBoost"`
	TfidfCutoffPercentile           *int32 `json:"tfidfCutoffPercentile"`
}

// SetDefaults fills every unset (nil) field with its default:
// minimumUsableWords=3, informationGainCutoffPercentile=50,
// informationGainMaximumBoost=3, tfidfCutoffPercentile=80.
// Fields the user already set are left untouched.
func (params *ParamsContextual) SetDefaults() {
	// applyDefault writes fallback into *field only when it is still nil
	applyDefault := func(field **int32, fallback int32) {
		if *field == nil {
			value := fallback
			*field = &value
		}
	}

	applyDefault(&params.MinimumUsableWords, 3)
	applyDefault(&params.InformationGainCutoffPercentile, 50)
	applyDefault(&params.InformationGainMaximumBoost, 3)
	applyDefault(&params.TfidfCutoffPercentile, 80)
}
package classification

import (
	"fmt"
	"time"

	libfilters "github.com/weaviate/weaviate/entities/filters"
	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/modulecapabilities"
	"github.com/weaviate/weaviate/entities/schema"
	"github.com/weaviate/weaviate/entities/search"
	libclassification "github.com/weaviate/weaviate/usecases/classification"
)

// tfidfScorer yields per-document term scores; satisfied by the
// TfIdfCalculator instances built in calculateTfidfForProps.
type tfidfScorer interface {
	GetAllTerms(docIndex int) []TermWithTfIdf
}

// contextualPreparationContext carries the data that is computed once per
// classification run and then shared by every per-item classify call.
type contextualPreparationContext struct {
	tfidf   map[string]tfidfScorer    // map[basedOnProp]scorer
	targets map[string]search.Results // map[classifyProp]targets
}

// prepareContextualClassification computes the run-wide preparation data:
// the candidate targets for every classify property and a tf-idf scorer
// for every basedOn property.
func (c *Classifier) prepareContextualClassification(getClass func(string) *models.Class,
	vectorRepo modulecapabilities.VectorClassSearchRepo, params models.Classification,
	filters libclassification.Filters, items search.Results,
) (contextualPreparationContext, error) {
	p := &contextualPreparer{
		inputItems: items,
		params:     params,
		repo:       vectorRepo,
		filters:    filters,
		getClass:   getClass,
	}

	return p.do()
}

// contextualPreparer bundles the inputs of the preparation phase.
type contextualPreparer struct {
	inputItems []search.Result
	params     models.Classification
	repo       modulecapabilities.VectorClassSearchRepo
	filters    libclassification.Filters
	getClass   func(string) *models.Class
}

// do runs both preparation steps in order: target discovery, then tf-idf
// calculation over the input items.
func (p *contextualPreparer) do() (contextualPreparationContext, error) {
	pctx := contextualPreparationContext{}

	targets, err := p.findTargetsForProps()
	if err != nil {
		return pctx, err
	}

	pctx.targets = targets

	tfidf, err := p.calculateTfidfForProps()
	if err != nil {
		return pctx, err
	}

	pctx.tfidf = tfidf

	return pctx, nil
}

// calculateTfidfForProps builds one tf-idf calculator per basedOn property.
// Each input item contributes one document: the string value of the
// property, or an empty document when the property is absent.
func (p *contextualPreparer) calculateTfidfForProps() (map[string]tfidfScorer, error) {
	props := map[string]tfidfScorer{}

	for _, basedOnName := range p.params.BasedOnProperties {
		calc := NewTfIdfCalculator(len(p.inputItems))
		for _, obj := range p.inputItems {
			schemaMap, ok := obj.Schema.(map[string]interface{})
			if !ok {
				return nil, fmt.Errorf("no or incorrect schema map present on source object '%s': %T", obj.ID, obj.Schema)
			}

			// a missing property is tolerated: the item simply adds an
			// empty document to the corpus
			var docCorpus string
			if basedOn, ok := schemaMap[basedOnName]; ok {
				basedOnString, ok := basedOn.(string)
				if !ok {
					return nil, fmt.Errorf("property '%s' present on %s, but of unexpected type: want string, got %T",
						basedOnName, obj.ID, basedOn)
				}

				docCorpus = basedOnString
			}

			calc.AddDoc(docCorpus)
		}

		calc.Calculate()
		props[basedOnName] = calc
	}

	return props, nil
}

// findTargetsForProps resolves, for every classify property, the full list
// of candidate target objects it can link to.
func (p *contextualPreparer) findTargetsForProps() (map[string]search.Results, error) {
	targetsMap := map[string]search.Results{}

	for _, targetProp := range p.params.ClassifyProperties {
		class, err := p.classAndKindOfTarget(targetProp)
		if err != nil {
			return nil, fmt.Errorf("target prop '%s': find target class: %w", targetProp, err)
		}

		targets, err := p.findTargets(class)
		if err != nil {
			return nil, fmt.Errorf("target prop '%s': find targets: %w", targetProp, err)
		}

		targetsMap[targetProp] = targets
	}

	return targetsMap, nil
}

// findTargets fetches the candidate targets of the given class, applying
// the run's target filter. An empty result is an error, since no item
// could be classified without candidates.
func (p *contextualPreparer) findTargets(class schema.ClassName) (search.Results, error) {
	ctx, cancel := contextWithTimeout(30 * time.Second)
	defer cancel()
	res, err := p.repo.VectorClassSearch(ctx, modulecapabilities.VectorClassSearchParams{
		Filters: p.filters.Target(),
		Pagination: &libfilters.Pagination{
			// NOTE(review): hard cap on candidate targets; classes with
			// more than 10000 objects are implicitly truncated — confirm
			// this is acceptable
			Limit: 10000,
		},
		ClassName:  string(class),
		Properties: []string{"id"},
	})
	if err != nil {
		return nil, fmt.Errorf("search closest target: %w", err)
	}

	if len(res) == 0 {
		return nil, fmt.Errorf("no potential targets found of class '%s'", class)
	}

	return res, nil
}

// classAndKindOfTarget resolves the class that the given ref property
// points to. Upstream validation guarantees propName is a reference
// property with exactly one target class.
func (p *contextualPreparer) classAndKindOfTarget(propName string) (schema.ClassName, error) {
	class := p.getClass(p.params.Class)
	if class == nil {
		return "", fmt.Errorf("could not find class %s in schema", p.params.Class)
	}

	prop, err := schema.GetPropertyByName(class, propName)
	if err != nil {
		return "", err
	}

	dataType, err := schema.FindPropertyDataTypeWithRefs(p.getClass, prop.DataType, false, "")
	if err != nil {
		return "", fmt.Errorf("extract dataType of prop '%s': %w", propName, err)
	}

	// we have passed validation, so it is safe to assume that this is a ref prop
	targetClasses := dataType.Classes()

	// len=1 is guaranteed from validation
	targetClass := targetClasses[0]

	return targetClass, nil
}
package classification

import (
	"fmt"
	"math"
	"sort"
	"strings"
	"time"

	"github.com/go-openapi/strfmt"
	"github.com/weaviate/weaviate/entities/additional"
	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/modulecapabilities"
	"github.com/weaviate/weaviate/entities/schema/crossref"
	"github.com/weaviate/weaviate/entities/search"
)

// TODO: all of this must be served by the module in the future

// contextualItemClassifier performs the classification of one single item.
// It is constructed per item by the closure that
// makeClassifyItemContextual returns.
type contextualItemClassifier struct {
	item        search.Result
	itemIndex   int // index of the item in the run's input list, used for tf-idf lookups
	params      models.Classification
	settings    *ParamsContextual
	classifier  *Classifier
	writer      modulecapabilities.Writer
	filters     modulecapabilities.Filters
	context     contextualPreparationContext
	vectorizer  vectorizer
	words       []string
	rankedWords map[string][]scoredWord // map[targetProp]words as scoring/ranking is per target
}

// extendItemWithObjectMeta records on the item which properties were
// classified in this run, together with the run id and a timestamp.
func (c *Classifier) extendItemWithObjectMeta(item *search.Result,
	params models.Classification, classified []string,
) {
	// don't overwrite existing non-classification meta info
	if item.AdditionalProperties == nil {
		item.AdditionalProperties = models.AdditionalProperties{}
	}

	item.AdditionalProperties["classification"] = additional.Classification{
		ID:               params.ID,
		Scope:            params.ClassifyProperties,
		ClassifiedFields: classified,
		Completed:        strfmt.DateTime(time.Now()),
	}
}

// makeClassifyItemContextual is a higher-order function to produce the actual
// classify function, but additionally allows us to inject data which is valid
// for the entire run, such as tf-idf data and target vectors
func (c *Classifier) makeClassifyItemContextual(preparedContext contextualPreparationContext) func(search.Result,
	int, models.Classification, modulecapabilities.Filters, modulecapabilities.Writer) error {
	return func(item search.Result, itemIndex int, params models.Classification,
		filters modulecapabilities.Filters, writer modulecapabilities.Writer,
	) error {
		vectorizer := c.vectorizer
		run := &contextualItemClassifier{
			item:       item,
			itemIndex:  itemIndex,
			params:     params,
			settings:   params.Settings.(*ParamsContextual), // safe assertion after parsing
			classifier: c,
			writer:     writer,
			filters:    filters,
			context:    preparedContext,
			vectorizer: vectorizer,
			rankedWords: map[string][]scoredWord{},
		}

		err := run.do()
		if err != nil {
			return fmt.Errorf("text2vec-contextionary-contextual: %w", err)
		}

		return nil
	}
}

// do classifies every configured property of the item, attaches the
// classification meta information and stores the updated item.
func (c *contextualItemClassifier) do() error {
	var classified []string
	for _, propName := range c.params.ClassifyProperties {
		current, err := c.property(propName)
		if err != nil {
			return fmt.Errorf("prop '%s': %w", propName, err)
		}

		// append list of actually classified (can differ from scope!) properties,
		// so we can build the object meta information
		classified = append(classified, current)
	}

	c.classifier.extendItemWithObjectMeta(&c.item, c.params, classified)
	err := c.writer.Store(c.item)
	if err != nil {
		return fmt.Errorf("store %s/%s: %w", c.item.ClassName, c.item.ID, err)
	}

	return nil
}

// property classifies one target property of the item: it vectorizes the
// words of the basedOn text, builds a boosted corpus from the most
// informative words, vectorizes that corpus and links the item to the
// closest target. It returns the name of the classified property.
func (c *contextualItemClassifier) property(propName string) (string, error) {
	targets, ok := c.context.targets[propName]
	if !ok || len(targets) == 0 {
		return "", fmt.Errorf("have no potential targets for property '%s'", propName)
	}

	schemaMap, ok := c.item.Schema.(map[string]interface{})
	if !ok {
		return "", fmt.Errorf("no or incorrect schema map present on source c.object '%s': %T", c.item.ID, c.item.Schema)
	}

	// Limitation for now, basedOnProperty is always 0
	basedOnName := c.params.BasedOnProperties[0]
	basedOn, ok := schemaMap[basedOnName]
	if !ok {
		// NOTE(review): this message reports propName, but the key that was
		// actually missing from the schema map is basedOnName — likely a
		// copy/paste slip worth fixing.
		return "", fmt.Errorf("property '%s' not found on source c.object '%s': %T", propName, c.item.ID, c.item.Schema)
	}

	basedOnString, ok := basedOn.(string)
	if !ok {
		return "", fmt.Errorf("property '%s' present on %s, but of unexpected type: want string, got %T",
			basedOnName, c.item.ID, basedOn)
	}

	words := newSplitter().Split(basedOnString)
	c.words = words

	ctx, cancel := contextWithTimeout(10 * time.Second)
	defer cancel()

	vectors, err := c.vectorizer.MultiVectorForWord(ctx, words)
	if err != nil {
		return "", fmt.Errorf("vectorize individual words: %w", err)
	}

	scoredWords, err := c.scoreWords(words, vectors, propName)
	if err != nil {
		return "", fmt.Errorf("score words: %w", err)
	}

	c.rankedWords[propName] = c.rankAndDedup(scoredWords)

	corpus, boosts, err := c.buildBoostedCorpus(propName)
	if err != nil {
		return "", fmt.Errorf("build corpus: %w", err)
	}

	ctx, cancel = contextWithTimeout(10 * time.Second)
	defer cancel()
	vector, err := c.vectorizer.VectorOnlyForCorpi(ctx, []string{corpus}, boosts)
	if err != nil {
		return "", fmt.Errorf("vectorize corpus: %w", err)
	}

	target, distance, err := c.findClosestTarget(vector, propName)
	if err != nil {
		return "", fmt.Errorf("find closest target: %w", err)
	}

	// write the winning target as a single cross-reference onto the item,
	// including the winning distance for introspection
	targetBeacon := crossref.New("localhost", target.ClassName, target.ID).String()
	c.item.Schema.(map[string]interface{})[propName] = models.MultipleRef{
		&models.SingleRef{
			Beacon: strfmt.URI(targetBeacon),
			Classification: &models.ReferenceMetaClassification{
				WinningDistance: float64(distance),
			},
		},
	}

	return propName, nil
}

// findClosestTarget returns the candidate target with the smallest cosine
// distance to query, along with that distance.
func (c *contextualItemClassifier) findClosestTarget(query []float32, targetProp string) (*search.Result, float32, error) {
	minimum := float32(100000)
	var prediction search.Result

	for _, item := range c.context.targets[targetProp] {
		dist, err := cosineDist(query, item.Vector)
		if err != nil {
			return nil, -1, fmt.Errorf("calculate distance: %w", err)
		}

		if dist < minimum {
			minimum = dist
			prediction = item
		}
	}

	return &prediction, minimum, nil
}

// buildBoostedCorpus selects the item's words that pass both the
// information-gain and the tf-idf percentile cutoffs, falls back to the
// top-N ranked words when too few qualify, and returns the lowercased
// corpus plus per-word boost overrides.
func (c *contextualItemClassifier) buildBoostedCorpus(targetProp string) (string, map[string]string, error) {
	var corpus []string

	for _, word := range c.words {
		word = strings.ToLower(word)

		tfscores := c.context.tfidf[c.params.BasedOnProperties[0]].GetAllTerms(c.itemIndex)
		// dereferencing these optional parameters is safe, as defaults are
		// explicitly set in classifier.Schedule()
		if c.isInIgPercentile(int(*c.settings.InformationGainCutoffPercentile), word, targetProp) &&
			c.isInTfPercentile(tfscores, int(*c.settings.TfidfCutoffPercentile), word) {
			corpus = append(corpus, word)
		}
	}

	// use minimum words if len is currently less
	limit := int(*c.settings.MinimumUsableWords)
	if len(corpus) < limit {
		corpus = c.getTopNWords(targetProp, limit)
	}

	corpusStr := strings.ToLower(strings.Join(corpus, " "))
	boosts := c.boostByInformationGain(targetProp, int(*c.settings.InformationGainCutoffPercentile),
		float32(*c.settings.InformationGainMaximumBoost))
	return corpusStr, boosts, nil
}

// boostByInformationGain computes a boost expression ("<factor> * w") for
// each word inside the information-gain percentile cutoff. Boosts grow
// logarithmically towards the top-ranked word and are clamped at maxBoost
// (the i=0 case yields log(0) = -Inf, hence +Inf boost, caught by IsInf).
func (c *contextualItemClassifier) boostByInformationGain(targetProp string, percentile int,
	maxBoost float32,
) map[string]string {
	cutoff := int(float32(percentile) / float32(100) * float32(len(c.rankedWords[targetProp])))
	out := make(map[string]string, cutoff)

	for i, word := range c.rankedWords[targetProp][:cutoff] {
		boost := 1 - float32(math.Log(float64(i)/float64(cutoff)))*float32(1)
		if math.IsInf(float64(boost), 1) || boost > maxBoost {
			boost = maxBoost
		}

		out[word.word] = fmt.Sprintf("%f * w", boost)
	}

	return out
}

// scoredWord is one word of the basedOn text together with its distance to
// the closest target and its information gain (avg distance - min distance).
type scoredWord struct {
	word            string
	distance        float32
	informationGain float32
}

// getTopNWords returns the words of the top `limit` ranked entries for the
// given target property (fewer if not enough words are ranked).
func (c *contextualItemClassifier) getTopNWords(targetProp string, limit int) []string {
	words := c.rankedWords[targetProp]

	if len(words) < limit {
		limit = len(words)
	}

	out := make([]string, limit)
	for i := 0; i < limit; i++ {
		out[i] = words[i].word
	}

	return out
}

// rankAndDedup orders scored words by information gain (descending) and
// removes duplicate words.
func (c *contextualItemClassifier) rankAndDedup(in []*scoredWord) []scoredWord {
	return c.dedup(c.rank(in))
}

// dedup removes adjacent duplicates; correctness relies on the input
// already being ordered so equal words are neighbors.
func (c *contextualItemClassifier) dedup(in []scoredWord) []scoredWord {
	// simple dedup since it's already ordered, we only need to check the previous element
	indexOut := 0
	out := make([]scoredWord, len(in))
	for i, elem := range in {
		if i == 0 {
			out[indexOut] = elem
			indexOut++
			continue
		}

		if elem.word == out[indexOut-1].word {
			continue
		}

		out[indexOut] = elem
		indexOut++
	}

	return out[:indexOut]
}

// rank drops nil entries (words without vectors) and sorts the remainder
// by information gain, highest first.
func (c *contextualItemClassifier) rank(in []*scoredWord) []scoredWord {
	i := 0
	filtered := make([]scoredWord, len(in))
	for _, w := range in {
		if w == nil {
			continue
		}

		filtered[i] = *w
		i++
	}
	out := filtered[:i]
	sort.Slice(out, func(a, b int) bool { return out[a].informationGain > out[b].informationGain })
	return out
}

// scoreWords scores every word against the candidate targets of the given
// property. words and vectors must be parallel slices; entries may be nil
// when no vector exists for a word.
func (c *contextualItemClassifier) scoreWords(words []string, vectors [][]float32,
	targetProp string,
) ([]*scoredWord, error) {
	if len(words) != len(vectors) {
		return nil, fmt.Errorf("fatal: word list (l=%d) and vector list (l=%d) have different lengths",
			len(words), len(vectors))
	}

	out := make([]*scoredWord, len(words))
	for i := range words {
		word := strings.ToLower(words[i])
		sw, err := c.scoreWord(word, vectors[i], targetProp)
		if err != nil {
			return nil, fmt.Errorf("score word '%s': %w", word, err)
		}

		// accept nil-entries for now, they will be removed in ranking/deduping
		out[i] = sw
	}

	return out, nil
}

// scoreWord computes the word's minimum cosine distance to any target and
// its information gain (average distance minus minimum). A nil vector
// yields (nil, nil) — the word is simply unknown to the vectorizer.
func (c *contextualItemClassifier) scoreWord(word string, vector []float32,
	targetProp string,
) (*scoredWord, error) {
	var all []float32
	minimum := float32(1000000.00)

	if vector == nil {
		return nil, nil
	}

	targets, ok := c.context.targets[targetProp]
	if !ok {
		return nil, fmt.Errorf("fatal: targets for prop '%s' not found", targetProp)
	}

	for _, target := range targets {
		dist, err := cosineDist(vector, target.Vector)
		if err != nil {
			return nil, fmt.Errorf("calculate cosine distance: %w", err)
		}

		all = append(all, dist)

		if dist < minimum {
			minimum = dist
		}
	}

	return &scoredWord{word: word, distance: minimum, informationGain: avg(all) - minimum}, nil
}

// avg returns the arithmetic mean of in. An empty slice divides by zero
// and yields NaN; callers always pass at least one element here.
func avg(in []float32) float32 {
	var sum float32
	for _, curr := range in {
		sum += curr
	}

	return sum / float32(len(in))
}

// isInIgPercentile reports whether needle is among the top `percentage`
// percent of ranked words (by information gain) for the target property.
func (c *contextualItemClassifier) isInIgPercentile(percentage int, needle string, target string) bool {
	cutoff := int(float32(percentage) / float32(100) * float32(len(c.rankedWords[target])))

	// no need to check if key exists, guaranteed from run
	selection := c.rankedWords[target][:cutoff]

	for _, hay := range selection {
		if needle == hay.word {
			return true
		}
	}

	return false
}

// isInTfPercentile reports whether needle is among the top `percentage`
// percent of terms by tf-idf score.
func (c *contextualItemClassifier) isInTfPercentile(tf []TermWithTfIdf, percentage int, needle string) bool {
	cutoff := int(float32(percentage) / float32(100) * float32(len(tf)))
	selection := tf[:cutoff]

	for _, hay := range selection {
		if needle == hay.Term {
			return true
		}
	}

	return false
}

// cosineSim computes the cosine similarity of two equal-length vectors.
// NOTE(review): an all-zero vector makes the denominator zero and the
// result NaN — confirm callers never pass zero vectors, or guard here.
func cosineSim(a, b []float32) (float32, error) {
	if len(a) != len(b) {
		return 0, fmt.Errorf("vectors have different dimensions")
	}

	var (
		sumProduct float64
		sumASquare float64
		sumBSquare float64
	)

	for i := range a {
		sumProduct += float64(a[i] * b[i])
		sumASquare += float64(a[i] * a[i])
		sumBSquare += float64(b[i] * b[i])
	}

	return float32(sumProduct / (math.Sqrt(sumASquare) * math.Sqrt(sumBSquare))), nil
}

// cosineDist converts cosine similarity into a distance (1 - similarity).
func cosineDist(a, b []float32) (float32, error) {
	sim, err := cosineSim(a, b)
	if err != nil {
		return 0, err
	}

	return 1 - sim, nil
}
package classification

import (
	"context"
	"encoding/json"
	"fmt"
	"strings"
	"testing"
	"time"

	"github.com/go-openapi/strfmt"
	"github.com/pkg/errors"
	"github.com/sirupsen/logrus/hooks/test"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/schema/crossref"
	testhelper "github.com/weaviate/weaviate/test/helper"
	"github.com/weaviate/weaviate/usecases/auth/authorization/mocks"
	usecasesclassfication "github.com/weaviate/weaviate/usecases/classification"
)

// TestContextualClassifier_ParseSettings covers ParseClassifierSettings:
// absent settings must yield all defaults, and explicitly provided
// json.Number values must be parsed into the typed settings struct.
func TestContextualClassifier_ParseSettings(t *testing.T) {
	t.Run("should parse with default values with empty settings are passed", func(t *testing.T) {
		// given
		classifier := New(&fakeVectorizer{})
		params := &models.Classification{
			Class:              "Article",
			BasedOnProperties:  []string{"description"},
			ClassifyProperties: []string{"exactCategory", "mainCategory"},
			Type:               "text2vec-contextionary-contextual",
		}

		// when
		err := classifier.ParseClassifierSettings(params)

		// then
		assert.Nil(t, err)
		settings := params.Settings
		assert.NotNil(t, settings)
		paramsContextual, ok := settings.(*ParamsContextual)
		assert.NotNil(t, paramsContextual)
		assert.True(t, ok)
		assert.Equal(t, int32(3), *paramsContextual.MinimumUsableWords)
		assert.Equal(t, int32(50), *paramsContextual.InformationGainCutoffPercentile)
		assert.Equal(t, int32(3), *paramsContextual.InformationGainMaximumBoost)
		assert.Equal(t, int32(80), *paramsContextual.TfidfCutoffPercentile)
	})

	t.Run("should parse classifier settings", func(t *testing.T) {
		// given
		classifier := New(&fakeVectorizer{})
		params := &models.Classification{
			Class:              "Article",
			BasedOnProperties:  []string{"description"},
			ClassifyProperties: []string{"exactCategory", "mainCategory"},
			Type:               "text2vec-contextionary-contextual",
			Settings: map[string]interface{}{
				"minimumUsableWords":              json.Number("1"),
				"informationGainCutoffPercentile": json.Number("2"),
				"informationGainMaximumBoost":     json.Number("3"),
				"tfidfCutoffPercentile":           json.Number("4"),
			},
		}

		// when
		err := classifier.ParseClassifierSettings(params)

		// then
		assert.Nil(t, err)
		assert.NotNil(t, params.Settings)
		settings, ok := params.Settings.(*ParamsContextual)
		assert.NotNil(t, settings)
		assert.True(t, ok)
		assert.Equal(t, int32(1), *settings.MinimumUsableWords)
		assert.Equal(t, int32(2), *settings.InformationGainCutoffPercentile)
		assert.Equal(t, int32(3), *settings.InformationGainMaximumBoost)
		assert.Equal(t, int32(4), *settings.TfidfCutoffPercentile)
	})
}

// TestContextualClassifier_Classify runs the classification end-to-end
// against fakes: a happy path, a failing repo, and an empty input set.
func TestContextualClassifier_Classify(t *testing.T) {
	var id strfmt.UUID
	// so we can reuse it for follow up requests, such as checking the status

	t.Run("with valid data", func(t *testing.T) {
		sg := &fakeSchemaGetter{testSchema()}
		repo := newFakeClassificationRepo()
		authorizer := mocks.NewMockAuthorizer()

		vectorRepo := newFakeVectorRepoContextual(testDataToBeClassified(), testDataPossibleTargets())
		logger, _ := test.NewNullLogger()

		vectorizer := &fakeVectorizer{words: testDataVectors()}
		modulesProvider := NewFakeModulesProvider(vectorizer)
		classifier := usecasesclassfication.New(sg, repo, vectorRepo, authorizer, logger, modulesProvider)

		contextual := "text2vec-contextionary-contextual"
		params := models.Classification{
			Class:              "Article",
			BasedOnProperties:  []string{"description"},
			ClassifyProperties: []string{"exactCategory", "mainCategory"},
			Type:               contextual,
		}

		t.Run("scheduling a classification", func(t *testing.T) {
			class, err := classifier.Schedule(context.Background(), nil, params)
			require.Nil(t, err, "should not error")
			require.NotNil(t, class)

			assert.Len(t, class.ID, 36, "an id was assigned")
			id = class.ID
		})

		t.Run("retrieving the same classification by id", func(t *testing.T) {
			class, err := classifier.Get(context.Background(), nil, id)
			require.Nil(t, err)
			require.NotNil(t, class)
			assert.Equal(t, id, class.ID)
		})

		// TODO: improve by polling instead
		time.Sleep(500 * time.Millisecond)

		t.Run("status is now completed", func(t *testing.T) {
			class, err := classifier.Get(context.Background(), nil, id)
			require.Nil(t, err)
			require.NotNil(t, class)
			assert.Equal(t, models.ClassificationStatusCompleted, class.Status)
		})

		t.Run("the classifier updated the actions with the classified references", func(t *testing.T) {
			vectorRepo.Lock()
			require.Len(t, vectorRepo.db, 6)
			vectorRepo.Unlock()

			t.Run("food", func(t *testing.T) {
				idArticleFoodOne := "06a1e824-889c-4649-97f9-1ed3fa401d8e"
				idArticleFoodTwo := "6402e649-b1e0-40ea-b192-a64eab0d5e56"

				checkRef(t, vectorRepo, idArticleFoodOne, "ExactCategory", "exactCategory", idCategoryFoodAndDrink)
				checkRef(t, vectorRepo, idArticleFoodTwo, "MainCategory", "mainCategory", idMainCategoryFoodAndDrink)
			})

			t.Run("politics", func(t *testing.T) {
				idArticlePoliticsOne := "75ba35af-6a08-40ae-b442-3bec69b355f9"
				idArticlePoliticsTwo := "f850439a-d3cd-4f17-8fbf-5a64405645cd"

				checkRef(t, vectorRepo, idArticlePoliticsOne, "ExactCategory", "exactCategory", idCategoryPolitics)
				checkRef(t, vectorRepo, idArticlePoliticsTwo, "MainCategory", "mainCategory", idMainCategoryPoliticsAndSociety)
			})

			t.Run("society", func(t *testing.T) {
				idArticleSocietyOne := "a2bbcbdc-76e1-477d-9e72-a6d2cfb50109"
				idArticleSocietyTwo := "069410c3-4b9e-4f68-8034-32a066cb7997"

				checkRef(t, vectorRepo, idArticleSocietyOne, "ExactCategory", "exactCategory", idCategorySociety)
				checkRef(t, vectorRepo, idArticleSocietyTwo, "MainCategory", "mainCategory", idMainCategoryPoliticsAndSociety)
			})
		})
	})

	t.Run("when errors occur during classification", func(t *testing.T) {
		sg := &fakeSchemaGetter{testSchema()}
		repo := newFakeClassificationRepo()
		authorizer := mocks.NewMockAuthorizer()
		vectorRepo := newFakeVectorRepoKNN(testDataToBeClassified(), testDataAlreadyClassified())
		vectorRepo.errorOnAggregate = errors.New("something went wrong")
		logger, _ := test.NewNullLogger()
		classifier := usecasesclassfication.New(sg, repo, vectorRepo, authorizer, logger, nil)

		params := models.Classification{
			Class:              "Article",
			BasedOnProperties:  []string{"description"},
			ClassifyProperties: []string{"exactCategory", "mainCategory"},
			Settings: map[string]interface{}{
				"k": json.Number("1"),
			},
		}

		t.Run("scheduling a classification", func(t *testing.T) {
			class, err := classifier.Schedule(context.Background(), nil, params)
			require.Nil(t, err, "should not error")
			require.NotNil(t, class)

			assert.Len(t, class.ID, 36, "an id was assigned")
			id = class.ID
		})

		waitForStatusToNoLongerBeRunning(t, classifier, id)

		t.Run("status is now failed", func(t *testing.T) {
			class, err := classifier.Get(context.Background(), nil, id)
			require.Nil(t, err)
			require.NotNil(t, class)
			assert.Equal(t, models.ClassificationStatusFailed, class.Status)
			expectedErrStrings := []string{
				"classification failed: ",
				"classify Article/75ba35af-6a08-40ae-b442-3bec69b355f9: something went wrong",
				"classify Article/f850439a-d3cd-4f17-8fbf-5a64405645cd: something went wrong",
				"classify Article/a2bbcbdc-76e1-477d-9e72-a6d2cfb50109: something went wrong",
				"classify Article/069410c3-4b9e-4f68-8034-32a066cb7997: something went wrong",
				"classify Article/06a1e824-889c-4649-97f9-1ed3fa401d8e: something went wrong",
				"classify Article/6402e649-b1e0-40ea-b192-a64eab0d5e56: something went wrong",
			}
			for _, msg := range expectedErrStrings {
				assert.Contains(t, class.Error, msg)
			}
		})
	})

	t.Run("when there is nothing to be classified", func(t *testing.T) {
		sg := &fakeSchemaGetter{testSchema()}
		repo := newFakeClassificationRepo()
		authorizer := mocks.NewMockAuthorizer()
		vectorRepo := newFakeVectorRepoKNN(nil, testDataAlreadyClassified())
		logger, _ := test.NewNullLogger()
		classifier := usecasesclassfication.New(sg, repo, vectorRepo, authorizer, logger, nil)

		params := models.Classification{
			Class:              "Article",
			BasedOnProperties:  []string{"description"},
			ClassifyProperties: []string{"exactCategory", "mainCategory"},
			Settings: map[string]interface{}{
				"k": json.Number("1"),
			},
		}

		t.Run("scheduling a classification", func(t *testing.T) {
			class, err := classifier.Schedule(context.Background(), nil, params)
			require.Nil(t, err, "should not error")
			require.NotNil(t, class)

			assert.Len(t, class.ID, 36, "an id was assigned")
			id = class.ID
		})

		waitForStatusToNoLongerBeRunning(t, classifier, id)

		t.Run("status is now failed", func(t *testing.T) {
			class, err := classifier.Get(context.Background(), nil, id)
			require.Nil(t, err)
			require.NotNil(t, class)
			assert.Equal(t, models.ClassificationStatusFailed, class.Status)
			expectedErr := "classification failed: " +
				"no classes to be classified - did you run a previous classification already?"
			assert.Equal(t, expectedErr, class.Error)
		})
	})
}

// waitForStatusToNoLongerBeRunning polls the classification until its
// status leaves "running" or the 20s timeout elapses.
func waitForStatusToNoLongerBeRunning(t *testing.T, classifier *usecasesclassfication.Classifier, id strfmt.UUID) {
	testhelper.AssertEventuallyEqualWithFrequencyAndTimeout(t, true, func() interface{} {
		class, err := classifier.Get(context.Background(), nil, id)
		require.Nil(t, err)
		require.NotNil(t, class)

		return class.Status != models.ClassificationStatusRunning
	}, 100*time.Millisecond, 20*time.Second, "wait until status in no longer running")
}

// genericFakeRepo abstracts over the fake repos so checkRef can work with
// both the KNN and the contextual variant.
type genericFakeRepo interface {
	get(strfmt.UUID) (*models.Object, bool)
}

// checkRef asserts that the stored object `source` carries exactly one
// cross-reference on propName pointing at target of targetClass.
func checkRef(t *testing.T, repo genericFakeRepo, source, targetClass, propName, target string) {
	object, ok := repo.get(strfmt.UUID(source))
	require.True(t, ok, "object must be present")

	schema, ok := object.Properties.(map[string]interface{})
	require.True(t, ok, "schema must be map")

	prop, ok := schema[propName]
	require.True(t, ok, "ref prop must be present")

	refs, ok := prop.(models.MultipleRef)
	require.True(t, ok, "ref prop must be models.MultipleRef")
	require.Len(t, refs, 1, "refs must have len 1")

	assert.Equal(t, crossref.NewLocalhost(targetClass, strfmt.UUID(target)).String(), refs[0].Beacon.String(), "beacon must match")
}

// fakeVectorizer serves canned word vectors from a fixed map.
type fakeVectorizer struct {
	words map[string][]float32
}

// MultiVectorForWord returns one vector per word; unknown words keep a
// nil entry so callers can detect "no vector available".
func (f *fakeVectorizer) MultiVectorForWord(ctx context.Context, words []string) ([][]float32, error) {
	out := make([][]float32, len(words))
	for i, word := range words {
		vector, ok := f.words[strings.ToLower(word)]
		if !ok {
			continue
		}
		out[i] = vector
	}
	return out, nil
}

// VectorOnlyForCorpi vectorizes the first corpus by averaging the vectors
// of its space-separated words (overrides are ignored by this fake).
func (f *fakeVectorizer) VectorOnlyForCorpi(ctx context.Context, corpi []string,
	overrides map[string]string,
) ([]float32, error) {
	words := strings.Split(corpi[0], " ")
	if len(words) == 0 {
		return nil, fmt.Errorf("vector for corpi called without words")
	}

	vectors, _ := f.MultiVectorForWord(ctx, words)

	return f.centroid(vectors, words)
}

// centroid averages the non-nil vectors element-wise. It errors when the
// input is empty or when every vector is nil (no usable words).
func (f *fakeVectorizer) centroid(in [][]float32, words []string) ([]float32, error) {
	withoutNilVectors := make([][]float32, len(in))
	if len(in) == 0 {
		return nil, fmt.Errorf("got nil vector list for words: %v", words)
	}

	i := 0
	for _, vec := range in {
		if vec == nil {
			continue
		}

		withoutNilVectors[i] = vec
		i++
	}
	withoutNilVectors = withoutNilVectors[:i]
	if i == 0 {
		return nil, fmt.Errorf("no usable words: %v", words)
	}

	// take the first vector assuming all have the same length
	out := make([]float32, len(withoutNilVectors[0]))

	for _, vec := range withoutNilVectors {
		for i, dim := range vec {
			out[i] = out[i] + dim
		}
	}

	for i, sum := range out {
		out[i] = sum / float32(len(withoutNilVectors))
	}

	return out, nil
}
package classification

import (
	"context"
	"fmt"
	"sort"
	"sync"
	"time"

	"github.com/go-openapi/strfmt"
	"github.com/weaviate/weaviate/entities/additional"
	"github.com/weaviate/weaviate/entities/dto"
	libfilters "github.com/weaviate/weaviate/entities/filters"
	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/modulecapabilities"
	"github.com/weaviate/weaviate/entities/schema"
	"github.com/weaviate/weaviate/entities/search"
	usecasesclassfication "github.com/weaviate/weaviate/usecases/classification"
	"github.com/weaviate/weaviate/usecases/objects"
	"github.com/weaviate/weaviate/usecases/sharding"
)

// fakeSchemaGetter serves a fixed schema; methods the tests never exercise
// panic with "not implemented".
type fakeSchemaGetter struct {
	schema schema.Schema
}

func (f *fakeSchemaGetter) GetSchemaSkipAuth() schema.Schema {
	return f.schema
}

func (f *fakeSchemaGetter) ReadOnlyClass(class string) *models.Class {
	return f.schema.GetClass(class)
}

func (f *fakeSchemaGetter) ResolveAlias(string) string {
	return ""
}

func (f *fakeSchemaGetter) GetAliasesForClass(string) []*models.Alias {
	return nil
}

func (f *fakeSchemaGetter) CopyShardingState(class string) *sharding.State {
	panic("not implemented")
}

func (f *fakeSchemaGetter) ShardOwner(class, shard string) (string, error)      { return "", nil }
func (f *fakeSchemaGetter) ShardReplicas(class, shard string) ([]string, error) { return nil, nil }

// TenantsShards reports every requested tenant as HOT.
func (f *fakeSchemaGetter) TenantsShards(_ context.Context, class string, tenants ...string) (map[string]string, error) {
	res := map[string]string{}
	for _, t := range tenants {
		res[t] = models.TenantActivityStatusHOT
	}
	return res, nil
}

// OptimisticTenantStatus reports the single requested tenant as HOT.
func (f *fakeSchemaGetter) OptimisticTenantStatus(_ context.Context, class string, tenant string) (map[string]string, error) {
	res := map[string]string{}
	res[tenant] = models.TenantActivityStatusHOT
	return res, nil
}

func (f *fakeSchemaGetter) ShardFromUUID(class string, uuid []byte) string { return "" }

func (f *fakeSchemaGetter) Nodes() []string {
	panic("not implemented")
}

func (f *fakeSchemaGetter) NodeName() string {
	panic("not implemented")
}

func (f *fakeSchemaGetter) ClusterHealthScore() int {
	panic("not implemented")
}

func (f *fakeSchemaGetter) Statistics() map[string]any {
	panic("not implemented")
}

func (f *fakeSchemaGetter) ResolveParentNodes(string, string,
) (map[string]string, error) {
	panic("not implemented")
}

// fakeClassificationRepo is an in-memory, mutex-guarded store of
// classification runs keyed by id.
type fakeClassificationRepo struct {
	sync.Mutex
	db map[strfmt.UUID]models.Classification
}

func newFakeClassificationRepo() *fakeClassificationRepo {
	return &fakeClassificationRepo{
		db: map[strfmt.UUID]models.Classification{},
	}
}

func (f *fakeClassificationRepo) Put(ctx context.Context, class models.Classification) error {
	f.Lock()
	defer f.Unlock()

	f.db[class.ID] = class
	return nil
}

// Get returns (nil, nil) for unknown ids rather than an error.
func (f *fakeClassificationRepo) Get(ctx context.Context, id strfmt.UUID) (*models.Classification, error) {
	f.Lock()
	defer f.Unlock()

	class, ok := f.db[id]
	if !ok {
		return nil, nil
	}

	return &class, nil
}

func newFakeVectorRepoKNN(unclassified, classified search.Results) *fakeVectorRepoKNN {
	return &fakeVectorRepoKNN{
		unclassified: unclassified,
		classified:   classified,
		db:           map[strfmt.UUID]*models.Object{},
	}
}

// read requests are specified through unclassified and classified,
// write requests (Put[Kind]) are stored in the db map
type fakeVectorRepoKNN struct {
	sync.Mutex
	unclassified      []search.Result
	classified        []search.Result
	db                map[strfmt.UUID]*models.Object
	errorOnAggregate  error
	batchStorageDelay time.Duration
}

func (f *fakeVectorRepoKNN) GetUnclassified(ctx context.Context,
	class string, properties []string, propsToReturn []string,
	filter *libfilters.LocalFilter,
) ([]search.Result, error) {
	f.Lock()
	defer f.Unlock()
	return f.unclassified, nil
}

// AggregateNeighbors supports only k=1: it sorts the classified fixtures
// by cosine similarity to the query vector and reports the refs of the
// closest one. errorOnAggregate, when set, is returned alongside the data.
func (f *fakeVectorRepoKNN) AggregateNeighbors(ctx context.Context, vector []float32,
	class string, properties []string, k int,
	filter *libfilters.LocalFilter,
) ([]usecasesclassfication.NeighborRef, error) {
	f.Lock()
	defer f.Unlock()

	// simulate that this takes some time
	time.Sleep(1 * time.Millisecond)

	if k != 1 {
		return nil, fmt.Errorf("fake vector repo only supports k=1")
	}

	results := f.classified
	sort.SliceStable(results, func(i, j int) bool {
		simI, err := cosineSim(results[i].Vector, vector)
		if err != nil {
			panic(err.Error())
		}

		simJ, err := cosineSim(results[j].Vector, vector)
		if err != nil {
			panic(err.Error())
		}
		return simI > simJ
	})

	var out []usecasesclassfication.NeighborRef
	schema := results[0].Schema.(map[string]interface{})
	for _, propName := range properties {
		prop, ok := schema[propName]
		if !ok {
			return nil, fmt.Errorf("missing prop %s", propName)
		}

		refs := prop.(models.MultipleRef)
		if len(refs) != 1 {
			return nil, fmt.Errorf("wrong length %d", len(refs))
		}

		out = append(out, usecasesclassfication.NeighborRef{
			Beacon:       refs[0].Beacon,
			WinningCount: 1,
			OverallCount: 1,
			LosingCount:  1,
			Property:     propName,
		})
	}

	return out, f.errorOnAggregate
}

func (f *fakeVectorRepoKNN) ZeroShotSearch(ctx context.Context, vector []float32,
	class string, properties []string,
	filter *libfilters.LocalFilter,
) ([]search.Result, error) {
	panic("not implemented")
}

func (f *fakeVectorRepoKNN) VectorSearch(ctx context.Context,
	params dto.GetParams, targetVectors []string, searchVectors []models.Vector,
) ([]search.Result, error) {
	f.Lock()
	defer f.Unlock()
	return nil, fmt.Errorf("vector class search not implemented in fake")
}

// BatchPutObjects stores the batch in the db map; continues beyond this
// chunk of the file.
func (f *fakeVectorRepoKNN) BatchPutObjects(ctx context.Context, objects objects.BatchObjects, repl *additional.ReplicationProperties, schemaVersion uint64) (objects.BatchObjects, error) {
	f.Lock()
	defer f.Unlock()

	if f.batchStorageDelay > 0 {
time.Sleep(f.batchStorageDelay) + } + + for _, batchObject := range objects { + f.db[batchObject.Object.ID] = batchObject.Object + } + return objects, nil +} + +func (f *fakeVectorRepoKNN) get(id strfmt.UUID) (*models.Object, bool) { + f.Lock() + defer f.Unlock() + t, ok := f.db[id] + return t, ok +} + +func newFakeVectorRepoContextual(unclassified, targets search.Results) *fakeVectorRepoContextual { + return &fakeVectorRepoContextual{ + unclassified: unclassified, + targets: targets, + db: map[strfmt.UUID]*models.Object{}, + } +} + +// read requests are specified through unclassified and classified, +// write requests (Put[Kind]) are stored in the db map +type fakeVectorRepoContextual struct { + sync.Mutex + unclassified []search.Result + targets []search.Result + db map[strfmt.UUID]*models.Object + errorOnAggregate error +} + +func (f *fakeVectorRepoContextual) get(id strfmt.UUID) (*models.Object, bool) { + f.Lock() + defer f.Unlock() + t, ok := f.db[id] + return t, ok +} + +func (f *fakeVectorRepoContextual) GetUnclassified(ctx context.Context, + class string, properties []string, propsToReturn []string, + filter *libfilters.LocalFilter, +) ([]search.Result, error) { + return f.unclassified, nil +} + +func (f *fakeVectorRepoContextual) AggregateNeighbors(ctx context.Context, vector []float32, + class string, properties []string, k int, + filter *libfilters.LocalFilter, +) ([]usecasesclassfication.NeighborRef, error) { + panic("not implemented") +} + +func (f *fakeVectorRepoContextual) ZeroShotSearch(ctx context.Context, vector []float32, + class string, properties []string, + filter *libfilters.LocalFilter, +) ([]search.Result, error) { + panic("not implemented") +} + +func (f *fakeVectorRepoContextual) BatchPutObjects(ctx context.Context, objects objects.BatchObjects, repl *additional.ReplicationProperties, schemaVersion uint64) (objects.BatchObjects, error) { + f.Lock() + defer f.Unlock() + for _, batchObject := range objects { + f.db[batchObject.Object.ID] = 
batchObject.Object + } + return objects, nil +} + +func (f *fakeVectorRepoContextual) VectorSearch(ctx context.Context, + params dto.GetParams, targetVectors []string, searchVectors []models.Vector, +) ([]search.Result, error) { + if searchVectors == nil { + filteredTargets := matchClassName(f.targets, params.ClassName) + return filteredTargets, nil + } + + // simulate that this takes some time + time.Sleep(5 * time.Millisecond) + + switch searchVector := searchVectors[0].(type) { + case []float32: + filteredTargets := matchClassName(f.targets, params.ClassName) + results := filteredTargets + sort.SliceStable(results, func(i, j int) bool { + simI, err := cosineSim(results[i].Vector, searchVector) + if err != nil { + panic(err.Error()) + } + + simJ, err := cosineSim(results[j].Vector, searchVector) + if err != nil { + panic(err.Error()) + } + return simI > simJ + }) + + if len(results) == 0 { + return nil, f.errorOnAggregate + } + + out := []search.Result{ + results[0], + } + + return out, f.errorOnAggregate + default: + return nil, fmt.Errorf("unsupported search vector type: %T", searchVectors[0]) + } +} + +func matchClassName(in []search.Result, className string) []search.Result { + var out []search.Result + for _, item := range in { + if item.ClassName == className { + out = append(out, item) + } + } + + return out +} + +type fakeModulesProvider struct { + contextualClassifier modulecapabilities.Classifier +} + +func (fmp *fakeModulesProvider) VectorFromInput(ctx context.Context, className string, input string) ([]float32, error) { + panic("not implemented") +} + +func NewFakeModulesProvider(vectorizer *fakeVectorizer) *fakeModulesProvider { + return &fakeModulesProvider{New(vectorizer)} +} + +func (fmp *fakeModulesProvider) ParseClassifierSettings(name string, + params *models.Classification, +) error { + return fmp.contextualClassifier.ParseClassifierSettings(params) +} + +func (fmp *fakeModulesProvider) GetClassificationFn(className, name string, + params 
modulecapabilities.ClassifyParams, +) (modulecapabilities.ClassifyItemFn, error) { + return fmp.contextualClassifier.ClassifyFn(params) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/schema_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/schema_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..879508a7f35da7d9e0fcc5d2418e8705625c0245 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/schema_for_test.go @@ -0,0 +1,234 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package classification + +import ( + "fmt" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/search" +) + +func testSchema() schema.Schema { + return schema.Schema{ + Objects: &models.Schema{ + Classes: []*models.Class{ + { + Class: "ExactCategory", + }, + { + Class: "MainCategory", + }, + { + Class: "Article", + Properties: []*models.Property{ + { + Name: "description", + DataType: []string{string(schema.DataTypeText)}, + }, + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "exactCategory", + DataType: []string{"ExactCategory"}, + }, + { + Name: "mainCategory", + DataType: []string{"MainCategory"}, + }, + { + Name: "categories", + DataType: []string{"ExactCategory"}, + }, + { + Name: "anyCategory", + DataType: []string{"MainCategory", "ExactCategory"}, + }, + }, + }, + }, + }, + } +} + +// vector position close to [1,0,0] means -> politics, [0,1,0] means -> society, [0, 0, 1] 
-> food&drink +func testDataToBeClassified() search.Results { + return search.Results{ + search.Result{ + ID: "75ba35af-6a08-40ae-b442-3bec69b355f9", + ClassName: "Article", + Vector: []float32{0.78, 0, 0}, + Schema: map[string]interface{}{ + "description": "Barack Obama is a former US president", + }, + }, + search.Result{ + ID: "f850439a-d3cd-4f17-8fbf-5a64405645cd", + ClassName: "Article", + Vector: []float32{0.90, 0, 0}, + Schema: map[string]interface{}{ + "description": "Michelle Obama is Barack Obamas wife", + }, + }, + search.Result{ + ID: "a2bbcbdc-76e1-477d-9e72-a6d2cfb50109", + ClassName: "Article", + Vector: []float32{0, 0.78, 0}, + Schema: map[string]interface{}{ + "description": "Johnny Depp is an actor", + }, + }, + search.Result{ + ID: "069410c3-4b9e-4f68-8034-32a066cb7997", + ClassName: "Article", + Vector: []float32{0, 0.90, 0}, + Schema: map[string]interface{}{ + "description": "Brad Pitt starred in a Quentin Tarantino movie", + }, + }, + search.Result{ + ID: "06a1e824-889c-4649-97f9-1ed3fa401d8e", + ClassName: "Article", + Vector: []float32{0, 0, 0.78}, + Schema: map[string]interface{}{ + "description": "Ice Cream often contains a lot of sugar", + }, + }, + search.Result{ + ID: "6402e649-b1e0-40ea-b192-a64eab0d5e56", + ClassName: "Article", + Vector: []float32{0, 0, 0.90}, + Schema: map[string]interface{}{ + "description": "French Fries are more common in Belgium and the US than in France", + }, + }, + } +} + +func testDataVectors() map[string][]float32 { + return map[string][]float32{ + "barack": {0.7, 0, 0}, + "michelle": {0.7, 0, 0}, + "obama": {1.0, 0, 0}, + "us": {0.6, 0.5, 0.4}, + "depp": {0.1, 0.8, 0.2}, + "actor": {0.1, 0.9, 0.0}, + "brad": {0.1, 0.8, 0.2}, + "starred": {0.1, 0.9, 0.0}, + "ice": {0, 0.1, 0.9}, + "cream": {0, 0.1, 0.8}, + "sugar": {0.3, 0.2, 0.9}, + "french": {0.5, 0.5, 0.4}, + "fries": {0, 0.1, 0.95}, + "belgium": {0.3, 0.3, 0.2}, + } +} + +const ( + idMainCategoryPoliticsAndSociety = 
"39c6abe3-4bbe-4c4e-9e60-ca5e99ec6b4e" + idMainCategoryFoodAndDrink = "5a3d909a-4f0d-4168-8f5c-cd3074d1e79a" + idCategoryPolitics = "1b204f16-7da6-44fd-bbd2-8cc4a7414bc3" + idCategorySociety = "ec500f39-1dc9-4580-9bd1-55a8ea8e37a2" + idCategoryFoodAndDrink = "027b708a-31ca-43ea-9001-88bec864c79c" +) + +// only used for contextual type classification +func testDataPossibleTargets() search.Results { + return search.Results{ + search.Result{ + ID: idMainCategoryPoliticsAndSociety, + ClassName: "MainCategory", + Vector: []float32{1.01, 1.01, 0}, + Schema: map[string]interface{}{ + "name": "Politics and Society", + }, + }, + search.Result{ + ID: idMainCategoryFoodAndDrink, + ClassName: "MainCategory", + Vector: []float32{0, 0, 0.99}, + Schema: map[string]interface{}{ + "name": "Food and Drinks", + }, + }, + search.Result{ + ID: idCategoryPolitics, + ClassName: "ExactCategory", + Vector: []float32{0.99, 0, 0}, + Schema: map[string]interface{}{ + "name": "Politics", + }, + }, + search.Result{ + ID: idCategorySociety, + ClassName: "ExactCategory", + Vector: []float32{0, 0.90, 0}, + Schema: map[string]interface{}{ + "name": "Society", + }, + }, + search.Result{ + ID: idCategoryFoodAndDrink, + ClassName: "ExactCategory", + Vector: []float32{0, 0, 0.99}, + Schema: map[string]interface{}{ + "name": "Food and Drink", + }, + }, + } +} + +func beaconRef(target string) *models.SingleRef { + beacon := fmt.Sprintf("weaviate://localhost/%s", target) + return &models.SingleRef{Beacon: strfmt.URI(beacon)} +} + +// only used for knn-type +func testDataAlreadyClassified() search.Results { + return search.Results{ + search.Result{ + ID: "8aeecd06-55a0-462c-9853-81b31a284d80", + ClassName: "Article", + Vector: []float32{1, 0, 0}, + Schema: map[string]interface{}{ + "description": "This article talks about politics", + "exactCategory": models.MultipleRef{beaconRef(idCategoryPolitics)}, + "mainCategory": models.MultipleRef{beaconRef(idMainCategoryPoliticsAndSociety)}, + }, + }, + 
search.Result{ + ID: "9f4c1847-2567-4de7-8861-34cf47a071ae", + ClassName: "Article", + Vector: []float32{0, 1, 0}, + Schema: map[string]interface{}{ + "description": "This articles talks about society", + "exactCategory": models.MultipleRef{beaconRef(idCategorySociety)}, + "mainCategory": models.MultipleRef{beaconRef(idMainCategoryPoliticsAndSociety)}, + }, + }, + search.Result{ + ID: "926416ec-8fb1-4e40-ab8c-37b226b3d68e", + ClassName: "Article", + Vector: []float32{0, 0, 1}, + Schema: map[string]interface{}{ + "description": "This article talks about food", + "exactCategory": models.MultipleRef{beaconRef(idCategoryFoodAndDrink)}, + "mainCategory": models.MultipleRef{beaconRef(idMainCategoryFoodAndDrink)}, + }, + }, + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/splitter.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/splitter.go new file mode 100644 index 0000000000000000000000000000000000000000..f42cda178464057603f8ecdda73ab24854cd161a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/splitter.go @@ -0,0 +1,32 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package classification + +// TODO: This code is duplicated across weaviate and contextionary which makes +// changes risky. 
Can we find a single source of truth for this logic + +import ( + "strings" + "unicode" +) + +func newSplitter() *splitter { + return &splitter{} +} + +type splitter struct{} + +func (s *splitter) Split(corpus string) []string { + return strings.FieldsFunc(corpus, func(c rune) bool { + return !unicode.IsLetter(c) && !unicode.IsNumber(c) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/tf_idf.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/tf_idf.go new file mode 100644 index 0000000000000000000000000000000000000000..b8d9dc74c994904b8b6a6d1726769d07a146ab03 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/tf_idf.go @@ -0,0 +1,192 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package classification + +import ( + "fmt" + "math" + "sort" + "strings" +) + +// warning, not thread-safe for this spike + +type TfIdfCalculator struct { + size int + documents []string + documentLengths []uint + docPointer int + terms map[string][]uint16 + termIdf map[string]float32 +} + +func NewTfIdfCalculator(size int) *TfIdfCalculator { + return &TfIdfCalculator{ + size: size, + documents: make([]string, size), + documentLengths: make([]uint, size), + terms: make(map[string][]uint16), + termIdf: make(map[string]float32), + } +} + +func (c *TfIdfCalculator) AddDoc(doc string) error { + if c.docPointer > c.size { + return fmt.Errorf("doc size exceeded") + } + + c.documents[c.docPointer] = doc + c.docPointer++ + return nil +} + +func (c *TfIdfCalculator) Calculate() { + for i := range c.documents { + c.analyzeDoc(i) + } + + for term, frequencies := range c.terms { + var contained uint + for _, frequency := range frequencies { + if frequency > 0 { + contained++ + } + } + + c.termIdf[term] = float32(math.Log10(float64(c.size) / float64(contained))) + } +} + +func (c *TfIdfCalculator) analyzeDoc(docIndex int) { + terms := newSplitter().Split(c.documents[docIndex]) + for i, term := range terms { + term = strings.ToLower(term) + frequencies := c.getOrInitTerm(term) + frequencies[docIndex] = frequencies[docIndex] + 1 + c.documentLengths[docIndex] = uint(i + 1) + c.terms[term] = frequencies + } +} + +func (c *TfIdfCalculator) getOrInitTerm(term string) []uint16 { + frequencies, ok := c.terms[term] + if !ok { + frequencies := make([]uint16, c.size) + c.terms[term] = frequencies + return frequencies + } + + return frequencies +} + +func (c *TfIdfCalculator) Get(term string, doc int) float32 { + term = strings.ToLower(term) + frequencies, ok := c.terms[term] + if !ok { + return 0 + } + + tf := float32(frequencies[doc]) / float32(c.documentLengths[doc]) + idf := c.termIdf[term] + + return tf * idf +} + +func (c *TfIdfCalculator) 
GetAllTerms(docIndex int) []TermWithTfIdf { + terms := newSplitter().Split(c.documents[docIndex]) + terms = c.lowerCaseAndDedup(terms) + + out := make([]TermWithTfIdf, len(terms)) + for i, term := range terms { + out[i] = TermWithTfIdf{ + Term: term, + TfIdf: c.Get(term, docIndex), + } + } + + sort.Slice(out, func(a, b int) bool { return out[a].TfIdf > out[b].TfIdf }) + return c.withRelativeScores(out) +} + +type TermWithTfIdf struct { + Term string + TfIdf float32 + RelativeScore float32 +} + +func (c *TfIdfCalculator) withRelativeScores(list []TermWithTfIdf) []TermWithTfIdf { + // mean for variance + var mean float64 + for _, t := range list { + mean += float64(t.TfIdf) + } + mean = mean / float64(len(list)) + + // calculate variance + for i, t := range list { + variance := math.Pow(float64(t.TfIdf)-mean, 2) + if float64(t.TfIdf) < mean { + list[i].RelativeScore = float32(-variance) + } else { + list[i].RelativeScore = float32(variance) + } + } + + return c.withNormalizedScores(list) +} + +// between -1 and 1 +func (c *TfIdfCalculator) withNormalizedScores(list []TermWithTfIdf) []TermWithTfIdf { + max, min := c.maxMin(list) + + for i, curr := range list { + score := (curr.RelativeScore - min) / (max - min) + list[i].RelativeScore = (score - 0.5) * 2 + } + + return list +} + +func (c *TfIdfCalculator) maxMin(list []TermWithTfIdf) (float32, float32) { + max := list[0].RelativeScore + min := list[0].RelativeScore + + for _, curr := range list { + if curr.RelativeScore > max { + max = curr.RelativeScore + } + if curr.RelativeScore < min { + min = curr.RelativeScore + } + } + + return max, min +} + +func (c *TfIdfCalculator) lowerCaseAndDedup(list []string) []string { + seen := map[string]struct{}{} + out := make([]string, len(list)) + i := 0 + for _, term := range list { + term = strings.ToLower(term) + _, ok := seen[term] + if ok { + continue + } + + seen[term] = struct{}{} + out[i] = term + i++ + } + + return out[:i] +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/tf_idf_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/tf_idf_test.go new file mode 100644 index 0000000000000000000000000000000000000000..6b78d31311cfd55464aeaa834e7bffb03b7b7d4d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/classification/tf_idf_test.go @@ -0,0 +1,103 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package classification + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTfidf(t *testing.T) { + docs := []string{ + "this pinot wine is a pinot noir", + "this one is a cabernet sauvignon", + "this wine is a cabernet franc", + "this one is a merlot", + } + + calc := NewTfIdfCalculator(len(docs)) + for _, doc := range docs { + calc.AddDoc(doc) + } + calc.Calculate() + + t.Run("doc 0", func(t *testing.T) { + doc := 0 + + // filler words should have score of 0 + assert.Equal(t, float32(0), calc.Get("this", doc)) + assert.Equal(t, float32(0), calc.Get("is", doc)) + assert.Equal(t, float32(0), calc.Get("a", doc)) + + // next highest should be wine, noir, pinot + wine := calc.Get("wine", doc) + noir := calc.Get("noir", doc) + pinot := calc.Get("pinot", doc) + + assert.True(t, wine > 0, "wine greater 0") + assert.True(t, noir > wine, "noir greater than wine") + assert.True(t, pinot > noir, "pinot has highest score") + }) + + t.Run("doc 1", func(t *testing.T) { + doc := 1 + + // filler words should have score of 0 + assert.Equal(t, float32(0), calc.Get("this", doc)) + assert.Equal(t, float32(0), calc.Get("is", doc)) + assert.Equal(t, float32(0), calc.Get("a", doc)) + + // next highest should be one==cabernet, sauvignon + 
one := calc.Get("one", doc) + cabernet := calc.Get("cabernet", doc) + sauvignon := calc.Get("sauvignon", doc) + + assert.True(t, one > 0, "one greater 0") + assert.True(t, cabernet == one, "cabernet equal to one") + assert.True(t, sauvignon > cabernet, "sauvignon has highest score") + }) + + t.Run("doc 2", func(t *testing.T) { + doc := 2 + + // filler words should have score of 0 + assert.Equal(t, float32(0), calc.Get("this", doc)) + assert.Equal(t, float32(0), calc.Get("is", doc)) + assert.Equal(t, float32(0), calc.Get("a", doc)) + + // next highest should be one==cabernet, sauvignon + wine := calc.Get("wine", doc) + cabernet := calc.Get("cabernet", doc) + franc := calc.Get("franc", doc) + + assert.True(t, wine > 0, "wine greater 0") + assert.True(t, cabernet == wine, "cabernet equal to wine") + assert.True(t, franc > cabernet, "franc has highest score") + }) + + t.Run("doc 3", func(t *testing.T) { + doc := 3 + + // filler words should have score of 0 + assert.Equal(t, float32(0), calc.Get("this", doc)) + assert.Equal(t, float32(0), calc.Get("is", doc)) + assert.Equal(t, float32(0), calc.Get("a", doc)) + + // next highest should be one==cabernet, sauvignon + one := calc.Get("one", doc) + merlot := calc.Get("merlot", doc) + + assert.True(t, one > 0, "one greater 0") + assert.True(t, merlot > one, "merlot has highest score") + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/client/contextionary.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/client/contextionary.go new file mode 100644 index 0000000000000000000000000000000000000000..042062b0898de9be71b4efecd54d20ce9f2c27ae --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/client/contextionary.go @@ -0,0 +1,374 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 
2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + pb "github.com/weaviate/contextionary/contextionary" + "github.com/weaviate/weaviate/entities/models" + txt2vecmodels "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/models" + "github.com/weaviate/weaviate/modules/text2vec-contextionary/vectorizer" + "github.com/weaviate/weaviate/usecases/traverser" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials/insecure" + "google.golang.org/grpc/status" +) + +const ModelUncontactable = "module uncontactable" + +// Client establishes a gRPC connection to a remote contextionary service +type Client struct { + grpcClient pb.ContextionaryClient + logger logrus.FieldLogger +} + +// NewClient from gRPC discovery url to connect to a remote contextionary service +func NewClient(uri string, logger logrus.FieldLogger) (*Client, error) { + conn, err := grpc.NewClient(uri, + grpc.WithTransportCredentials(insecure.NewCredentials()), + grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(1024*1024*48))) + if err != nil { + return nil, fmt.Errorf("couldn't connect to remote contextionary gRPC server: %w", err) + } + + client := pb.NewContextionaryClient(conn) + return &Client{ + grpcClient: client, + logger: logger, + }, nil +} + +// IsStopWord returns true if the given word is a stopword, errors on connection errors +func (c *Client) IsStopWord(ctx context.Context, word string) (bool, error) { + res, err := c.grpcClient.IsWordStopword(ctx, &pb.Word{Word: word}) + if err != nil { + logConnectionRefused(c.logger, err) + return false, err + } + + return res.Stopword, nil +} + +// IsWordPresent returns true if the given word is a stopword, errors on connection errors +func (c *Client) IsWordPresent(ctx context.Context, word string) (bool, error) { + res, err := 
c.grpcClient.IsWordPresent(ctx, &pb.Word{Word: word}) + if err != nil { + logConnectionRefused(c.logger, err) + return false, err + } + + return res.Present, nil +} + +// SafeGetSimilarWordsWithCertainty will always return a list words - unless there is a network error +func (c *Client) SafeGetSimilarWordsWithCertainty(ctx context.Context, word string, certainty float32) ([]string, error) { + res, err := c.grpcClient.SafeGetSimilarWordsWithCertainty(ctx, &pb.SimilarWordsParams{Word: word, Certainty: certainty}) + if err != nil { + logConnectionRefused(c.logger, err) + return nil, err + } + + output := make([]string, len(res.Words)) + for i, word := range res.Words { + output[i] = word.Word + } + + return output, nil +} + +// SchemaSearch for related classes and properties +// TODO: is this still used? +func (c *Client) SchemaSearch(ctx context.Context, params traverser.SearchParams) (traverser.SearchResults, error) { + pbParams := &pb.SchemaSearchParams{ + Certainty: params.Certainty, + Name: params.Name, + SearchType: searchTypeToProto(params.SearchType), + } + + res, err := c.grpcClient.SchemaSearch(ctx, pbParams) + if err != nil { + logConnectionRefused(c.logger, err) + return traverser.SearchResults{}, err + } + + return schemaSearchResultsFromProto(res), nil +} + +func searchTypeToProto(input traverser.SearchType) pb.SearchType { + switch input { + case traverser.SearchTypeClass: + return pb.SearchType_CLASS + case traverser.SearchTypeProperty: + return pb.SearchType_PROPERTY + default: + panic(fmt.Sprintf("unknown search type %v", input)) + } +} + +func searchTypeFromProto(input pb.SearchType) traverser.SearchType { + switch input { + case pb.SearchType_CLASS: + return traverser.SearchTypeClass + case pb.SearchType_PROPERTY: + return traverser.SearchTypeProperty + default: + panic(fmt.Sprintf("unknown search type %v", input)) + } +} + +func schemaSearchResultsFromProto(res *pb.SchemaSearchResults) traverser.SearchResults { + return traverser.SearchResults{ + 
Type: searchTypeFromProto(res.Type), + Results: searchResultsFromProto(res.Results), + } +} + +func searchResultsFromProto(input []*pb.SchemaSearchResult) []traverser.SearchResult { + output := make([]traverser.SearchResult, len(input)) + for i, res := range input { + output[i] = traverser.SearchResult{ + Certainty: res.Certainty, + Name: res.Name, + } + } + + return output +} + +func (c *Client) VectorForWord(ctx context.Context, word string) ([]float32, error) { + res, err := c.grpcClient.VectorForWord(ctx, &pb.Word{Word: word}) + if err != nil { + logConnectionRefused(c.logger, err) + return nil, fmt.Errorf("could not get vector from remote: %w", err) + } + v, _, _ := vectorFromProto(res) + return v, nil +} + +func logConnectionRefused(logger logrus.FieldLogger, err error) { + if strings.Contains(fmt.Sprintf("%v", err), "connect: connection refused") { + logger.WithError(err).WithField("module", "contextionary").Warnf(ModelUncontactable) + } else if strings.Contains(err.Error(), "connectex: No connection could be made because the target machine actively refused it.") { + logger.WithError(err).WithField("module", "contextionary").Warnf(ModelUncontactable) + } +} + +func (c *Client) MultiVectorForWord(ctx context.Context, words []string) ([][]float32, error) { + out := make([][]float32, len(words)) + wordParams := make([]*pb.Word, len(words)) + + for i, word := range words { + wordParams[i] = &pb.Word{Word: word} + } + + res, err := c.grpcClient.MultiVectorForWord(ctx, &pb.WordList{Words: wordParams}) + if err != nil { + logConnectionRefused(c.logger, err) + return nil, err + } + + for i, elem := range res.Vectors { + if len(elem.Entries) == 0 { + // indicates word not found + continue + } + + out[i], _, _ = vectorFromProto(elem) + } + + return out, nil +} + +func (c *Client) MultiNearestWordsByVector(ctx context.Context, vectors [][]float32, k, n int) ([]*txt2vecmodels.NearestNeighbors, error) { + out := make([]*txt2vecmodels.NearestNeighbors, len(vectors)) + 
searchParams := make([]*pb.VectorNNParams, len(vectors)) + + for i, vector := range vectors { + searchParams[i] = &pb.VectorNNParams{ + Vector: vectorToProto(vector), + K: int32(k), + N: int32(n), + } + } + + res, err := c.grpcClient.MultiNearestWordsByVector(ctx, &pb.VectorNNParamsList{Params: searchParams}) + if err != nil { + logConnectionRefused(c.logger, err) + return nil, err + } + + for i, elem := range res.Words { + out[i] = &txt2vecmodels.NearestNeighbors{ + Neighbors: c.extractNeighbors(elem), + } + } + + return out, nil +} + +func (c *Client) extractNeighbors(elem *pb.NearestWords) []*txt2vecmodels.NearestNeighbor { + out := make([]*txt2vecmodels.NearestNeighbor, len(elem.Words)) + + for i := range out { + vec, _, _ := vectorFromProto(elem.Vectors.Vectors[i]) + out[i] = &txt2vecmodels.NearestNeighbor{ + Concept: elem.Words[i], + Distance: elem.Distances[i], + Vector: vec, + } + } + return out +} + +func vectorFromProto(in *pb.Vector) ([]float32, []txt2vecmodels.InterpretationSource, error) { + output := make([]float32, len(in.Entries)) + for i, entry := range in.Entries { + output[i] = entry.Entry + } + + source := make([]txt2vecmodels.InterpretationSource, len(in.Source)) + for i, s := range in.Source { + source[i].Concept = s.Concept + source[i].Weight = float64(s.Weight) + source[i].Occurrence = s.Occurrence + } + + return output, source, nil +} + +func (c *Client) VectorForCorpi(ctx context.Context, corpi []string, overridesMap map[string]string) ([]float32, []txt2vecmodels.InterpretationSource, error) { + overrides := overridesFromMap(overridesMap) + res, err := c.grpcClient.VectorForCorpi(ctx, &pb.Corpi{Corpi: corpi, Overrides: overrides}) + if err != nil { + if strings.Contains(err.Error(), "connect: connection refused") { + c.logger.WithError(err).WithField("module", "contextionary").Warnf(ModelUncontactable) + } else if strings.Contains(err.Error(), "connectex: No connection could be made because the target machine actively refused it.") { + 
c.logger.WithError(err).WithField("module", "contextionary").Warnf(ModelUncontactable) + } + st, ok := status.FromError(err) + if !ok || st.Code() != codes.InvalidArgument { + return nil, nil, fmt.Errorf("could not get vector from remote: %w", err) + } + + return nil, nil, vectorizer.NewErrNoUsableWordsf("%s", st.Message()) + } + + return vectorFromProto(res) +} + +func (c *Client) VectorOnlyForCorpi(ctx context.Context, corpi []string, overrides map[string]string) ([]float32, error) { + vec, _, err := c.VectorForCorpi(ctx, corpi, overrides) + return vec, err +} + +func (c *Client) NearestWordsByVector(ctx context.Context, vector []float32, n int, k int) ([]string, []float32, error) { + res, err := c.grpcClient.NearestWordsByVector(ctx, &pb.VectorNNParams{ + K: int32(k), + N: int32(n), + Vector: vectorToProto(vector), + }) + if err != nil { + logConnectionRefused(c.logger, err) + return nil, nil, fmt.Errorf("could not get nearest words by vector: %w", err) + } + + return res.Words, res.Distances, nil +} + +func (c *Client) AddExtension(ctx context.Context, extension *models.C11yExtension) error { + _, err := c.grpcClient.AddExtension(ctx, &pb.ExtensionInput{ + Concept: extension.Concept, + Definition: strings.ToLower(extension.Definition), + Weight: extension.Weight, + }) + + return err +} + +func vectorToProto(in []float32) *pb.Vector { + output := make([]*pb.VectorEntry, len(in)) + for i, entry := range in { + output[i] = &pb.VectorEntry{ + Entry: entry, + } + } + + return &pb.Vector{Entries: output} +} + +func (c *Client) WaitForStartupAndValidateVersion(startupCtx context.Context, + requiredMinimumVersion string, interval time.Duration, +) error { + for { + if err := startupCtx.Err(); err != nil { + return errors.Wrap(err, "wait for contextionary remote inference service") + } + + time.Sleep(interval) + + ctx, cancel := context.WithTimeout(startupCtx, 2*time.Second) + defer cancel() + v, err := c.version(ctx) + if err != nil { + c.logger.WithField("action", 
"startup_check_contextionary").WithError(err). + Warnf("could not connect to contextionary at startup, trying again in 1 sec") + continue + } + + ok, err := extractVersionAndCompare(v, requiredMinimumVersion) + if err != nil { + c.logger.WithField("action", "startup_check_contextionary"). + WithField("requiredMinimumContextionaryVersion", requiredMinimumVersion). + WithField("contextionaryVersion", v). + WithError(err). + Warnf("cannot determine if contextionary version is compatible. " + + "This is fine in development, but probelematic if you see this production") + return nil + } + + if ok { + c.logger.WithField("action", "startup_check_contextionary"). + WithField("requiredMinimumContextionaryVersion", requiredMinimumVersion). + WithField("contextionaryVersion", v). + Infof("found a valid contextionary version") + return nil + } else { + return errors.Errorf("insuffcient contextionary version: need at least %s, got %s", + requiredMinimumVersion, v) + } + } +} + +func overridesFromMap(in map[string]string) []*pb.Override { + if in == nil { + return nil + } + + out := make([]*pb.Override, len(in)) + i := 0 + for key, value := range in { + out[i] = &pb.Override{ + Word: key, + Expression: value, + } + i++ + } + + return out +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/client/meta_provider.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/client/meta_provider.go new file mode 100644 index 0000000000000000000000000000000000000000..7cfd239a07b49fa1a7fbe5a1571d6c32d1444c01 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/client/meta_provider.go @@ -0,0 +1,55 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "context" + + pb "github.com/weaviate/contextionary/contextionary" +) + +func (c *Client) version(ctx context.Context) (string, error) { + m, err := c.grpcClient.Meta(ctx, &pb.MetaParams{}) + if err != nil { + return "", err + } + + return m.Version, nil +} + +func (c *Client) wordCount(ctx context.Context) (int64, error) { + m, err := c.grpcClient.Meta(ctx, &pb.MetaParams{}) + if err != nil { + return 0, err + } + + return m.WordCount, nil +} + +func (c *Client) MetaInfo() (map[string]interface{}, error) { + c11yVersion, err := c.version(context.Background()) + if err != nil { + return nil, err + } + + c11yWordCount, err := c.wordCount(context.Background()) + if err != nil { + return nil, err + } + + meta := map[string]interface{}{ + "version": c11yVersion, + "wordCount": c11yWordCount, + } + + return meta, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/client/version_checks.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/client/version_checks.go new file mode 100644 index 0000000000000000000000000000000000000000..f402cf912aa773376d013d482b16b339ebb5b017 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/client/version_checks.go @@ -0,0 +1,76 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
const (
	// inputVersionRegexString matches image tags like "<name>-v1.2.3" and
	// captures major, minor, and patch.
	inputVersionRegexString = `^.*-v([0-9]+)\.([0-9]+)\.([0-9]+)$`
	// minimumVersionRegexString matches a plain "major.minor.patch" triple.
	// The quantifier must sit INSIDE the minor capture group ("[0-9]+");
	// placing it outside (as "([0-9])+") captures only the last digit of a
	// multi-digit minor version and corrupts the comparison.
	minimumVersionRegexString = `^([0-9]+)\.([0-9]+)\.([0-9]+)$`
)

// compile once at package scope instead of on every call
var (
	inputVersionRegex   = regexp.MustCompile(inputVersionRegexString)
	minimumVersionRegex = regexp.MustCompile(minimumVersionRegexString)
)

// extractVersionAndCompare reports whether the version embedded in input
// (e.g. "contextionary-v1.2.3") is at least requiredMin (e.g. "1.0.0").
// It returns an error if either string does not match the expected format.
func extractVersionAndCompare(input, requiredMin string) (bool, error) {
	inputMatch := inputVersionRegex.FindStringSubmatch(input)
	if inputMatch == nil {
		return false, fmt.Errorf("unexpected input version tag: %s", input)
	}

	minimumMatch := minimumVersionRegex.FindStringSubmatch(requiredMin)
	if minimumMatch == nil {
		return false, fmt.Errorf("unexpected threshold version tag: %s", requiredMin)
	}

	// the regexes guarantee pure digit groups, so Atoi cannot fail here
	inputMajor, _ := strconv.Atoi(inputMatch[1])
	inputMinor, _ := strconv.Atoi(inputMatch[2])
	inputPatch, _ := strconv.Atoi(inputMatch[3])

	minimumMajor, _ := strconv.Atoi(minimumMatch[1])
	minimumMinor, _ := strconv.Atoi(minimumMatch[2])
	minimumPatch, _ := strconv.Atoi(minimumMatch[3])

	return compareSemver(inputMajor, inputMinor, inputPatch,
		minimumMajor, minimumMinor, minimumPatch), nil
}

// compareSemver reports whether version i (major/minor/patch) is greater
// than or equal to version r.
func compareSemver(iMaj, iMin, iPat, rMaj, rMin, rPat int) bool {
	if iMaj != rMaj {
		return iMaj > rMaj
	}
	if iMin != rMin {
		return iMin > rMin
	}
	return iPat >= rPat
}
b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/client/version_checks_test.go @@ -0,0 +1,117 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package client + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestExtractVersionAndCompare(t *testing.T) { + type test struct { + input string + requiredMinimum string + expectedIsMet bool + expectedErr error + } + + tests := []test{ + { + input: "notavalidversiontag", + requiredMinimum: "1.2.3", + expectedIsMet: false, + expectedErr: fmt.Errorf("unexpected input version tag: notavalidversiontag"), + }, + { + input: "abc-v0.1.2", + requiredMinimum: "invalidrequired", + expectedIsMet: false, + expectedErr: fmt.Errorf("unexpected threshold version tag: invalidrequired"), + }, + + // valid matches + + // exact match + { + input: "abc-v0.1.2", + requiredMinimum: "0.1.2", + expectedIsMet: true, + expectedErr: nil, + }, + + // every digit bigger + { + input: "abc-v1.2.3", + requiredMinimum: "0.1.2", + expectedIsMet: true, + expectedErr: nil, + }, + + // only major bigger + { + input: "abc-v1.0.0", + requiredMinimum: "0.1.2", + expectedIsMet: true, + expectedErr: nil, + }, + + // only minor bigger + { + input: "abc-v0.2.0", + requiredMinimum: "0.1.2", + expectedIsMet: true, + expectedErr: nil, + }, + + // only patch bigger + { + input: "abc-v0.1.3", + requiredMinimum: "0.1.2", + expectedIsMet: true, + expectedErr: nil, + }, + + // invalid requirements + + // only patch smaller + { + input: "abc-v0.1.1", + requiredMinimum: "0.1.2", + expectedIsMet: false, + expectedErr: nil, + }, + + // only minor smaller + { + input: "abc-v0.0.9", + requiredMinimum: "0.1.2", + expectedIsMet: false, + expectedErr: nil, + }, + + // only major smaller + { 
+ input: "abc-v0.9.9", + requiredMinimum: "1.1.2", + expectedIsMet: false, + expectedErr: nil, + }, + } + + for _, test := range tests { + ok, err := extractVersionAndCompare(test.input, test.requiredMinimum) + assert.Equal(t, test.expectedIsMet, ok) + assert.Equal(t, test.expectedErr, err) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/concepts/rest.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/concepts/rest.go new file mode 100644 index 0000000000000000000000000000000000000000..3b5fe5652a5274f630afedfd8b2f17d455eab7aa --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/concepts/rest.go @@ -0,0 +1,96 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package concepts + +import ( + "context" + "net/http" + + "github.com/weaviate/weaviate/entities/models" +) + +type RESTHandlers struct { + inspector Inspector +} + +func NewRESTHandlers(inspector Inspector) *RESTHandlers { + return &RESTHandlers{ + inspector: inspector, + } +} + +func (h *RESTHandlers) Handler() http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case http.MethodGet: + h.get(w, r) + default: + w.WriteHeader(http.StatusMethodNotAllowed) + } + }) +} + +func (h *RESTHandlers) get(w http.ResponseWriter, r *http.Request) { + if len(r.URL.String()) == 0 || h.extractConcept(r) == "" { + w.WriteHeader(http.StatusNotFound) + return + } + + h.getOne(w, r) +} + +func (h *RESTHandlers) getOne(w http.ResponseWriter, r *http.Request) { + concept := h.extractConcept(r) + + res, err := h.inspector.GetWords(r.Context(), concept) + if err != nil { + h.writeError(w, err, http.StatusBadRequest) + return + } + + json, err := 
res.MarshalBinary() + if err != nil { + h.writeError(w, err, http.StatusInternalServerError) + return + } + + w.Header().Add("content-type", "application/json") + w.WriteHeader(http.StatusOK) + w.Write(json) +} + +func (h *RESTHandlers) writeError(w http.ResponseWriter, err error, code int) { + res := &models.ErrorResponse{Error: []*models.ErrorResponseErrorItems0{{ + Message: err.Error(), + }}} + + json, mErr := res.MarshalBinary() + if mErr != nil { + // fallback to text + w.Header().Add("content-type", "text/plain") + w.WriteHeader(code) + w.Write([]byte(err.Error())) + } + + w.Header().Add("content-type", "application/json") + w.WriteHeader(code) + w.Write(json) +} + +func (h *RESTHandlers) extractConcept(r *http.Request) string { + // cutoff leading slash, consider the rest the concept + return r.URL.String()[1:] +} + +type Inspector interface { + GetWords(ctx context.Context, words string) (*models.C11yWordsResponse, error) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/concepts/rest_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/concepts/rest_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8ab0f23816d294f2620808d790b8ec3aa53011b3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/concepts/rest_test.go @@ -0,0 +1,103 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package concepts + +import ( + "context" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +func TestHandlers(t *testing.T) { + insp := newFakeInspector() + h := NewRESTHandlers(insp) + + t.Run("without a concept", func(t *testing.T) { + insp.reset() + r := httptest.NewRequest("GET", "/", nil) + w := httptest.NewRecorder() + h.Handler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusNotFound, res.StatusCode) + }) + + t.Run("without any errors", func(t *testing.T) { + insp.reset() + r := httptest.NewRequest("GET", "/my-concept", nil) + w := httptest.NewRecorder() + h.Handler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + json, err := io.ReadAll(res.Body) + require.Nil(t, err) + expected := `{"individualWords":[{` + + `"info":{"vector":[0.1,0.2]},"present":true,"word":"my-concept"}]}` + + assert.Equal(t, http.StatusOK, res.StatusCode) + assert.Equal(t, expected, string(json)) + }) + + t.Run("without an error from the UC", func(t *testing.T) { + insp.reset() + insp.err = errors.Errorf("invalid input") + r := httptest.NewRequest("GET", "/my-concept", nil) + w := httptest.NewRecorder() + h.Handler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + json, err := io.ReadAll(res.Body) + require.Nil(t, err) + expected := `{"error":[{"message":"invalid input"}]}` + + assert.Equal(t, http.StatusBadRequest, res.StatusCode) + assert.Equal(t, expected, string(json)) + }) +} + +type fakeInspector struct { + err error +} + +func (f *fakeInspector) reset() { + f.err = nil +} + +func (f *fakeInspector) GetWords(ctx context.Context, + concept string, +) (*models.C11yWordsResponse, error) { + return &models.C11yWordsResponse{ + IndividualWords: []*models.C11yWordsResponseIndividualWordsItems0{ + { + 
Present: true, + Word: concept, + Info: &models.C11yWordsResponseIndividualWordsItems0Info{ + Vector: []float32{0.1, 0.2}, + }, + }, + }, + }, f.err +} + +func newFakeInspector() *fakeInspector { + return &fakeInspector{} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/config.go new file mode 100644 index 0000000000000000000000000000000000000000..54054b33b2c74637e9ba225bf1c91c99904cbbc5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/config.go @@ -0,0 +1,50 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modcontextionary + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/text2vec-contextionary/vectorizer" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +func (m *ContextionaryModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": basesettings.DefaultVectorizeClassName, + } +} + +func (m *ContextionaryModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !basesettings.DefaultPropertyIndexed, + "vectorizePropertyName": basesettings.DefaultVectorizePropertyName, + } +} + +func (m *ContextionaryModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + icheck := vectorizer.NewIndexChecker(cfg) + if err := icheck.Validate(class); err 
!= nil { + return err + } + return m.configValidator.Do(ctx, class, cfg, icheck) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/doc.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/doc.go new file mode 100644 index 0000000000000000000000000000000000000000..f27a96ac34f4dc233369fb1ba5177da4cc9fe42f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/doc.go @@ -0,0 +1,17 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +// modcontextionary concentrates some of the code that relates to the +// contextionary module, this must be extracted when Weaviate becomes modular. +// +// For now it's just used to concentrate any new contextionary specific code, +// so it's already in one place to make the modularization easier later on +package modcontextionary diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/rest_storage.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/rest_storage.go new file mode 100644 index 0000000000000000000000000000000000000000..166f67c8a04eee25ac3ab9e8b3a18c043ef4228f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/rest_storage.go @@ -0,0 +1,142 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package extensions + +import ( + "io" + "net/http" +) + +type RESTHandlers struct { + ls LoaderStorer + proxy Proxy +} + +func NewRESTHandlers(ls LoaderStorer, proxy Proxy) *RESTHandlers { + return &RESTHandlers{ + ls: ls, + proxy: proxy, + } +} + +type RESTStorageHandlers struct { + ls LoaderStorer +} + +func newRESTStorageHandlers(ls LoaderStorer) *RESTStorageHandlers { + return &RESTStorageHandlers{ + ls: ls, + } +} + +func (h *RESTHandlers) StorageHandler() http.Handler { + return newRESTStorageHandlers(h.ls).Handler() +} + +func (h *RESTStorageHandlers) Handler() http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case http.MethodGet: + h.get(w, r) + case http.MethodPut: + h.put(w, r) + default: + w.WriteHeader(http.StatusMethodNotAllowed) + } + }) +} + +func (h *RESTStorageHandlers) get(w http.ResponseWriter, r *http.Request) { + if len(r.URL.String()) == 0 || h.extractConcept(r) == "" { + h.getAll(w, r) + return + } + + h.getOne(w, r) +} + +func (h *RESTStorageHandlers) getOne(w http.ResponseWriter, r *http.Request) { + concept := h.extractConcept(r) + if concept == "" { + w.WriteHeader(http.StatusNotFound) + return + } + + res, err := h.ls.Load(concept) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } + + if res == nil { + w.WriteHeader(http.StatusNotFound) + return + } + + w.Write(res) +} + +func (h *RESTStorageHandlers) getAll(w http.ResponseWriter, r *http.Request) { + res, err := h.ls.LoadAll() + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } + + w.Write(res) +} + +func (h *RESTStorageHandlers) put(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + concept := h.extractConcept(r) + if len(concept) == 0 { + w.WriteHeader(http.StatusNotFound) + return + } + + body, err := io.ReadAll(r.Body) + if err != nil { + 
w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + } + + err = h.ls.Store(concept, body) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + } +} + +func (h *RESTStorageHandlers) extractConcept(r *http.Request) string { + // cutoff leading slash, consider the rest the concept + return r.URL.String()[1:] +} + +type Storer interface { + Store(concept string, value []byte) error +} + +type Loader interface { + Load(concept string) ([]byte, error) +} + +type LoaderAller interface { + LoadAll() ([]byte, error) +} + +type LoaderStorer interface { + Storer + Loader + LoaderAller +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/rest_storage_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/rest_storage_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c07a6b7341bb1bba433390bc0a559a455a543981 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/rest_storage_test.go @@ -0,0 +1,184 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package extensions + +import ( + "bytes" + "fmt" + "net/http" + "net/http/httptest" + "sort" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_StorageHandlers(t *testing.T) { + ls := newFakeLoaderStorer() + h := NewRESTHandlers(ls, nil) + + extensionAKey := "my-first-extension" + extensionAValue := []byte("some-value") + + extensionBKey := "my-other-extension" + extensionBValue := []byte("some-other-value") + + t.Run("retrieving a non existent concept", func(t *testing.T) { + r := httptest.NewRequest("GET", "/my-concept", nil) + w := httptest.NewRecorder() + h.StorageHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusNotFound, res.StatusCode) + }) + + t.Run("storing two extensions", func(t *testing.T) { + t.Run("extension A", func(t *testing.T) { + body := bytes.NewReader(extensionAValue) + r := httptest.NewRequest("PUT", fmt.Sprintf("/%s", extensionAKey), body) + w := httptest.NewRecorder() + h.StorageHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusOK, res.StatusCode) + }) + + t.Run("extension B", func(t *testing.T) { + body := bytes.NewReader(extensionBValue) + r := httptest.NewRequest("PUT", fmt.Sprintf("/%s", extensionBKey), body) + w := httptest.NewRecorder() + h.StorageHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusOK, res.StatusCode) + }) + }) + + t.Run("when storing fails", func(t *testing.T) { + ls.storeError = fmt.Errorf("oops") + body := bytes.NewReader(extensionAValue) + r := httptest.NewRequest("PUT", "/some-extension", body) + + w := httptest.NewRecorder() + h.StorageHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusInternalServerError, res.StatusCode) + }) + + t.Run("storing with an empty concept", func(t *testing.T) { + body := bytes.NewReader(extensionAValue) + r := 
httptest.NewRequest("PUT", "/", body) + + w := httptest.NewRecorder() + h.StorageHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusNotFound, res.StatusCode) + }) + + t.Run("retrieving two extensions", func(t *testing.T) { + t.Run("extension A", func(t *testing.T) { + r := httptest.NewRequest("GET", fmt.Sprintf("/%s", extensionAKey), nil) + w := httptest.NewRecorder() + h.StorageHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusOK, res.StatusCode) + assert.Equal(t, extensionAValue, w.Body.Bytes()) + }) + + t.Run("extension B", func(t *testing.T) { + r := httptest.NewRequest("GET", fmt.Sprintf("/%s", extensionBKey), nil) + w := httptest.NewRecorder() + h.StorageHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusOK, res.StatusCode) + assert.Equal(t, extensionBValue, w.Body.Bytes()) + }) + + t.Run("full dump with trailing slash", func(t *testing.T) { + r := httptest.NewRequest("GET", "/", nil) + w := httptest.NewRecorder() + h.StorageHandler().ServeHTTP(w, r) + expectedValue := []byte("some-value\nsome-other-value\n") + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusOK, res.StatusCode) + assert.Equal(t, expectedValue, w.Body.Bytes()) + }) + }) + + t.Run("when loading fails", func(t *testing.T) { + ls.loadError = fmt.Errorf("oops") + body := bytes.NewReader(extensionAValue) + r := httptest.NewRequest("GET", "/some-extension", body) + + w := httptest.NewRecorder() + h.StorageHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusInternalServerError, res.StatusCode) + }) +} + +type fakeLoaderStorer struct { + store map[string][]byte + storeError error + loadError error +} + +func newFakeLoaderStorer() *fakeLoaderStorer { + return &fakeLoaderStorer{ + store: map[string][]byte{}, + } +} + +func (f *fakeLoaderStorer) Store(concept string, value 
[]byte) error { + if f.storeError == nil { + f.store[concept] = value + } + return f.storeError +} + +func (f *fakeLoaderStorer) Load(concept string) ([]byte, error) { + return f.store[concept], f.loadError +} + +func (f *fakeLoaderStorer) LoadAll() ([]byte, error) { + var keys [][]byte + for key := range f.store { + keys = append(keys, []byte(key)) + } + + sort.Slice(keys, func(a, b int) bool { + return bytes.Compare(keys[a], keys[b]) == -1 + }) + + buf := bytes.NewBuffer(nil) + for _, key := range keys { + buf.Write(f.store[string(key)]) + buf.Write([]byte("\n")) + } + + return buf.Bytes(), nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/rest_user_facing.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/rest_user_facing.go new file mode 100644 index 0000000000000000000000000000000000000000..4446791baa5a0a47e07d1961b22494d04e3d0e9d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/rest_user_facing.go @@ -0,0 +1,107 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package extensions + +import ( + "context" + "io" + "net/http" + + "github.com/weaviate/weaviate/entities/models" +) + +type RESTUserFacingHandlers struct { + proxy Proxy +} + +func newRESTUserFacingHandlers(proxy Proxy) *RESTUserFacingHandlers { + return &RESTUserFacingHandlers{ + proxy: proxy, + } +} + +func (h *RESTHandlers) UserFacingHandler() http.Handler { + return newRESTUserFacingHandlers(h.proxy).Handler() +} + +func (h *RESTUserFacingHandlers) Handler() http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case http.MethodPost: + h.post(w, r) + default: + w.WriteHeader(http.StatusMethodNotAllowed) + } + }) +} + +func (h *RESTUserFacingHandlers) post(w http.ResponseWriter, r *http.Request) { + ct := r.Header.Get("content-type") + if ct != "application/json" { + w.WriteHeader(http.StatusUnsupportedMediaType) + return + } + + defer r.Body.Close() + body, err := io.ReadAll(r.Body) + if err != nil { + h.writeError(w, err, http.StatusInternalServerError) + return + } + + var ext models.C11yExtension + if err := (&ext).UnmarshalBinary(body); err != nil { + h.writeError(w, err, http.StatusUnprocessableEntity) + return + } + + if err := h.proxy.AddExtension(r.Context(), &ext); err != nil { + h.writeError(w, err, http.StatusBadRequest) + return + } + + resBody, err := ext.MarshalBinary() + if err != nil { + h.writeError(w, err, http.StatusInternalServerError) + return + } + + w.Header().Add("content-type", "application/json") + w.WriteHeader(http.StatusOK) + w.Write(resBody) +} + +// C11yProxy proxies the request through the separate container, only for it to +// come back here for the storage. This is legacy from the pre-module times. 
+// TODO: cleanup, there does not need to be a separation between user-facing +// and internal storage endpoint in the long-term +type Proxy interface { + AddExtension(ctx context.Context, extension *models.C11yExtension) error +} + +func (h *RESTUserFacingHandlers) writeError(w http.ResponseWriter, err error, code int) { + res := &models.ErrorResponse{Error: []*models.ErrorResponseErrorItems0{{ + Message: err.Error(), + }}} + + json, mErr := res.MarshalBinary() + if mErr != nil { + // fallback to text + w.Header().Add("content-type", "text/plain") + w.WriteHeader(code) + w.Write([]byte(err.Error())) + } + + w.Header().Add("content-type", "application/json") + w.WriteHeader(code) + w.Write(json) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/rest_user_facing_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/rest_user_facing_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1bfd68ceb9114a54239cc16fe318fb3daf5d676f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/rest_user_facing_test.go @@ -0,0 +1,107 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package extensions + +import ( + "bytes" + "context" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +func Test_UserFacingHandlers(t *testing.T) { + proxy := newFakeProxy() + h := NewRESTHandlers(nil, proxy) + + t.Run("with a method other than POST", func(t *testing.T) { + r := httptest.NewRequest("GET", "/", nil) + w := httptest.NewRecorder() + h.UserFacingHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusMethodNotAllowed, res.StatusCode) + }) + + t.Run("with the wrong media type", func(t *testing.T) { + r := httptest.NewRequest("POST", "/", nil) + r.Header.Add("content-type", "text/plain") + w := httptest.NewRecorder() + h.UserFacingHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusUnsupportedMediaType, res.StatusCode) + }) + + t.Run("with the wrong body", func(t *testing.T) { + body := []byte(`{"concept":7}`) + r := httptest.NewRequest("POST", "/", bytes.NewReader(body)) + r.Header.Add("content-type", "application/json") + w := httptest.NewRecorder() + h.UserFacingHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusUnprocessableEntity, res.StatusCode) + }) + + t.Run("with the right body", func(t *testing.T) { + body := []byte(`{"concept":"foo","definition":"bar","weight":1}`) + r := httptest.NewRequest("POST", "/", bytes.NewReader(body)) + r.Header.Add("content-type", "application/json") + w := httptest.NewRecorder() + h.UserFacingHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + + readBody, err := io.ReadAll(res.Body) + require.Nil(t, err) + assert.Equal(t, http.StatusOK, res.StatusCode) + assert.Equal(t, body, readBody) + }) + + t.Run("with a proxy error", func(t *testing.T) { + 
proxy.err = errors.Errorf("invalid input") + body := []byte(`{"concept":"foo","definition":"bar","weight":1}`) + r := httptest.NewRequest("POST", "/", bytes.NewReader(body)) + r.Header.Add("content-type", "application/json") + w := httptest.NewRecorder() + h.UserFacingHandler().ServeHTTP(w, r) + + res := w.Result() + defer res.Body.Close() + assert.Equal(t, http.StatusBadRequest, res.StatusCode) + }) +} + +type fakeProxy struct { + err error +} + +func (f *fakeProxy) AddExtension(ctx context.Context, + ext *models.C11yExtension, +) error { + return f.err +} + +func newFakeProxy() *fakeProxy { + return &fakeProxy{} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/usecase.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/usecase.go new file mode 100644 index 0000000000000000000000000000000000000000..3788ca6863b4822a486eae44713ce124d84c3fa1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/usecase.go @@ -0,0 +1,70 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package extensions + +import ( + "bytes" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" +) + +// UseCase handles all business logic regarding extensions +type UseCase struct { + storage moduletools.Storage +} + +func NewUseCase(storage moduletools.Storage) *UseCase { + return &UseCase{ + storage: storage, + } +} + +func (uc *UseCase) Store(concept string, value []byte) error { + err := uc.storage.Put([]byte(concept), value) + if err != nil { + return errors.Wrapf(err, "store concept %q", concept) + } + + return nil +} + +func (uc *UseCase) Load(concept string) ([]byte, error) { + val, err := uc.storage.Get([]byte(concept)) + if err != nil { + return nil, errors.Wrapf(err, "load concept %q", concept) + } + + return val, nil +} + +func (uc *UseCase) LoadAll() ([]byte, error) { + buf := bytes.NewBuffer(nil) + err := uc.storage.Scan(func(k, v []byte) (bool, error) { + _, err := buf.Write(v) + if err != nil { + return false, errors.Wrapf(err, "write concept %q", string(k)) + } + + _, err = buf.Write([]byte("\n")) + if err != nil { + return false, errors.Wrap(err, "write newline separator") + } + + return true, nil + }) + if err != nil { + return nil, errors.Wrap(err, "load all concepts") + } + + return buf.Bytes(), nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/usecase_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/usecase_test.go new file mode 100644 index 0000000000000000000000000000000000000000..80309bfd0b070ca88af407050de795dfa9424a0f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/extensions/usecase_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. 
All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package extensions + +import ( + "bytes" + "fmt" + "sort" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/moduletools" +) + +func Test_UseCase(t *testing.T) { + storage := newFakeStorage() + uc := NewUseCase(storage) + + t.Run("storing and loading something", func(t *testing.T) { + err := uc.Store("concept1", []byte("value1")) + require.Nil(t, err) + + err = uc.Store("concept2", []byte("value2")) + require.Nil(t, err) + + val, err := uc.Load("concept1") + require.Nil(t, err) + assert.Equal(t, []byte("value1"), val) + + val, err = uc.Load("concept2") + require.Nil(t, err) + assert.Equal(t, []byte("value2"), val) + + vals, err := uc.LoadAll() + require.Nil(t, err) + assert.Equal(t, []byte("value1\nvalue2\n"), vals) + }) + + t.Run("when storing fails", func(t *testing.T) { + storage.putError = fmt.Errorf("oops") + err := uc.Store("concept1", []byte("value1")) + assert.Equal(t, "store concept \"concept1\": oops", err.Error()) + }) + + t.Run("when loading fails", func(t *testing.T) { + storage.getError = fmt.Errorf("oops") + _, err := uc.Load("concept1") + assert.Equal(t, "load concept \"concept1\": oops", err.Error()) + }) +} + +func newFakeStorage() *fakeStorage { + return &fakeStorage{ + store: map[string][]byte{}, + } +} + +type fakeStorage struct { + store map[string][]byte + getError error + putError error +} + +func (f *fakeStorage) Get(k []byte) ([]byte, error) { + return f.store[string(k)], f.getError +} + +func (f *fakeStorage) Put(k, v []byte) error { + f.store[string(k)] = v + return f.putError +} + +func (f *fakeStorage) Scan(scan moduletools.ScanFn) error { + var keys [][]byte + for key := range f.store { + keys = append(keys, []byte(key)) + } + + sort.Slice(keys, func(a, b int) bool { + return bytes.Compare(keys[a], keys[b]) == -1 + }) + + for _, key := range keys { + scan(key, f.store[string(key)]) + } + + 
return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/helpers_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/helpers_test.go new file mode 100644 index 0000000000000000000000000000000000000000..cc235775aafeb4f4b6ab7a9d8d3659004599baad --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/helpers_test.go @@ -0,0 +1,393 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modcontextionary + +import ( + "context" + "fmt" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/tailor-inc/graphql" + "github.com/tailor-inc/graphql/language/ast" + + "github.com/weaviate/weaviate/adapters/handlers/graphql/local/explore" + "github.com/weaviate/weaviate/adapters/handlers/graphql/local/get" + test_helper "github.com/weaviate/weaviate/adapters/handlers/graphql/test/helper" + "github.com/weaviate/weaviate/entities/dto" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/search" + text2vecadditional "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional" + text2vecadditionalsempath "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/sempath" + text2vecadditionalprojector "github.com/weaviate/weaviate/usecases/modulecomponents/additional/projector" + text2vecneartext "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" + "github.com/weaviate/weaviate/usecases/traverser" +) + +type mockRequestsLog struct{} + +func (m *mockRequestsLog) Register(first string, second string) { +} + +type mockResolver struct { + 
test_helper.MockResolver +} + +type fakeInterpretation struct{} + +func (f *fakeInterpretation) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + return in, nil +} + +func (f *fakeInterpretation) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return true +} + +func (f *fakeInterpretation) AdditionalPropertyDefaultValue() interface{} { + return true +} + +type fakeExtender struct { + returnArgs []search.Result +} + +func (f *fakeExtender) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + return f.returnArgs, nil +} + +func (f *fakeExtender) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return true +} + +func (f *fakeExtender) AdditionalPropertyDefaultValue() interface{} { + return true +} + +type fakeProjector struct { + returnArgs []search.Result +} + +func (f *fakeProjector) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + return f.returnArgs, nil +} + +func (f *fakeProjector) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + if len(param) > 0 { + p := &text2vecadditionalprojector.Params{} + err := p.SetDefaultsAndValidate(100, 4) + if err != nil { + return nil + } + return p + } + return &text2vecadditionalprojector.Params{ + Enabled: true, + } +} + +func (f *fakeProjector) AdditionalPropertyDefaultValue() interface{} { + return &text2vecadditionalprojector.Params{} +} + +type fakePathBuilder struct { + returnArgs []search.Result +} + +func (f *fakePathBuilder) AdditionalPropertyFn(ctx context.Context, + in 
[]search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + return f.returnArgs, nil +} + +func (f *fakePathBuilder) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return &text2vecadditionalsempath.Params{} +} + +func (f *fakePathBuilder) AdditionalPropertyDefaultValue() interface{} { + return &text2vecadditionalsempath.Params{} +} + +type mockText2vecContextionaryModule struct{} + +func (m *mockText2vecContextionaryModule) Name() string { + return "text2vec-contextionary" +} + +func (m *mockText2vecContextionaryModule) Init(params moduletools.ModuleInitParams) error { + return nil +} + +// graphql arguments +func (m *mockText2vecContextionaryModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return text2vecneartext.New(nil).Arguments() +} + +// additional properties +func (m *mockText2vecContextionaryModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return text2vecadditional.New(&fakeExtender{}, &fakeProjector{}, &fakePathBuilder{}, &fakeInterpretation{}).AdditionalProperties() +} + +type fakeModulesProvider struct{} + +func (fmp *fakeModulesProvider) GetAll() []modulecapabilities.Module { + panic("implement me") +} + +func (fmp *fakeModulesProvider) VectorFromInput(ctx context.Context, className string, input string) ([]float32, error) { + panic("not implemented") +} + +func (fmp *fakeModulesProvider) GetArguments(class *models.Class) map[string]*graphql.ArgumentConfig { + args := map[string]*graphql.ArgumentConfig{} + txt2vec := &mockText2vecContextionaryModule{} + if class.Vectorizer == txt2vec.Name() { + for name, argument := range txt2vec.Arguments() { + args[name] = argument.GetArgumentsFunction(class.Class) + } + } + return args +} + +func (fmp *fakeModulesProvider) ExploreArguments(schema *models.Schema) map[string]*graphql.ArgumentConfig { + args := 
map[string]*graphql.ArgumentConfig{} + txt2vec := &mockText2vecContextionaryModule{} + for _, c := range schema.Classes { + if c.Vectorizer == txt2vec.Name() { + for name, argument := range txt2vec.Arguments() { + args[name] = argument.ExploreArgumentsFunction() + } + } + } + return args +} + +func (fmp *fakeModulesProvider) CrossClassExtractSearchParams(arguments map[string]interface{}) map[string]interface{} { + params, _ := fmp.ExtractSearchParams(arguments, "") + return params +} + +func (fmp *fakeModulesProvider) ExtractSearchParams(arguments map[string]interface{}, className string) (map[string]interface{}, map[string]*dto.TargetCombination) { + exractedParams := map[string]interface{}{} + if param, ok := arguments["nearText"]; ok { + exractedParams["nearText"] = extractNearTextParam(param.(map[string]interface{})) + } + return exractedParams, nil +} + +func (fmp *fakeModulesProvider) GetAdditionalFields(class *models.Class) map[string]*graphql.Field { + txt2vec := &mockText2vecContextionaryModule{} + additionalProperties := map[string]*graphql.Field{} + for name, additionalProperty := range txt2vec.AdditionalProperties() { + if additionalProperty.GraphQLFieldFunction != nil { + additionalProperties[name] = additionalProperty.GraphQLFieldFunction(class.Class) + } + } + return additionalProperties +} + +func (fmp *fakeModulesProvider) ExtractAdditionalField(className, name string, params []*ast.Argument) interface{} { + txt2vec := &mockText2vecContextionaryModule{} + if additionalProperties := txt2vec.AdditionalProperties(); len(additionalProperties) > 0 { + if additionalProperty, ok := additionalProperties[name]; ok { + if additionalProperty.GraphQLExtractFunction != nil { + return additionalProperty.GraphQLExtractFunction(params, nil) + } + } + } + return nil +} + +func (fmp *fakeModulesProvider) GetExploreAdditionalExtend(ctx context.Context, in []search.Result, + moduleParams map[string]interface{}, searchVector models.Vector, + argumentModuleParams 
map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + return fmp.additionalExtend(ctx, in, moduleParams, searchVector, "ExploreGet", argumentModuleParams, nil) +} + +func (fmp *fakeModulesProvider) additionalExtend(ctx context.Context, + in search.Results, moduleParams map[string]interface{}, + searchVector models.Vector, capability string, argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) (search.Results, error) { + txt2vec := &mockText2vecContextionaryModule{} + additionalProperties := txt2vec.AdditionalProperties() + for name, value := range moduleParams { + additionalPropertyFn := fmp.getAdditionalPropertyFn(additionalProperties[name], capability) + if additionalPropertyFn != nil && value != nil { + searchValue := value + if searchVectorValue, ok := value.(modulecapabilities.AdditionalPropertyWithSearchVector[[]float32]); ok { + searchVectorValue.SetSearchVector(searchVector.([]float32)) + searchValue = searchVectorValue + } + resArray, err := additionalPropertyFn(ctx, in, searchValue, nil, nil, nil) + if err != nil { + return nil, err + } + in = resArray + } + } + return in, nil +} + +func (fmp *fakeModulesProvider) getAdditionalPropertyFn(additionalProperty modulecapabilities.AdditionalProperty, + capability string, +) modulecapabilities.AdditionalPropertyFn { + switch capability { + case "ObjectGet": + return additionalProperty.SearchFunctions.ObjectGet + case "ObjectList": + return additionalProperty.SearchFunctions.ObjectList + case "ExploreGet": + return additionalProperty.SearchFunctions.ExploreGet + case "ExploreList": + return additionalProperty.SearchFunctions.ExploreList + default: + return nil + } +} + +func (fmp *fakeModulesProvider) GraphQLAdditionalFieldNames() []string { + txt2vec := &mockText2vecContextionaryModule{} + additionalPropertiesNames := []string{} + for _, additionalProperty := range txt2vec.AdditionalProperties() { + if additionalProperty.GraphQLNames != nil { + 
additionalPropertiesNames = append(additionalPropertiesNames, additionalProperty.GraphQLNames...) + } + } + return additionalPropertiesNames +} + +func extractNearTextParam(param map[string]interface{}) interface{} { + txt2vec := &mockText2vecContextionaryModule{} + argument := txt2vec.Arguments()["nearText"] + params, _, _ := argument.ExtractFunction(param) + return params +} + +func createArg(name string, value string) *ast.Argument { + n := ast.Name{ + Value: name, + } + val := ast.StringValue{ + Kind: "Kind", + Value: value, + } + arg := ast.Argument{ + Name: ast.NewName(&n), + Kind: "Kind", + Value: ast.NewStringValue(&val), + } + a := ast.NewArgument(&arg) + return a +} + +func extractAdditionalParam(name string, args []*ast.Argument) interface{} { + txt2vec := &mockText2vecContextionaryModule{} + additionalProperties := txt2vec.AdditionalProperties() + switch name { + case "semanticPath", "featureProjection": + if ap, ok := additionalProperties[name]; ok { + return ap.GraphQLExtractFunction(args, nil) + } + return nil + default: + return nil + } +} + +func getFakeModulesProvider() *fakeModulesProvider { + return &fakeModulesProvider{} +} + +type fakeAuthorizer struct{} + +func (f *fakeAuthorizer) Authorize(ctx context.Context, principal *models.Principal, action string, resource ...string) error { + return nil +} + +func (f *fakeAuthorizer) AuthorizeSilent(ctx context.Context, principal *models.Principal, action string, resource ...string) error { + return nil +} + +func (a *fakeAuthorizer) FilterAuthorizedResources(ctx context.Context, principal *models.Principal, verb string, resources ...string) ([]string, error) { + return resources, nil +} + +func getFakeAuthorizer() *fakeAuthorizer { + return &fakeAuthorizer{} +} + +func newMockResolver() *mockResolver { + logger, _ := test.NewNullLogger() + field, err := get.Build(&test_helper.SimpleSchema, logger, getFakeModulesProvider(), getFakeAuthorizer()) + if err != nil { + panic(fmt.Sprintf("could not build 
graphql test schema: %s", err)) + } + mocker := &mockResolver{} + mockLog := &mockRequestsLog{} + mocker.RootFieldName = "Get" + mocker.RootField = field + mocker.RootObject = map[string]interface{}{"Resolver": GetResolver(mocker), "RequestsLog": RequestsLog(mockLog)} + return mocker +} + +func newExploreMockResolver() *mockResolver { + field := explore.Build(test_helper.SimpleSchema.Objects, getFakeModulesProvider(), getFakeAuthorizer()) + mocker := &mockResolver{} + mockLog := &mockRequestsLog{} + mocker.RootFieldName = "Explore" + mocker.RootField = field + mocker.RootObject = map[string]interface{}{ + "Resolver": ExploreResolver(mocker), + "RequestsLog": mockLog, + } + return mocker +} + +func (m *mockResolver) GetClass(ctx context.Context, principal *models.Principal, + params dto.GetParams, +) ([]interface{}, error) { + args := m.Called(params) + return args.Get(0).([]interface{}), args.Error(1) +} + +func (m *mockResolver) Explore(ctx context.Context, + principal *models.Principal, params traverser.ExploreParams, +) ([]search.Result, error) { + args := m.Called(params) + return args.Get(0).([]search.Result), args.Error(1) +} + +// Resolver is a local abstraction of the required UC resolvers +type GetResolver interface { + GetClass(ctx context.Context, principal *models.Principal, info dto.GetParams) ([]interface{}, error) +} + +type ExploreResolver interface { + Explore(ctx context.Context, principal *models.Principal, params traverser.ExploreParams) ([]search.Result, error) +} + +// RequestsLog is a local abstraction on the RequestsLog that needs to be +// provided to the graphQL API in order to log Local.Get queries. 
+type RequestsLog interface { + Register(requestType string, identifier string) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/module.go new file mode 100644 index 0000000000000000000000000000000000000000..cad7552bfb7f41183b1973c6495fee781405fb9a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/module.go @@ -0,0 +1,278 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modcontextionary + +import ( + "context" + "net/http" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/adapters/handlers/rest/state" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + text2vecadditional "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional" + text2vecinterpretation "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/interpretation" + text2vecnn "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/nearestneighbors" + text2vecsempath "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/sempath" + text2vecclassification "github.com/weaviate/weaviate/modules/text2vec-contextionary/classification" + "github.com/weaviate/weaviate/modules/text2vec-contextionary/client" + "github.com/weaviate/weaviate/modules/text2vec-contextionary/concepts" + "github.com/weaviate/weaviate/modules/text2vec-contextionary/extensions" + "github.com/weaviate/weaviate/modules/text2vec-contextionary/vectorizer" + text2vecprojector 
"github.com/weaviate/weaviate/usecases/modulecomponents/additional/projector" + text2vecneartext "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +// MinimumRequiredRemoteVersion describes the minimal semver version +// (independent of the model version) of the remote model inference API +const ( + MinimumRequiredRemoteVersion = "1.0.0" + Name = "text2vec-contextionary" +) + +func New() *ContextionaryModule { + return &ContextionaryModule{} +} + +// ContextionaryModule for now only handles storage and retrieval of extensions, +// but with making Weaviate more modular, this should contain anything related +// to the module +type ContextionaryModule struct { + storageProvider moduletools.StorageProvider + extensions *extensions.RESTHandlers + concepts *concepts.RESTHandlers + vectorizer *vectorizer.Vectorizer + configValidator configValidator + graphqlProvider modulecapabilities.GraphQLArguments + additionalPropertiesProvider modulecapabilities.AdditionalProperties + searcher modulecapabilities.Searcher[[]float32] + remote remoteClient + classifierContextual modulecapabilities.Classifier + logger logrus.FieldLogger + nearTextTransformer modulecapabilities.TextTransform +} + +type remoteClient interface { + vectorizer.RemoteClient + extensions.Proxy + vectorizer.InspectorClient + text2vecsempath.Remote + modulecapabilities.MetaProvider + modulecapabilities.VectorizerClient + WaitForStartupAndValidateVersion(ctx context.Context, version string, + interval time.Duration) error +} + +type configValidator interface { + Do(ctx context.Context, class *models.Class, cfg moduletools.ClassConfig, + indexChecker vectorizer.IndexChecker) error +} + +func (m *ContextionaryModule) Name() string { + return Name +} + +func (m *ContextionaryModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2Vec +} + +func (m *ContextionaryModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.storageProvider 
= params.GetStorageProvider() + appState, ok := params.GetAppState().(*state.State) + if !ok { + return errors.Errorf("appState is not a *state.State") + } + + m.logger = appState.Logger + + url := appState.ServerConfig.Config.Contextionary.URL + remote, err := client.NewClient(url, m.logger) + if err != nil { + return errors.Wrap(err, "init remote client") + } + m.remote = remote + + if err := m.remote.WaitForStartupAndValidateVersion(ctx, + MinimumRequiredRemoteVersion, 1*time.Second); err != nil { + return errors.Wrap(err, "validate remote inference api") + } + + if err := m.initExtensions(); err != nil { + return errors.Wrap(err, "init extensions") + } + + if err := m.initConcepts(); err != nil { + return errors.Wrap(err, "init concepts") + } + + if err := m.initVectorizer(); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initGraphqlAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init graphql additional properties provider") + } + + if err := m.initClassifiers(); err != nil { + return errors.Wrap(err, "init classifiers") + } + + return nil +} + +func (m *ContextionaryModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initGraphqlProvider(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *ContextionaryModule) initExtensions() error { + storage, err := m.storageProvider.Storage("contextionary-extensions") + if err != nil { + return errors.Wrap(err, "initialize extensions storage") + } + + uc := extensions.NewUseCase(storage) + m.extensions = extensions.NewRESTHandlers(uc, m.remote) + + return nil +} + +func (m *ContextionaryModule) initConcepts() error { + uc := 
vectorizer.NewInspector(m.remote) + m.concepts = concepts.NewRESTHandlers(uc) + + return nil +} + +func (m *ContextionaryModule) initVectorizer() error { + m.vectorizer = vectorizer.New(m.remote) + m.configValidator = vectorizer.NewConfigValidator(m.remote, m.logger) + + m.searcher = text2vecneartext.NewSearcher(m.vectorizer) + + return nil +} + +func (m *ContextionaryModule) initGraphqlProvider() error { + m.graphqlProvider = text2vecneartext.New(m.nearTextTransformer) + return nil +} + +func (m *ContextionaryModule) initGraphqlAdditionalPropertiesProvider() error { + nnExtender := text2vecnn.NewExtender(m.remote) + featureProjector := text2vecprojector.New() + pathBuilder := text2vecsempath.New(m.remote) + interpretation := text2vecinterpretation.New() + m.additionalPropertiesProvider = text2vecadditional.New(nnExtender, featureProjector, pathBuilder, interpretation) + return nil +} + +func (m *ContextionaryModule) initClassifiers() error { + m.classifierContextual = text2vecclassification.New(m.remote) + return nil +} + +func (m *ContextionaryModule) RootHandler() http.Handler { + mux := http.NewServeMux() + + mux.Handle("/extensions-storage/", http.StripPrefix("/extensions-storage", + m.extensions.StorageHandler())) + mux.Handle("/extensions", http.StripPrefix("/extensions", + m.extensions.UserFacingHandler())) + mux.Handle("/concepts/", http.StripPrefix("/concepts", m.concepts.Handler())) + + return mux +} + +func (m *ContextionaryModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg) +} + +// VectorizeBatch is _slower_ if many requests are done in parallel. 
So do all objects sequentially +func (m *ContextionaryModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + vecs := make([][]float32, len(objs)) + addProps := make([]models.AdditionalProperties, len(objs)) + // error should be the exception so dont preallocate + errs := make(map[int]error, 0) + for i, obj := range objs { + if skipObject[i] { + continue + } + vec, addProp, err := m.vectorizer.Object(ctx, obj, cfg) + if err != nil { + errs[i] = err + continue + } + addProps[i] = addProp + vecs[i] = vec + } + + return vecs, addProps, errs +} + +func (m *ContextionaryModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +func (m *ContextionaryModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *ContextionaryModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +func (m *ContextionaryModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +func (m *ContextionaryModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, nil, nil +} + +func (m *ContextionaryModule) Classifiers() []modulecapabilities.Classifier { + return []modulecapabilities.Classifier{m.classifierContextual} +} + +func (m *ContextionaryModule) MetaInfo() (map[string]interface{}, error) { + return m.remote.MetaInfo() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.ModuleWithHTTPHandlers(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.InputVectorizer[[]float32](New()) +) diff --git 
a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/module_graphql_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/module_graphql_test.go new file mode 100644 index 0000000000000000000000000000000000000000..27dda724984aae46fedc845e64d0ef8c62002a82 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/module_graphql_test.go @@ -0,0 +1,969 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modcontextionary + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/dto" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/models" + helper "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/traverser" +) + +type testCase struct { + name string + query string + expectedParamsToTraverser traverser.ExploreParams + resolverReturn []search.Result + expectedResults []result +} + +type testCases []testCase + +type result struct { + pathToField []string + expectedValue interface{} +} + +func TestExtractAdditionalFields(t *testing.T) { + // We don't need to explicitly test every subselection as we did on + // phoneNumber as these fields have fixed keys. 
So we can simply check for + // the prop + + type test struct { + name string + query string + expectedParams dto.GetParams + resolverReturn interface{} + expectedResult interface{} + } + + tests := []test{ + { + name: "with _additional certainty", + query: "{ Get { SomeAction { _additional { certainty distance } } } }", + expectedParams: dto.GetParams{ + ClassName: "SomeAction", + AdditionalProperties: additional.Properties{ + Certainty: true, + Distance: true, + }, + }, + resolverReturn: []interface{}{ + map[string]interface{}{ + "_additional": map[string]interface{}{ + "certainty": 0.69, + "distance": helper.CertaintyToDist(t, 0.69), + }, + }, + }, + expectedResult: map[string]interface{}{ + "_additional": map[string]interface{}{ + "certainty": 0.69, + "distance": helper.CertaintyToDist(t, 0.69), + }, + }, + }, + { + name: "with _additional interpretation", + query: "{ Get { SomeAction { _additional { interpretation { source { concept weight occurrence } } } } } }", + expectedParams: dto.GetParams{ + ClassName: "SomeAction", + AdditionalProperties: additional.Properties{ + ModuleParams: map[string]interface{}{ + "interpretation": true, + }, + }, + }, + resolverReturn: []interface{}{ + map[string]interface{}{ + "_additional": map[string]interface{}{ + "interpretation": &models.Interpretation{ + Source: []*models.InterpretationSource{ + { + Concept: "foo", + Weight: 0.6, + Occurrence: 1200, + }, + { + Concept: "bar", + Weight: 0.9, + Occurrence: 800, + }, + }, + }, + }, + }, + }, + expectedResult: map[string]interface{}{ + "_additional": map[string]interface{}{ + "interpretation": map[string]interface{}{ + "source": []interface{}{ + map[string]interface{}{ + "concept": "foo", + "weight": 0.6, + "occurrence": 1200, + }, + map[string]interface{}{ + "concept": "bar", + "weight": 0.9, + "occurrence": 800, + }, + }, + }, + }, + }, + }, + { + name: "with _additional nearestNeighbors", + query: "{ Get { SomeAction { _additional { nearestNeighbors { neighbors { concept 
distance } } } } } }", + expectedParams: dto.GetParams{ + ClassName: "SomeAction", + AdditionalProperties: additional.Properties{ + ModuleParams: map[string]interface{}{ + "nearestNeighbors": true, + }, + }, + }, + resolverReturn: []interface{}{ + map[string]interface{}{ + "_additional": map[string]interface{}{ + "nearestNeighbors": &models.NearestNeighbors{ + Neighbors: []*models.NearestNeighbor{ + { + Concept: "foo", + Distance: 0.1, + }, + { + Concept: "bar", + Distance: 0.2, + }, + }, + }, + }, + }, + }, + expectedResult: map[string]interface{}{ + "_additional": map[string]interface{}{ + "nearestNeighbors": map[string]interface{}{ + "neighbors": []interface{}{ + map[string]interface{}{ + "concept": "foo", + "distance": float32(0.1), + }, + map[string]interface{}{ + "concept": "bar", + "distance": float32(0.2), + }, + }, + }, + }, + }, + }, + { + name: "with _additional featureProjection without any optional parameters", + query: "{ Get { SomeAction { _additional { featureProjection { vector } } } } }", + expectedParams: dto.GetParams{ + ClassName: "SomeAction", + AdditionalProperties: additional.Properties{ + ModuleParams: map[string]interface{}{ + "featureProjection": extractAdditionalParam("featureProjection", nil), + }, + }, + }, + resolverReturn: []interface{}{ + map[string]interface{}{ + "_additional": map[string]interface{}{ + "featureProjection": &models.FeatureProjection{ + Vector: []float32{0.0, 1.1, 2.2}, + }, + }, + }, + }, + expectedResult: map[string]interface{}{ + "_additional": map[string]interface{}{ + "featureProjection": map[string]interface{}{ + "vector": []interface{}{float32(0.0), float32(1.1), float32(2.2)}, + }, + }, + }, + }, + { + name: "with _additional featureProjection with optional parameters", + query: `{ Get { SomeAction { _additional { featureProjection(algorithm: "tsne", dimensions: 3, learningRate: 15, iterations: 100, perplexity: 10) { vector } } } } }`, + expectedParams: dto.GetParams{ + ClassName: "SomeAction", + 
AdditionalProperties: additional.Properties{ + ModuleParams: map[string]interface{}{ + "featureProjection": extractAdditionalParam("featureProjection", + []*ast.Argument{ + createArg("algorithm", "tsne"), + createArg("dimensions", "3"), + createArg("iterations", "100"), + createArg("learningRate", "15"), + createArg("perplexity", "10"), + }, + ), + }, + }, + }, + resolverReturn: []interface{}{ + map[string]interface{}{ + "_additional": map[string]interface{}{ + "featureProjection": &models.FeatureProjection{ + Vector: []float32{0.0, 1.1, 2.2}, + }, + }, + }, + }, + expectedResult: map[string]interface{}{ + "_additional": map[string]interface{}{ + "featureProjection": map[string]interface{}{ + "vector": []interface{}{float32(0.0), float32(1.1), float32(2.2)}, + }, + }, + }, + }, + { + name: "with _additional semanticPath set", + query: `{ Get { SomeAction { _additional { semanticPath { path { concept distanceToQuery distanceToResult distanceToPrevious distanceToNext } } } } } }`, + expectedParams: dto.GetParams{ + ClassName: "SomeAction", + AdditionalProperties: additional.Properties{ + ModuleParams: map[string]interface{}{ + "semanticPath": extractAdditionalParam("semanticPath", nil), + }, + }, + }, + resolverReturn: []interface{}{ + map[string]interface{}{ + "_additional": map[string]interface{}{ + "semanticPath": &models.SemanticPath{ + Path: []*models.SemanticPathElement{ + { + Concept: "foo", + DistanceToNext: ptFloat32(0.5), + DistanceToPrevious: nil, + DistanceToQuery: 0.1, + DistanceToResult: 0.1, + }, + { + Concept: "bar", + DistanceToPrevious: ptFloat32(0.5), + DistanceToNext: nil, + DistanceToQuery: 0.1, + DistanceToResult: 0.1, + }, + }, + }, + }, + }, + }, + expectedResult: map[string]interface{}{ + "_additional": map[string]interface{}{ + "semanticPath": map[string]interface{}{ + "path": []interface{}{ + map[string]interface{}{ + "concept": "foo", + "distanceToNext": float32(0.5), + "distanceToPrevious": nil, + "distanceToQuery": float32(0.1), + 
"distanceToResult": float32(0.1), + }, + map[string]interface{}{ + "concept": "bar", + "distanceToPrevious": float32(0.5), + "distanceToNext": nil, + "distanceToQuery": float32(0.1), + "distanceToResult": float32(0.1), + }, + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + resolver := newMockResolver() + + resolver.On("GetClass", test.expectedParams). + Return(test.resolverReturn, nil).Once() + result := resolver.AssertResolve(t, test.query) + assert.Equal(t, test.expectedResult, result.Get("Get", "SomeAction").Result.([]interface{})[0]) + }) + } +} + +func TestNearTextRanker(t *testing.T) { + t.Parallel() + + resolver := newMockResolver() + + t.Run("for actions", func(t *testing.T) { + query := `{ Get { SomeAction(nearText: { + concepts: ["c1", "c2", "c3"], + moveTo: { + concepts:["positive"], + force: 0.5 + }, + moveAwayFrom: { + concepts:["epic"], + force: 0.25 + } + }) { intField } } }` + + expectedParams := dto.GetParams{ + ClassName: "SomeAction", + Properties: []search.SelectProperty{{Name: "intField", IsPrimitive: true}}, + ModuleParams: map[string]interface{}{ + "nearText": extractNearTextParam(map[string]interface{}{ + "concepts": []interface{}{"c1", "c2", "c3"}, + "moveTo": map[string]interface{}{ + "concepts": []interface{}{"positive"}, + "force": float64(0.5), + }, + "moveAwayFrom": map[string]interface{}{ + "concepts": []interface{}{"epic"}, + "force": float64(0.25), + }, + }), + }, + } + + resolver.On("GetClass", expectedParams). 
+ Return([]interface{}{}, nil).Once() + + resolver.AssertResolve(t, query) + }) + + t.Run("for a class that does not have a text2vec module", func(t *testing.T) { + query := `{ Get { CustomVectorClass(nearText: { + concepts: ["c1", "c2", "c3"], + moveTo: { + concepts:["positive"], + force: 0.5 + }, + moveAwayFrom: { + concepts:["epic"], + force: 0.25 + } + }) { intField } } }` + + res := resolver.Resolve(query) + require.Len(t, res.Errors, 1) + assert.Contains(t, res.Errors[0].Message, "Unknown argument \"nearText\" on field \"CustomVectorClass\"") + }) + + t.Run("for things with optional distance set", func(t *testing.T) { + query := `{ Get { SomeThing(nearText: { + concepts: ["c1", "c2", "c3"], + distance: 0.6, + moveTo: { + concepts:["positive"], + force: 0.5 + }, + moveAwayFrom: { + concepts:["epic"], + force: 0.25 + } + }) { intField } } }` + + expectedParams := dto.GetParams{ + ClassName: "SomeThing", + Properties: []search.SelectProperty{{Name: "intField", IsPrimitive: true}}, + Pagination: &filters.Pagination{Limit: filters.LimitFlagSearchByDist}, + ModuleParams: map[string]interface{}{ + "nearText": extractNearTextParam(map[string]interface{}{ + "concepts": []interface{}{"c1", "c2", "c3"}, + "distance": float64(0.6), + "moveTo": map[string]interface{}{ + "concepts": []interface{}{"positive"}, + "force": float64(0.5), + }, + "moveAwayFrom": map[string]interface{}{ + "concepts": []interface{}{"epic"}, + "force": float64(0.25), + }, + }), + }, + } + resolver.On("GetClass", expectedParams). 
+ Return([]interface{}{}, nil).Once() + + resolver.AssertResolve(t, query) + }) + + t.Run("for things with optional certainty set", func(t *testing.T) { + query := `{ Get { SomeThing(nearText: { + concepts: ["c1", "c2", "c3"], + certainty: 0.4, + moveTo: { + concepts:["positive"], + force: 0.5 + }, + moveAwayFrom: { + concepts:["epic"], + force: 0.25 + } + }) { intField } } }` + + expectedParams := dto.GetParams{ + ClassName: "SomeThing", + Properties: []search.SelectProperty{{Name: "intField", IsPrimitive: true}}, + Pagination: &filters.Pagination{Limit: filters.LimitFlagSearchByDist}, + ModuleParams: map[string]interface{}{ + "nearText": extractNearTextParam(map[string]interface{}{ + "concepts": []interface{}{"c1", "c2", "c3"}, + "certainty": float64(0.4), + "moveTo": map[string]interface{}{ + "concepts": []interface{}{"positive"}, + "force": float64(0.5), + }, + "moveAwayFrom": map[string]interface{}{ + "concepts": []interface{}{"epic"}, + "force": float64(0.25), + }, + }), + }, + } + resolver.On("GetClass", expectedParams). 
+ Return([]interface{}{}, nil).Once() + + resolver.AssertResolve(t, query) + }) + + t.Run("for things with optional distance and objects set", func(t *testing.T) { + query := `{ Get { SomeThing(nearText: { + concepts: ["c1", "c2", "c3"], + distance: 0.4, + moveTo: { + concepts:["positive"], + force: 0.5 + objects: [ + { id: "moveTo-uuid1" } + { beacon: "weaviate://localhost/moveTo-uuid3" } + ] + }, + moveAwayFrom: { + concepts:["epic"], + force: 0.25 + objects: [ + { id: "moveAway-uuid1" } + { beacon: "weaviate://localhost/moveAway-uuid2" } + { beacon: "weaviate://localhost/moveAway-uuid3" } + ] + } + }) { intField } } }` + + expectedParams := dto.GetParams{ + ClassName: "SomeThing", + Properties: []search.SelectProperty{{Name: "intField", IsPrimitive: true}}, + Pagination: &filters.Pagination{Limit: filters.LimitFlagSearchByDist}, + ModuleParams: map[string]interface{}{ + "nearText": extractNearTextParam(map[string]interface{}{ + "concepts": []interface{}{"c1", "c2", "c3"}, + "distance": float64(0.4), + "moveTo": map[string]interface{}{ + "concepts": []interface{}{"positive"}, + "force": float64(0.5), + "objects": []interface{}{ + map[string]interface{}{ + "id": "moveTo-uuid1", + }, + map[string]interface{}{ + "beacon": "weaviate://localhost/moveTo-uuid3", + }, + }, + }, + "moveAwayFrom": map[string]interface{}{ + "concepts": []interface{}{"epic"}, + "force": float64(0.25), + "objects": []interface{}{ + map[string]interface{}{ + "id": "moveAway-uuid1", + }, + map[string]interface{}{ + "beacon": "weaviate://localhost/moveAway-uuid2", + }, + map[string]interface{}{ + "beacon": "weaviate://localhost/moveAway-uuid3", + }, + }, + }, + }), + }, + } + resolver.On("GetClass", expectedParams). 
+ Return([]interface{}{}, nil).Once() + + resolver.AssertResolve(t, query) + }) + + t.Run("for things with optional certainty and objects set", func(t *testing.T) { + query := `{ Get { SomeThing(nearText: { + concepts: ["c1", "c2", "c3"], + certainty: 0.4, + moveTo: { + concepts:["positive"], + force: 0.5 + objects: [ + { id: "moveTo-uuid1" } + { beacon: "weaviate://localhost/moveTo-uuid3" } + ] + }, + moveAwayFrom: { + concepts:["epic"], + force: 0.25 + objects: [ + { id: "moveAway-uuid1" } + { beacon: "weaviate://localhost/moveAway-uuid2" } + { beacon: "weaviate://localhost/moveAway-uuid3" } + ] + } + }) { intField } } }` + + expectedParams := dto.GetParams{ + ClassName: "SomeThing", + Properties: []search.SelectProperty{{Name: "intField", IsPrimitive: true}}, + Pagination: &filters.Pagination{Limit: filters.LimitFlagSearchByDist}, + ModuleParams: map[string]interface{}{ + "nearText": extractNearTextParam(map[string]interface{}{ + "concepts": []interface{}{"c1", "c2", "c3"}, + "certainty": float64(0.4), + "moveTo": map[string]interface{}{ + "concepts": []interface{}{"positive"}, + "force": float64(0.5), + "objects": []interface{}{ + map[string]interface{}{ + "id": "moveTo-uuid1", + }, + map[string]interface{}{ + "beacon": "weaviate://localhost/moveTo-uuid3", + }, + }, + }, + "moveAwayFrom": map[string]interface{}{ + "concepts": []interface{}{"epic"}, + "force": float64(0.25), + "objects": []interface{}{ + map[string]interface{}{ + "id": "moveAway-uuid1", + }, + map[string]interface{}{ + "beacon": "weaviate://localhost/moveAway-uuid2", + }, + map[string]interface{}{ + "beacon": "weaviate://localhost/moveAway-uuid3", + }, + }, + }, + }), + }, + } + resolver.On("GetClass", expectedParams). 
+ Return([]interface{}{}, nil).Once() + + resolver.AssertResolve(t, query) + }) +} + +func Test_ResolveExplore(t *testing.T) { + t.Parallel() + + testsNearText := testCases{ + testCase{ + name: "Resolve Explore with nearText", + query: ` + { + Explore(nearText: {concepts: ["car", "best brand"]}) { + beacon className certainty distance + } + }`, + expectedParamsToTraverser: traverser.ExploreParams{ + ModuleParams: map[string]interface{}{ + "nearText": extractNearTextParam(map[string]interface{}{ + "concepts": []interface{}{"car", "best brand"}, + }), + }, + WithCertaintyProp: true, + }, + resolverReturn: []search.Result{ + { + Beacon: "weaviate://localhost/some-uuid", + ClassName: "bestClass", + Certainty: 0.7, + Dist: helper.CertaintyToDist(t, 0.7), + }, + }, + expectedResults: []result{{ + pathToField: []string{"Explore"}, + expectedValue: []interface{}{ + map[string]interface{}{ + "beacon": "weaviate://localhost/some-uuid", + "className": "bestClass", + "certainty": float32(0.7), + "distance": helper.CertaintyToDist(t, 0.7), + }, + }, + }}, + }, + + testCase{ + name: "with nearText with optional limit and distance set", + query: ` + { + Explore( + nearText: {concepts: ["car", "best brand"], distance: 0.6}, limit: 17 + ){ + beacon className + } + }`, + expectedParamsToTraverser: traverser.ExploreParams{ + ModuleParams: map[string]interface{}{ + "nearText": extractNearTextParam(map[string]interface{}{ + "concepts": []interface{}{"car", "best brand"}, + "distance": float64(0.6), + }), + }, + Limit: 17, + }, + resolverReturn: []search.Result{ + { + Beacon: "weaviate://localhost/some-uuid", + ClassName: "bestClass", + Dist: 0.6, + }, + }, + expectedResults: []result{{ + pathToField: []string{"Explore"}, + expectedValue: []interface{}{ + map[string]interface{}{ + "beacon": "weaviate://localhost/some-uuid", + "className": "bestClass", + }, + }, + }}, + }, + + testCase{ + name: "with nearText with optional limit and certainty set", + query: ` + { + Explore( + nearText: 
{concepts: ["car", "best brand"], certainty: 0.6}, limit: 17 + ){ + beacon className + } + }`, + expectedParamsToTraverser: traverser.ExploreParams{ + ModuleParams: map[string]interface{}{ + "nearText": extractNearTextParam(map[string]interface{}{ + "concepts": []interface{}{"car", "best brand"}, + "certainty": float64(0.6), + }), + }, + Limit: 17, + }, + resolverReturn: []search.Result{ + { + Beacon: "weaviate://localhost/some-uuid", + ClassName: "bestClass", + }, + }, + expectedResults: []result{{ + pathToField: []string{"Explore"}, + expectedValue: []interface{}{ + map[string]interface{}{ + "beacon": "weaviate://localhost/some-uuid", + "className": "bestClass", + }, + }, + }}, + }, + + testCase{ + name: "with moveTo set", + query: ` + { + Explore( + limit: 17 + nearText: { + concepts: ["car", "best brand"] + moveTo: { + concepts: ["mercedes"] + force: 0.7 + } + } + ) { + beacon className + } + }`, + expectedParamsToTraverser: traverser.ExploreParams{ + Limit: 17, + ModuleParams: map[string]interface{}{ + "nearText": extractNearTextParam(map[string]interface{}{ + "concepts": []interface{}{"car", "best brand"}, + "moveTo": map[string]interface{}{ + "concepts": []interface{}{"mercedes"}, + "force": float64(0.7), + }, + }), + }, + }, + resolverReturn: []search.Result{ + { + Beacon: "weaviate://localhost/some-uuid", + ClassName: "bestClass", + }, + }, + expectedResults: []result{{ + pathToField: []string{"Explore"}, + expectedValue: []interface{}{ + map[string]interface{}{ + "beacon": "weaviate://localhost/some-uuid", + "className": "bestClass", + }, + }, + }}, + }, + + testCase{ + name: "with moveTo and moveAwayFrom set", + query: ` + { + Explore( + limit: 17 + nearText: { + concepts: ["car", "best brand"] + moveTo: { + concepts: ["mercedes"] + force: 0.7 + } + moveAwayFrom: { + concepts: ["van"] + force: 0.7 + } + } + ) { + beacon className + } + }`, + expectedParamsToTraverser: traverser.ExploreParams{ + Limit: 17, + ModuleParams: map[string]interface{}{ + 
"nearText": extractNearTextParam(map[string]interface{}{ + "concepts": []interface{}{"car", "best brand"}, + "moveTo": map[string]interface{}{ + "concepts": []interface{}{"mercedes"}, + "force": float64(0.7), + }, + "moveAwayFrom": map[string]interface{}{ + "concepts": []interface{}{"van"}, + "force": float64(0.7), + }, + }), + }, + }, + resolverReturn: []search.Result{ + { + Beacon: "weaviate://localhost/some-uuid", + ClassName: "bestClass", + }, + }, + expectedResults: []result{{ + pathToField: []string{"Explore"}, + expectedValue: []interface{}{ + map[string]interface{}{ + "beacon": "weaviate://localhost/some-uuid", + "className": "bestClass", + }, + }, + }}, + }, + + testCase{ + name: "with moveTo and objects set", + query: ` + { + Explore( + limit: 17 + nearText: { + concepts: ["car", "best brand"] + moveTo: { + concepts: ["mercedes"] + force: 0.7 + objects: [ + {id: "moveto-uuid"}, + {beacon: "weaviate://localhost/other-moveto-uuid"}, + ] + } + } + ) { + beacon className + } + }`, + expectedParamsToTraverser: traverser.ExploreParams{ + Limit: 17, + ModuleParams: map[string]interface{}{ + "nearText": extractNearTextParam(map[string]interface{}{ + "concepts": []interface{}{"car", "best brand"}, + "moveTo": map[string]interface{}{ + "concepts": []interface{}{"mercedes"}, + "force": float64(0.7), + "objects": []interface{}{ + map[string]interface{}{ + "id": "moveto-uuid", + }, + map[string]interface{}{ + "beacon": "weaviate://localhost/other-moveto-uuid", + }, + }, + }, + }), + }, + }, + resolverReturn: []search.Result{ + { + Beacon: "weaviate://localhost/some-uuid", + ClassName: "bestClass", + }, + }, + expectedResults: []result{{ + pathToField: []string{"Explore"}, + expectedValue: []interface{}{ + map[string]interface{}{ + "beacon": "weaviate://localhost/some-uuid", + "className": "bestClass", + }, + }, + }}, + }, + + testCase{ + name: "with moveTo and moveAwayFrom and objects set", + query: ` + { + Explore( + limit: 17 + nearText: { + concepts: ["car", "best 
brand"] + moveTo: { + concepts: ["mercedes"] + force: 0.7 + objects: [ + {id: "moveto-uuid1"}, + {beacon: "weaviate://localhost/moveto-uuid2"}, + ] + } + moveAwayFrom: { + concepts: ["van"] + force: 0.7 + objects: [ + {id: "moveAway-uuid1"}, + {beacon: "weaviate://localhost/moveAway-uuid2"}, + {id: "moveAway-uuid3"}, + {id: "moveAway-uuid4"}, + ] + } + } + ) { + beacon className + } + }`, + expectedParamsToTraverser: traverser.ExploreParams{ + Limit: 17, + ModuleParams: map[string]interface{}{ + "nearText": extractNearTextParam(map[string]interface{}{ + "concepts": []interface{}{"car", "best brand"}, + "moveTo": map[string]interface{}{ + "concepts": []interface{}{"mercedes"}, + "force": float64(0.7), + "objects": []interface{}{ + map[string]interface{}{ + "id": "moveto-uuid1", + }, + map[string]interface{}{ + "beacon": "weaviate://localhost/moveto-uuid2", + }, + }, + }, + "moveAwayFrom": map[string]interface{}{ + "concepts": []interface{}{"van"}, + "force": float64(0.7), + "objects": []interface{}{ + map[string]interface{}{ + "id": "moveAway-uuid1", + }, + map[string]interface{}{ + "beacon": "weaviate://localhost/moveAway-uuid2", + }, + map[string]interface{}{ + "id": "moveAway-uuid3", + }, + map[string]interface{}{ + "id": "moveAway-uuid4", + }, + }, + }, + }), + }, + }, + resolverReturn: []search.Result{ + { + Beacon: "weaviate://localhost/some-uuid", + ClassName: "bestClass", + }, + }, + expectedResults: []result{{ + pathToField: []string{"Explore"}, + expectedValue: []interface{}{ + map[string]interface{}{ + "beacon": "weaviate://localhost/some-uuid", + "className": "bestClass", + }, + }, + }}, + }, + } + + testsNearText.AssertExtraction(t, newExploreMockResolver()) +} + +func (tests testCases) AssertExtraction(t *testing.T, resolver *mockResolver) { + for _, testCase := range tests { + t.Run(testCase.name, func(t *testing.T) { + resolver.On("Explore", testCase.expectedParamsToTraverser). 
+ Return(testCase.resolverReturn, nil).Once() + + result := resolver.AssertResolve(t, testCase.query) + + for _, expectedResult := range testCase.expectedResults { + value := result.Get(expectedResult.pathToField...).Result + + assert.Equal(t, expectedResult.expectedValue, value) + } + }) + } +} + +func ptFloat32(in float32) *float32 { + return &in +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1c9e8593e6d85ab07d47b85aa8deca6ce637c772 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/fakes_for_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/weaviate/weaviate/entities/schema" + txt2vecmodels "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/models" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClient struct { + lastInput []string +} + +func (c *fakeClient) VectorForCorpi(ctx context.Context, corpi []string, overrides map[string]string) ([]float32, []txt2vecmodels.InterpretationSource, error) { + c.lastInput = corpi + return []float32{0, 1, 2, 3}, nil, nil +} + +func (c *fakeClient) VectorForWord(ctx context.Context, word string) ([]float32, error) { + c.lastInput = []string{word} + return []float32{3, 2, 1, 0}, nil +} + +func (c *fakeClient) NearestWordsByVector(ctx context.Context, + vector []float32, n int, k int, +) ([]string, []float32, error) { + return []string{"word1", "word2"}, []float32{0.1, 0.2}, nil +} + +func (c *fakeClient) IsWordPresent(ctx context.Context, word string) (bool, error) { + return true, nil +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + vectorizeClassName bool + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + classSettings := map[string]interface{}{ + "vectorizeClassName": f.vectorizeClassName, + } + return classSettings +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f 
fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/index_check.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/index_check.go new file mode 100644 index 0000000000000000000000000000000000000000..3c98e287416f0708d5a6bb3561bbef9733a4fd02 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/index_check.go @@ -0,0 +1,31 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewIndexChecker(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, true)} +} + +func (ic *classSettings) Validate(class *models.Class) error { + return ic.BaseClassSettings.ValidateClassSettings() +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/index_check_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/index_check_test.go new file mode 100644 index 0000000000000000000000000000000000000000..0a5d4ae1b3db307edaea9066fa02f15e88f3fad4 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/index_check_test.go @@ -0,0 +1,92 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/modules" +) + +func TestIndexChecker(t *testing.T) { + t.Run("with all defaults", func(t *testing.T) { + class := &models.Class{ + Class: "MyClass", + Properties: []*models.Property{{ + Name: "someProp", + }}, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", "", nil) + ic := NewIndexChecker(cfg) + + assert.True(t, ic.PropertyIndexed("someProp")) + assert.False(t, ic.VectorizePropertyName("someProp")) + assert.True(t, ic.VectorizeClassName()) + }) + + t.Run("with all explicit config matching the defaults", func(t *testing.T) { + class := &models.Class{ + Class: "MyClass", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{{ + Name: "someProp", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "skip": false, + "vectorizePropertyName": false, + }, + }, + }}, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", "", nil) + ic := NewIndexChecker(cfg) + + assert.True(t, ic.PropertyIndexed("someProp")) + assert.False(t, ic.VectorizePropertyName("someProp")) + assert.True(t, ic.VectorizeClassName()) + }) + + t.Run("with all explicit config using non-default values", func(t *testing.T) { + class := &models.Class{ + Class: "MyClass", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + 
"vectorizeClassName": false, + }, + }, + Properties: []*models.Property{{ + Name: "someProp", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "skip": true, + "vectorizePropertyName": true, + }, + }, + }}, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", "", nil) + ic := NewIndexChecker(cfg) + + assert.False(t, ic.PropertyIndexed("someProp")) + assert.True(t, ic.VectorizePropertyName("someProp")) + assert.False(t, ic.VectorizeClassName()) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/inspector.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/inspector.go new file mode 100644 index 0000000000000000000000000000000000000000..ba342adedf2a4cc1471eb40150e6575b24ca09d5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/inspector.go @@ -0,0 +1,252 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "strings" + "unicode" + "unicode/utf8" + + "github.com/weaviate/weaviate/entities/models" + txt2vecmodels "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/models" +) + +type InspectorClient interface { + VectorForWord(ctx context.Context, word string) ([]float32, error) + VectorForCorpi(ctx context.Context, words []string, + overrides map[string]string) ([]float32, []txt2vecmodels.InterpretationSource, error) + NearestWordsByVector(ctx context.Context, vector []float32, n int, k int) ([]string, []float32, error) + IsWordPresent(ctx context.Context, word string) (bool, error) +} + +type Inspector struct { + client InspectorClient +} + +func NewInspector(client InspectorClient) *Inspector { + return &Inspector{client: client} +} + +func (i *Inspector) GetWords(ctx context.Context, words string) (*models.C11yWordsResponse, error) { + wordArray, err := i.validateAndSplit(words) + if err != nil { + return nil, err + } + + concatWord, err := i.concatWord(ctx, words, wordArray) + if err != nil { + return nil, err + } + + individualWords, err := i.individualWords(ctx, wordArray) + if err != nil { + return nil, err + } + + return &models.C11yWordsResponse{ + ConcatenatedWord: concatWord, + IndividualWords: individualWords, + }, nil +} + +func (i *Inspector) validateAndSplit(words string) ([]string, error) { + // set first character to lowercase + wordChars := []rune(words) + wordChars[0] = unicode.ToLower(wordChars[0]) + words = string(wordChars) + + for _, r := range words { + if !unicode.IsLetter(r) && !unicode.IsNumber(r) { + return nil, fmt.Errorf("invalid word input: words must only contain unicode letters and digits") + } + } + + return split(words), nil +} + +func (i *Inspector) concatWord(ctx context.Context, words string, + wordArray []string, +) (*models.C11yWordsResponseConcatenatedWord, error) { + if len(wordArray) < 2 { + // only build a concat 
response if we have more than a single word + return nil, nil + } + + // join the words into a single corpus. While the contextionary also supports + // building a centroid from multiple corpi (thus []string for Corpi, an + // occurrence-based weighing can only happen within a corpus. It is thus - by + // far - preferable in this case, to concat the words into one corpus, rather + // than treating each word as its own. + corpus := strings.Join(wordArray, " ") + vector, _, err := i.client.VectorForCorpi(ctx, []string{corpus}, nil) + if err != nil { + return nil, err + } + + nearestNeighbors, err := i.nearestNeighbors(ctx, vector) + if err != nil { + return nil, err + } + + return &models.C11yWordsResponseConcatenatedWord{ + ConcatenatedWord: words, + SingleWords: wordArray, + ConcatenatedVector: vector, + ConcatenatedNearestNeighbors: nearestNeighbors, + }, nil +} + +func (i *Inspector) nearestNeighbors(ctx context.Context, + vector []float32, +) ([]*models.C11yNearestNeighborsItems0, error) { + // relate words of centroid + words, dists, err := i.client.NearestWordsByVector(ctx, vector, 12, 32) + if err != nil { + return nil, err + } + + nearestNeighbors := []*models.C11yNearestNeighborsItems0{} + + // loop over NN Idx' and append to the return object + for i, word := range words { + item := models.C11yNearestNeighborsItems0{ + Word: word, + Distance: dists[i], + } + + nearestNeighbors = append(nearestNeighbors, &item) + } + + return nearestNeighbors, nil +} + +func (i *Inspector) individualWords(ctx context.Context, + wordArray []string, +) ([]*models.C11yWordsResponseIndividualWordsItems0, error) { + var res []*models.C11yWordsResponseIndividualWordsItems0 + + for _, word := range wordArray { + iw, err := i.individualWord(ctx, word) + if err != nil { + return nil, fmt.Errorf("word '%s': %w", word, err) + } + + res = append(res, iw) + } + + return res, nil +} + +func (i *Inspector) individualWord(ctx context.Context, + word string, +) 
(*models.C11yWordsResponseIndividualWordsItems0, error) { + ok, err := i.client.IsWordPresent(ctx, word) + if err != nil { + return nil, fmt.Errorf("could not check word presence: %w", err) + } + + if !ok { + return i.individualWordNotPresent(word), nil + } + + return i.individualWordPresent(ctx, word) +} + +func (i *Inspector) individualWordNotPresent(word string) *models.C11yWordsResponseIndividualWordsItems0 { + return &models.C11yWordsResponseIndividualWordsItems0{ + Word: word, + Present: false, + } +} + +func (i *Inspector) individualWordPresent(ctx context.Context, + word string, +) (*models.C11yWordsResponseIndividualWordsItems0, error) { + info, err := i.individualWordInfo(ctx, word) + if err != nil { + return nil, err + } + + return &models.C11yWordsResponseIndividualWordsItems0{ + Word: word, + Present: true, + Info: info, + }, nil +} + +func (i *Inspector) individualWordInfo(ctx context.Context, + word string, +) (*models.C11yWordsResponseIndividualWordsItems0Info, error) { + vector, err := i.client.VectorForWord(ctx, word) + if err != nil { + return nil, err + } + + nns, err := i.nearestNeighbors(ctx, vector) + if err != nil { + return nil, err + } + + return &models.C11yWordsResponseIndividualWordsItems0Info{ + Vector: vector, + NearestNeighbors: nns, + }, nil +} + +// Splits a CamelCase string to an array +// Based on: https://github.com/fatih/camelcase +func split(src string) (entries []string) { + // don't split invalid utf8 + if !utf8.ValidString(src) { + return []string{src} + } + entries = []string{} + var runes [][]rune + lastClass := 0 + class := 0 + // split into fields based on class of unicode character + for _, r := range src { + switch true { + case unicode.IsLower(r): + class = 1 + case unicode.IsUpper(r): + class = 2 + case unicode.IsDigit(r): + class = 1 + default: + class = 4 + } + if class == lastClass { + runes[len(runes)-1] = append(runes[len(runes)-1], r) + } else { + runes = append(runes, []rune{r}) + } + lastClass = class + } + 
// handle upper case -> lower case sequences, e.g. + // "PDFL", "oader" -> "PDF", "Loader" + for i := 0; i < len(runes)-1; i++ { + if unicode.IsUpper(runes[i][0]) && unicode.IsLower(runes[i+1][0]) { + runes[i+1] = append([]rune{runes[i][len(runes[i])-1]}, runes[i+1]...) + runes[i] = runes[i][:len(runes[i])-1] + } + } + // construct []string from results + for _, s := range runes { + if len(s) > 0 { + entries = append(entries, strings.ToLower(string(s))) + } + } + return +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/inspector_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/inspector_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5eac90cc814bde9ce3e6606ab5daa24ecd1c68a9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/inspector_test.go @@ -0,0 +1,190 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +func TestInspector(t *testing.T) { + type test struct { + name string + input string + expectedErr error + expectedOutput *models.C11yWordsResponse + } + + tests := []test{ + { + name: "invalid input", + input: "i don't like pizza", + expectedErr: fmt.Errorf("invalid word input: words must only contain unicode letters and digits"), + }, + { + name: "single valid word", + input: "pizza", + expectedOutput: &models.C11yWordsResponse{ + IndividualWords: []*models.C11yWordsResponseIndividualWordsItems0{ + { + Present: true, + Word: "pizza", + Info: &models.C11yWordsResponseIndividualWordsItems0Info{ + Vector: []float32{3, 2, 1, 0}, + NearestNeighbors: []*models.C11yNearestNeighborsItems0{ + { + Distance: 0.1, + Word: "word1", + }, + { + Distance: 0.2, + Word: "word2", + }, + }, + }, + }, + }, + }, + }, + { + name: "single valid word containing numbers", + input: "pi55a", + expectedOutput: &models.C11yWordsResponse{ + IndividualWords: []*models.C11yWordsResponseIndividualWordsItems0{ + { + Present: true, + Word: "pi55a", + Info: &models.C11yWordsResponseIndividualWordsItems0Info{ + Vector: []float32{3, 2, 1, 0}, + NearestNeighbors: []*models.C11yNearestNeighborsItems0{ + { + Distance: 0.1, + Word: "word1", + }, + { + Distance: 0.2, + Word: "word2", + }, + }, + }, + }, + }, + }, + }, + { + name: "concatenated words", + input: "pizzaBakerMakerShaker", + expectedOutput: &models.C11yWordsResponse{ + ConcatenatedWord: &models.C11yWordsResponseConcatenatedWord{ + ConcatenatedWord: "pizzaBakerMakerShaker", + SingleWords: []string{"pizza", "baker", "maker", "shaker"}, + ConcatenatedVector: []float32{0, 1, 2, 3}, + ConcatenatedNearestNeighbors: []*models.C11yNearestNeighborsItems0{ + { + Distance: 0.1, + Word: "word1", + }, + { + Distance: 
0.2, + Word: "word2", + }, + }, + }, + IndividualWords: []*models.C11yWordsResponseIndividualWordsItems0{ + { + Present: true, + Word: "pizza", + Info: &models.C11yWordsResponseIndividualWordsItems0Info{ + Vector: []float32{3, 2, 1, 0}, + NearestNeighbors: []*models.C11yNearestNeighborsItems0{ + { + Distance: 0.1, + Word: "word1", + }, + { + Distance: 0.2, + Word: "word2", + }, + }, + }, + }, + { + Present: true, + Word: "baker", + Info: &models.C11yWordsResponseIndividualWordsItems0Info{ + Vector: []float32{3, 2, 1, 0}, + NearestNeighbors: []*models.C11yNearestNeighborsItems0{ + { + Distance: 0.1, + Word: "word1", + }, + { + Distance: 0.2, + Word: "word2", + }, + }, + }, + }, + { + Present: true, + Word: "maker", + Info: &models.C11yWordsResponseIndividualWordsItems0Info{ + Vector: []float32{3, 2, 1, 0}, + NearestNeighbors: []*models.C11yNearestNeighborsItems0{ + { + Distance: 0.1, + Word: "word1", + }, + { + Distance: 0.2, + Word: "word2", + }, + }, + }, + }, + { + Present: true, + Word: "shaker", + Info: &models.C11yWordsResponseIndividualWordsItems0Info{ + Vector: []float32{3, 2, 1, 0}, + NearestNeighbors: []*models.C11yNearestNeighborsItems0{ + { + Distance: 0.1, + Word: "word1", + }, + { + Distance: 0.2, + Word: "word2", + }, + }, + }, + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + client := &fakeClient{} + i := NewInspector(client) + res, err := i.GetWords(context.Background(), test.input) + require.Equal(t, err, test.expectedErr) + assert.Equal(t, res, test.expectedOutput) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/noop.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/noop.go new file mode 100644 index 0000000000000000000000000000000000000000..095495b16ffe04fa9771d43690c77690828cffb1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/noop.go @@ -0,0 +1,53 @@ +// _ 
//                           _       _
//  __      _____  __ ___   ___  __ _| |_ ___
//  \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
//   \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
//    \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
//
//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
//  CONTACT: hello@weaviate.io
//

package vectorizer

import (
	"context"

	"github.com/weaviate/weaviate/entities/models"
)

// NoOpVectorizer is a simple stand in that does nothing. Can be used when the
// feature should be turned off overall
type NoOpVectorizer struct{}

// Corpi is not implemented in the NoOpVectorizer: it always returns an empty
// vector and no error.
func (n *NoOpVectorizer) Corpi(ctx context.Context, corpi []string) ([]float32, error) {
	return []float32{}, nil
}

// MoveTo is not implemented in the NoOpVectorizer: it always returns an empty
// vector and no error.
func (n *NoOpVectorizer) MoveTo(source []float32, target []float32, weight float32) ([]float32, error) {
	return []float32{}, nil
}

// MoveAwayFrom is not implemented in the NoOpVectorizer: it always returns an
// empty vector and no error.
func (n *NoOpVectorizer) MoveAwayFrom(source []float32, target []float32, weight float32) ([]float32, error) {
	return []float32{}, nil
}

// NormalizedDistance is not implemented in the NoOpVectorizer: it always
// returns 0 and no error.
func (n *NoOpVectorizer) NormalizedDistance(a, b []float32) (float32, error) {
	return 0, nil
}

// Object is not implemented in the NoOpVectorizer: it always returns an empty
// vector and no error.
func (n *NoOpVectorizer) Object(ctx context.Context, concept *models.Object) ([]float32, error) {
	return []float32{}, nil
}

// NewNoOp creates a new NoOpVectorizer which can be used when no vectorization
// is desired, i.e. the feature is turned off completely
func NewNoOp() *NoOpVectorizer {
	return &NoOpVectorizer{}
}
diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/schema_config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/schema_config.go
new file mode 100644
index 0000000000000000000000000000000000000000..1c9c32a15296692e1a2b7ff74470eb3280fc94fb
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/schema_config.go
@@ -0,0 +1,236 @@
//                           _       _
//  __      _____  __ ___   ___  __ _| |_ ___
//  \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
//   \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
//    \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
//
//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
//  CONTACT: hello@weaviate.io
//

package vectorizer

import (
	"context"
	"fmt"
	"strings"

	"github.com/fatih/camelcase"
	"github.com/pkg/errors"
	"github.com/sirupsen/logrus"
	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/entities/schema"
)

// ConfigValidator checks that a class definition is usable with the
// text2vec-contextionary module, using the remote client to verify that
// class/property names are made of contextionary-known words.
type ConfigValidator struct {
	remote RemoteClient
	logger logrus.FieldLogger
}

// IndexChecker reports which parts of a class (class name, property names,
// property values) participate in vectorization/indexing.
type IndexChecker interface {
	VectorizeClassName() bool
	VectorizePropertyName(propName string) bool
	PropertyIndexed(propName string) bool
}

// RemoteClient is the subset of the contextionary client needed for schema
// validation: stopword and word-presence lookups.
type RemoteClient interface {
	IsStopWord(ctx context.Context, word string) (bool, error)
	IsWordPresent(ctx context.Context, word string) (bool, error)
}

// NewConfigValidator builds a ConfigValidator from a remote contextionary
// client and a logger.
func NewConfigValidator(rc RemoteClient,
	logger logrus.FieldLogger,
) *ConfigValidator {
	return &ConfigValidator{remote: rc, logger: logger}
}

// Do validates the class name, every indexed property name, and the overall
// index state of the class; it also warns (without failing) when the
// configuration risks producing identical vectors for every object.
func (cv *ConfigValidator) Do(ctx context.Context, class *models.Class,
	cfg moduletools.ClassConfig, icheck IndexChecker,
) error {
	err := cv.validateClassName(ctx, class.Class, icheck.VectorizeClassName())
	if err != nil {
		return fmt.Errorf("invalid class name: %w", err)
	}

	// validate each property name that takes part in indexing
	for _,
prop := range class.Properties {
		if !icheck.PropertyIndexed(prop.Name) {
			continue
		}

		err = cv.validatePropertyName(ctx, prop.Name,
			icheck.VectorizePropertyName(prop.Name))
		if err != nil {
			return errors.Wrapf(err, "class %q: invalid property name", class.Class)
		}
	}

	if err := cv.validateIndexState(ctx, class, icheck); err != nil {
		return errors.Wrap(err, "invalid combination of properties")
	}

	cv.checkForPossibilityOfDuplicateVectors(ctx, class, icheck)

	return nil
}

// validateWords camel-splits name, lowercases every part, and checks each
// part against the contextionary. The first non-stopword part missing from
// the contextionary is reported via notFound(word); if every part is a
// stopword, allStopwords() is reported instead. Shared by the class-name and
// property-name validations, which differ only in their error wording.
func (cv *ConfigValidator) validateWords(ctx context.Context, name string,
	notFound func(word string) error, allStopwords func() error,
) error {
	camelParts := camelcase.Split(name)
	stopWordsFound := 0
	for _, part := range camelParts {
		word := strings.ToLower(part)
		sw, err := cv.remote.IsStopWord(ctx, word)
		if err != nil {
			return fmt.Errorf("check stopword: %w", err)
		}

		if sw {
			// stopwords don't have to be present, but they also don't count
			// towards the "at least one meaningful word" requirement
			stopWordsFound++
			continue
		}

		present, err := cv.remote.IsWordPresent(ctx, word)
		if err != nil {
			return fmt.Errorf("check word presence: %w", err)
		}

		if !present {
			return notFound(word)
		}
	}

	if len(camelParts) == stopWordsFound {
		return allStopwords()
	}

	return nil
}

// validateClassName ensures the (camel-split) class name consists of
// contextionary-known words, at least one of which is not a stopword. Skipped
// entirely when the class name is excluded from vectorization.
func (cv *ConfigValidator) validateClassName(ctx context.Context, className string,
	vectorizeClass bool,
) error {
	// class name
	if !vectorizeClass {
		// if the user chooses not to vectorize the class, we don't need to check
		// if its c11y-valid or not
		return nil
	}

	return cv.validateWords(ctx, className,
		func(word string) error {
			return fmt.Errorf("could not find the word '%s' from the class name '%s' "+
				"in the contextionary", word, className)
		},
		func() error {
			return fmt.Errorf("className '%s' consists of only stopwords and is therefore "+
				"not a contextionary-valid class name, make sure at least one word in the "+
				"classname is not a stop word", className)
		})
}

// validatePropertyName ensures the (camel-split) property name consists of
// contextionary-known words, at least one of which is not a stopword. Skipped
// entirely when the property name is excluded from vectorization.
func (cv *ConfigValidator) validatePropertyName(ctx context.Context,
	propertyName string, vectorize bool,
) error {
	if !vectorize {
		// user does not want to vectorize this property name, so we don't have to
		// validate it
		return nil
	}

	return cv.validateWords(ctx, propertyName,
		func(word string) error {
			return fmt.Errorf("could not find word '%s' of the property '%s' in the "+
				"contextionary", word, propertyName)
		},
		func() error {
			return fmt.Errorf("the propertyName '%s' consists of only stopwords and is "+
				"therefore not a contextionary-valid property name, make sure at least one word "+
				"in the property name is not a stop word", propertyName)
		})
}

// validateIndexState verifies that a vector can be built at all: either the
// class name is vectorized, or at least one indexed text/text[] property
// exists to contribute to the vector.
func (cv *ConfigValidator) validateIndexState(ctx context.Context,
	class *models.Class, icheck IndexChecker,
) error {
	if icheck.VectorizeClassName() {
		// if the user chooses to vectorize the classname, vector-building will
		// always be possible, no need to investigate further

		return nil
	}

	// search if there is at least one indexed, string/text or string/text[]
	// prop. If found pass validation
	for _, prop := range class.Properties {
		if len(prop.DataType) < 1 {
			return errors.Errorf("property %s must have at least one datatype: "+
				"got %v", prop.Name, prop.DataType)
		}

		if prop.DataType[0] != string(schema.DataTypeText) &&
			prop.DataType[0] != string(schema.DataTypeTextArray) {
			// we can only vectorize text-like props
			continue
		}

		if icheck.PropertyIndexed(prop.Name) {
			// found at least one, this is a valid schema
			return nil
		}
	}

	return fmt.Errorf("invalid properties: didn't find a single property which is " +
		"of type string or text and is not excluded from indexing. In addition the " +
		"class name is excluded from vectorization as well, meaning that it cannot be " +
		"used to determine the vector position. To fix this, set 'vectorizeClassName' " +
		"to true if the class name is contextionary-valid. Alternatively add at least " +
		"contextionary-valid text/string property which is not excluded from " +
		"indexing")
}

// checkForPossibilityOfDuplicateVectors logs a warning (it never fails
// validation) when the vector would be derived from the class name alone,
// which makes every object's vector identical.
func (cv *ConfigValidator) checkForPossibilityOfDuplicateVectors(
	ctx context.Context, class *models.Class, icheck IndexChecker,
) {
	if !icheck.VectorizeClassName() {
		// if the user choses not to vectorize the class name, this means they must
		// have chosen something else to vectorize, otherwise the validation would
		// have error'd before we ever got here. We can skip further checking.

		return
	}

	// search if there is at least one indexed, string/text prop. If found exit
	for _, prop := range class.Properties {
		// length check skipped, because validation has already passed
		if prop.DataType[0] != string(schema.DataTypeText) {
			// we can only vectorize text-like props
			continue
		}

		if icheck.PropertyIndexed(prop.Name) {
			// found at least one
			return
		}
	}

	cv.logger.WithField("module", "text2vec-contextionary").
		WithField("class", class.Class).
		Warnf("text2vec-contextionary: Class %q does not have any properties "+
			"indexed (or only non text-properties indexed) and the vector position is "+
			"only determined by the class name. Each object will end up with the same "+
			"vector which leads to a severe performance penalty on imports. 
Consider "+ + "setting vectorIndexConfig.skip=true for this property", class.Class) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/schema_config_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/schema_config_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c731429dcfbac45afbe73b70623f7414ba87636b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/schema_config_test.go @@ -0,0 +1,515 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "testing" + + "github.com/sirupsen/logrus" + ltest "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +func TestConfigValidator(t *testing.T) { + t.Run("validate class names", func(t *testing.T) { + type testCase struct { + input string + valid bool + name string + vectorize bool + } + + // for all test cases keep in mind that the word "carrot" is not present in + // the fake c11y, but every other word is. + // + // Additionally, the word "the" is a stopword + // + // all inputs represent class names (!) 
+ tests := []testCase{ + // valid names + { + name: "Single uppercase word present in the c11y", + input: "Car", + valid: true, + vectorize: true, + }, + { + name: "Single lowercase word present in the c11y, stored as uppercase", + input: "car", + valid: true, + vectorize: true, + }, + { + name: "combination of valid words starting with uppercase letter", + input: "CarGarage", + valid: true, + vectorize: true, + }, + { + name: "combination of valid words starting with lowercase letter, stored as uppercase", + input: "carGarage", + valid: true, + vectorize: true, + }, + { + name: "combination of valid words and stopwords, starting with uppercase", + input: "TheCarGarage", + valid: true, + vectorize: true, + }, + { + name: "combination of valid words and stopwords starting with lowercase letter, stored as uppercase", + input: "carTheGarage", + valid: true, + vectorize: true, + }, + + // invalid names + { + name: "Single uppercase word NOT present in the c11y", + input: "Carrot", + valid: false, + vectorize: true, + }, + { + name: "Single lowercase word NOT present in the c11y", + input: "carrot", + valid: false, + vectorize: true, + }, + { + name: "Single uppercase stopword", + input: "The", + valid: false, + vectorize: true, + }, + { + name: "Single lowercase stopword", + input: "the", + valid: false, + vectorize: true, + }, + { + name: "combination of valid and invalid words, valid word first lowercased", + input: "potatoCarrot", + valid: false, + vectorize: true, + }, + { + name: "combination of valid and invalid words, valid word first uppercased", + input: "PotatoCarrot", + valid: false, + vectorize: true, + }, + { + name: "combination of valid and invalid words, invalid word first lowercased", + input: "carrotPotato", + valid: false, + vectorize: true, + }, + { + name: "combination of valid and invalid words, invalid word first uppercased", + input: "CarrotPotato", + valid: false, + vectorize: true, + }, + { + name: "combination of only stopwords, starting with 
lowercase", + input: "theThe", + valid: false, + vectorize: true, + }, + { + name: "combination of only stopwords, starting with uppercase", + input: "TheThe", + valid: false, + vectorize: true, + }, + + // vectorize turned off + { + name: "non-vectorized: combination of only stopwords, starting with uppercase", + input: "TheThe", + valid: true, + vectorize: false, + }, + { + name: "non-vectorized: excluded word", + input: "carrot", + valid: true, + vectorize: false, + }, + } + + for _, test := range tests { + t.Run(test.name+" object class", func(t *testing.T) { + class := &models.Class{ + Class: test.input, + Properties: []*models.Property{{ + Name: "dummyPropSoWeDontRunIntoAllNoindexedError", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }}, + } + + logger, _ := ltest.NewNullLogger() + v := NewConfigValidator(&fakeRemote{}, logger) + err := v.Do(context.Background(), class, nil, &fakeIndexChecker{ + vectorizeClassName: test.vectorize, + propertyIndexed: true, + }) + assert.Equal(t, test.valid, err == nil) + + // only proceed if input was supposed to be valid + if test.valid == false { + return + } + }) + } + }) + + t.Run("validate property names", func(t *testing.T) { + type testCase struct { + input string + valid bool + name string + vectorize bool + } + + // for all test cases keep in mind that the word "carrot" is not present in + // the fake c11y, but every other word is + // + // all inputs represent property names (!) 
+ tests := []testCase{ + // valid names + { + name: "Single uppercase word present in the c11y, stored as lowercase", + input: "Brand", + valid: true, + vectorize: true, + }, + { + name: "Single lowercase word present in the c11y", + input: "brand", + valid: true, + vectorize: true, + }, + { + name: "combination of valid words starting with uppercase letter, stored as lowercase", + input: "BrandGarage", + valid: true, + vectorize: true, + }, + { + name: "combination of valid words starting with lowercase letter", + input: "brandGarage", + valid: true, + vectorize: true, + }, + { + name: "combination of valid words and stop words starting with uppercase letter, stored as lowercase", + input: "TheGarage", + valid: true, + vectorize: true, + }, + { + name: "combination of valid words and stop words starting with lowercase letter", + input: "theGarage", + valid: true, + vectorize: true, + }, + + // invalid names + { + name: "Single uppercase word NOT present in the c11y", + input: "Carrot", + valid: false, + vectorize: true, + }, + { + name: "Single lowercase word NOT present in the c11y", + input: "carrot", + valid: false, + vectorize: true, + }, + { + name: "Single lowercase stop word", + input: "the", + valid: false, + vectorize: true, + }, + { + name: "combination of valid and invalid words, valid word first lowercased", + input: "potatoCarrot", + valid: false, + vectorize: true, + }, + { + name: "combination of valid and invalid words, valid word first uppercased", + input: "PotatoCarrot", + valid: false, + vectorize: true, + }, + { + name: "combination of valid and invalid words, invalid word first lowercased", + input: "carrotPotato", + valid: false, + vectorize: true, + }, + { + name: "combination of valid and invalid words, invalid word first uppercased", + input: "CarrotPotato", + valid: false, + vectorize: true, + }, + { + name: "combination of only stop words, first lowercased", + input: "theThe", + valid: false, + vectorize: true, + }, + { + name: 
"combination of only stop words, first uppercased", + input: "TheThe", + valid: false, + vectorize: true, + }, + + // without vectorizing + { + name: "non-vectorizing: combination of only stop words, first uppercased", + input: "TheThe", + valid: true, + vectorize: false, + }, + { + name: "non-vectorizing: combination of only stop words, first uppercased", + input: "carrot", + valid: true, + vectorize: false, + }, + } + + for _, test := range tests { + t.Run(test.name+" object class", func(t *testing.T) { + class := &models.Class{ + Class: "ValidName", + Properties: []*models.Property{{ + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: test.input, + }}, + } + + logger, _ := ltest.NewNullLogger() + v := NewConfigValidator(&fakeRemote{}, logger) + err := v.Do(context.Background(), class, nil, &fakeIndexChecker{ + vectorizePropertyName: test.vectorize, + propertyIndexed: true, + }) + assert.Equal(t, test.valid, err == nil) + }) + } + }) + + t.Run("all usable props no-indexed", func(t *testing.T) { + t.Run("all schema vectorization turned off", func(t *testing.T) { + class := &models.Class{ + Vectorizer: "text2vec-contextionary", + Class: "ValidName", + Properties: []*models.Property{ + { + DataType: []string{"text"}, + Name: "description", + }, + { + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "name", + }, + { + DataType: []string{"int"}, + Name: "amount", + }, + }, + } + + logger, _ := ltest.NewNullLogger() + v := NewConfigValidator(&fakeRemote{}, logger) + err := v.Do(context.Background(), class, nil, &fakeIndexChecker{ + vectorizePropertyName: false, + vectorizeClassName: false, + propertyIndexed: false, + }) + assert.NotNil(t, err) + }) + }) + + t.Run("with only array types", func(t *testing.T) { + class := &models.Class{ + Vectorizer: "text2vec-contextionary", + Class: "ValidName", + Properties: []*models.Property{ + { + DataType: 
[]string{"text[]"}, + Name: "descriptions", + }, + { + DataType: schema.DataTypeTextArray.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "names", + }, + }, + } + + logger, _ := ltest.NewNullLogger() + v := NewConfigValidator(&fakeRemote{}, logger) + err := v.Do(context.Background(), class, nil, &fakeIndexChecker{ + vectorizePropertyName: false, + vectorizeClassName: false, + propertyIndexed: true, + }) + assert.Nil(t, err) + }) +} + +func TestConfigValidator_RiskOfDuplicateVectors(t *testing.T) { + type test struct { + name string + in *models.Class + expectWarning bool + indexChecker *fakeIndexChecker + } + + tests := []test{ + { + name: "usable properties", + in: &models.Class{ + Class: "ValidName", + Properties: []*models.Property{ + { + DataType: []string{string(schema.DataTypeText)}, + Name: "textProp", + }, + }, + }, + expectWarning: false, + indexChecker: &fakeIndexChecker{ + vectorizePropertyName: false, + vectorizeClassName: true, + propertyIndexed: true, + }, + }, + { + name: "no properties", + in: &models.Class{ + Class: "ValidName", + }, + expectWarning: true, + indexChecker: &fakeIndexChecker{ + vectorizePropertyName: false, + vectorizeClassName: true, + propertyIndexed: false, + }, + }, + { + name: "usable properties, but they are no-indexed", + in: &models.Class{ + Class: "ValidName", + Properties: []*models.Property{ + { + DataType: []string{string(schema.DataTypeText)}, + Name: "textProp", + }, + }, + }, + expectWarning: true, + indexChecker: &fakeIndexChecker{ + vectorizePropertyName: false, + vectorizeClassName: true, + propertyIndexed: false, + }, + }, + { + name: "only unusable properties", + in: &models.Class{ + Class: "ValidName", + Properties: []*models.Property{ + { + DataType: []string{string(schema.DataTypeInt)}, + Name: "intProp", + }, + }, + }, + expectWarning: true, + indexChecker: &fakeIndexChecker{ + vectorizePropertyName: false, + vectorizeClassName: true, + propertyIndexed: false, + }, + }, + } + + for 
_, test := range tests { + t.Run(test.name, func(t *testing.T) { + logger, hook := ltest.NewNullLogger() + v := NewConfigValidator(&fakeRemote{}, logger) + err := v.Do(context.Background(), test.in, nil, test.indexChecker) + require.Nil(t, err) + + entry := hook.LastEntry() + if test.expectWarning { + require.NotNil(t, entry) + assert.Equal(t, logrus.WarnLevel, entry.Level) + } else { + assert.Nil(t, entry) + } + }) + } +} + +type fakeIndexChecker struct { + vectorizeClassName bool + vectorizePropertyName bool + propertyIndexed bool +} + +func (f *fakeIndexChecker) VectorizeClassName() bool { + return f.vectorizeClassName +} + +func (f *fakeIndexChecker) VectorizePropertyName(propName string) bool { + return f.vectorizePropertyName +} + +func (f *fakeIndexChecker) PropertyIndexed(propName string) bool { + return f.propertyIndexed +} + +// Every word in this fake c11y remote client is present except for the word +// Carrot which is not present +type fakeRemote struct{} + +func (f *fakeRemote) IsWordPresent(ctx context.Context, word string) (bool, error) { + if word == "carrot" || word == "the" { + return false, nil + } + return true, nil +} + +func (f *fakeRemote) IsStopWord(ctx context.Context, word string) (bool, error) { + return word == "the", nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/vectorizer.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/vectorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..bfd8c6271fee1ffc2a35b0540a974bd8b19add9b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/vectorizer.go @@ -0,0 +1,183 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +// TODO: This entire package should be part of the text2vec-contextionary +// module, if methods/objects in here are used from non-modular code, they +// probably shouldn't be in here + +import ( + "context" + "errors" + "fmt" + "strings" + + "github.com/fatih/camelcase" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + txt2vecmodels "github.com/weaviate/weaviate/modules/text2vec-contextionary/additional/models" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" +) + +// Vectorizer turns objects into vectors +type Vectorizer struct { + client client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +type ErrNoUsableWords struct { + Err error +} + +func (e ErrNoUsableWords) Error() string { + return e.Err.Error() +} + +func NewErrNoUsableWordsf(pattern string, args ...interface{}) ErrNoUsableWords { + return ErrNoUsableWords{Err: fmt.Errorf(pattern, args...)} +} + +type client interface { + VectorForCorpi(ctx context.Context, corpi []string, + overrides map[string]string) ([]float32, []txt2vecmodels.InterpretationSource, error) +} + +// IndexCheck returns whether a property of a class should be indexed +type ClassIndexCheck interface { + PropertyIndexed(property string) bool + VectorizeClassName() bool + VectorizePropertyName(propertyName string) bool +} + +// New from c11y client +func New(client client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + return v.Corpi(ctx, inputs) +} + +// Object object to vector +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + var overrides map[string]string + if 
object.VectorWeights != nil { + overrides = object.VectorWeights.(map[string]string) + } + + vec, sources, err := v.object(ctx, object, overrides, cfg) + if err != nil { + return nil, nil, err + } + + additional := models.AdditionalProperties{} + additional["interpretation"] = &txt2vecmodels.Interpretation{ + Source: sourceFromInputElements(sources), + } + + return vec, additional, nil +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, overrides map[string]string, + cfg moduletools.ClassConfig, +) ([]float32, []txt2vecmodels.InterpretationSource, error) { + icheck := NewIndexChecker(cfg) + corpi := v.objectVectorizer.Texts(ctx, object, icheck) + + vector, ie, err := v.client.VectorForCorpi(ctx, []string{corpi}, overrides) + if err != nil { + switch { + case errors.As(err, &ErrNoUsableWords{}): + return nil, nil, fmt.Errorf("the object is invalid, as weaviate could not extract "+ + "any contextionary-valid words from it. This is the case when you have "+ + "set the options 'vectorizeClassName: false' and 'vectorizePropertyName: false' in this class' schema definition "+ + "and not a single property's value "+ + "contains at least one contextionary-valid word. To fix this, you have several "+ + "options:\n\n1.) Make sure that the schema class name or the set properties are "+ + "a contextionary-valid term and include them in vectorization using the "+ + "'vectorizeClassName' or 'vectorizePropertyName' setting. In this case the vector position "+ + "will be composed of both the class/property names and the values for those fields. "+ + "Even if no property values are contextionary-valid, the overall word corpus is still valid "+ + "due to the contextionary-valid class/property names."+ + "\n\n2.) Alternatively, if you do not want to include schema class/property names "+ + "in vectorization, you must make sure that at least one text/string property contains "+ + "at least one contextionary-valid word."+ + "\n\n3.) 
If the word corpus weaviate extracted from your object "+ + "(see below) does contain enough meaning to build a vector position, but the contextionary "+ + "did not recognize the words, you can extend the contextionary using the "+ + "REST API. This is the case when you use mostly industry-specific terms which are "+ + "not known to the common language contextionary. Once extended, simply reimport this object."+ + "\n\nThe following words were extracted from your object: %v"+ + "\n\nTo learn more about the contextionary and how it behaves, check out: https://www.semi.technology/documentation/weaviate/current/contextionary.html"+ + "\n\nOriginal error: %v", corpi, err) + default: + return nil, nil, fmt.Errorf("vectorizing object with corpus '%+v': %w", corpi, err) + } + } + + return vector, ie, nil +} + +// Corpi takes any list of strings and builds a common vector for all of them +func (v *Vectorizer) Corpi(ctx context.Context, corpi []string, +) ([]float32, error) { + // can be written to concurrently if multiple named vectors are used + corpiTmp := make([]string, len(corpi)) + for i, corpus := range corpi { + corpiTmp[i] = camelCaseToLower(corpus) + } + + vector, _, err := v.client.VectorForCorpi(ctx, corpiTmp, nil) + if err != nil { + return nil, fmt.Errorf("vectorizing corpus '%+v': %w", corpiTmp, err) + } + + return vector, nil +} + +func camelCaseToLower(in string) string { + parts := camelcase.Split(in) + var sb strings.Builder + for i, part := range parts { + if part == " " { + continue + } + + if i > 0 { + sb.WriteString(" ") + } + + sb.WriteString(strings.ToLower(part)) + } + + return sb.String() +} + +func sourceFromInputElements(in []txt2vecmodels.InterpretationSource) []*txt2vecmodels.InterpretationSource { + out := make([]*txt2vecmodels.InterpretationSource, len(in)) + for i, elem := range in { + out[i] = &txt2vecmodels.InterpretationSource{ + Concept: elem.Concept, + Occurrence: elem.Occurrence, + Weight: float64(elem.Weight), + } + } + + return out 
+} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/vectorizer_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d0e0408bd40bf47f7dd4203545b7f4f1645d2fe2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-contextionary/vectorizer/vectorizer_test.go @@ -0,0 +1,316 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +func TestVectorizingObjects(t *testing.T) { + type testCase struct { + name string + input *models.Object + expectedClientCall []string + noindex string + excludedProperty string // to simulate a schema where property names aren't vectorized + excludedClass string // to simulate a schema where class names aren't vectorized + } + + tests := []testCase{ + { + name: "empty object", + input: &models.Object{ + Class: "Car", + }, + expectedClientCall: []string{"car"}, + }, + { + name: "object with one string prop", + input: &models.Object{ + Class: "Car", + Properties: map[string]interface{}{ + "brand": "Mercedes", + }, + }, + expectedClientCall: []string{"car brand mercedes"}, + }, + + { + name: "object with one non-string prop", + input: &models.Object{ + Class: "Car", + Properties: map[string]interface{}{ + "power": 300, + }, + }, + expectedClientCall: []string{"car"}, + }, + + { + name: "object with a mix of props", + input: &models.Object{ + Class: "Car", + Properties: map[string]interface{}{ + "brand": "best brand", + "power": 300, 
+ "review": "a very great car", + }, + }, + expectedClientCall: []string{"car brand best brand review a very great car"}, + }, + { + name: "with a noindexed property", + noindex: "review", + input: &models.Object{ + Class: "Car", + Properties: map[string]interface{}{ + "brand": "best brand", + "power": 300, + "review": "a very great car", + }, + }, + expectedClientCall: []string{"car brand best brand"}, + }, + + { + name: "with the class name not vectorized", + excludedClass: "Car", + input: &models.Object{ + Class: "Car", + Properties: map[string]interface{}{ + "brand": "best brand", + "power": 300, + "review": "a very great car", + }, + }, + expectedClientCall: []string{"brand best brand review a very great car"}, + }, + + { + name: "with a property name not vectorized", + excludedProperty: "review", + input: &models.Object{ + Class: "Car", + Properties: map[string]interface{}{ + "brand": "best brand", + "power": 300, + "review": "a very great car", + }, + }, + expectedClientCall: []string{"car brand best brand a very great car"}, + }, + + { + name: "with no schema labels vectorized", + excludedProperty: "review", + excludedClass: "Car", + input: &models.Object{ + Class: "Car", + Properties: map[string]interface{}{ + "review": "a very great car", + }, + }, + expectedClientCall: []string{"a very great car"}, + }, + + { + name: "with string/text arrays without propname or classname", + excludedProperty: "reviews", + excludedClass: "Car", + input: &models.Object{ + Class: "Car", + Properties: map[string]interface{}{ + "reviews": []string{ + "a very great car", + "you should consider buying one", + }, + }, + }, + expectedClientCall: []string{"a very great car you should consider buying one"}, + }, + + { + name: "with string/text arrays with propname and classname", + input: &models.Object{ + Class: "Car", + Properties: map[string]interface{}{ + "reviews": []string{ + "a very great car", + "you should consider buying one", + }, + }, + }, + expectedClientCall: 
[]string{"car reviews a very great car reviews you should consider buying one"}, + }, + + { + name: "with compound class and prop names", + input: &models.Object{ + Class: "SuperCar", + Properties: map[string]interface{}{ + "brandOfTheCar": "best brand", + "power": 300, + "review": "a very great car", + }, + }, + expectedClientCall: []string{"super car brand of the car best brand review a very great car"}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + ic := &fakeClassConfig{ + excludedProperty: test.excludedProperty, + skippedProperty: test.noindex, + vectorizeClassName: test.excludedClass != "Car", + vectorizePropertyName: true, + } + + client := &fakeClient{} + v := New(client) + + vector, _, err := v.Object(context.Background(), test.input, ic) + + require.Nil(t, err) + assert.Equal(t, []float32{0, 1, 2, 3}, vector) + expected := strings.Split(test.expectedClientCall[0], " ") + actual := strings.Split(client.lastInput[0], " ") + assert.ElementsMatch(t, expected, actual) + }) + } +} + +func TestVectorizingActions(t *testing.T) { + type testCase struct { + name string + input *models.Object + expectedClientCall []string + noindex string + excludedProperty string // to simulate a schema where property names aren't vectorized + excludedClass string // to simulate a schema where class names aren't vectorized + } + + tests := []testCase{ + { + name: "empty object", + input: &models.Object{ + Class: "Flight", + }, + expectedClientCall: []string{"flight"}, + }, + { + name: "object with one string prop", + input: &models.Object{ + Class: "Flight", + Properties: map[string]interface{}{ + "brand": "Mercedes", + }, + }, + expectedClientCall: []string{"flight brand mercedes"}, + }, + + { + name: "object with one non-string prop", + input: &models.Object{ + Class: "Flight", + Properties: map[string]interface{}{ + "length": 300, + }, + }, + expectedClientCall: []string{"flight"}, + }, + + { + name: "object with a mix of props", + input: 
&models.Object{ + Class: "Flight", + Properties: map[string]interface{}{ + "brand": "best brand", + "length": 300, + "review": "a very great flight", + }, + }, + expectedClientCall: []string{"flight brand best brand review a very great flight"}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + client := &fakeClient{} + v := New(client) + + ic := &fakeClassConfig{ + excludedProperty: test.excludedProperty, + skippedProperty: test.noindex, + vectorizeClassName: test.excludedClass != "Flight", + vectorizePropertyName: true, + } + vector, _, err := v.Object(context.Background(), test.input, ic) + + require.Nil(t, err) + assert.Equal(t, []float32{0, 1, 2, 3}, vector) + expected := strings.Split(test.expectedClientCall[0], " ") + actual := strings.Split(client.lastInput[0], " ") + assert.ElementsMatch(t, expected, actual) + }) + } +} + +func TestVectorizingSearchTerms(t *testing.T) { + type testCase struct { + name string + input []string + expectedClientCall []string + } + + tests := []testCase{ + { + name: "single word", + input: []string{"car"}, + expectedClientCall: []string{"car"}, + }, + { + name: "multiple entries with multiple words", + input: []string{"car", "car brand"}, + expectedClientCall: []string{"car", "car brand"}, + }, + { + name: "multiple entries with upper casing", + input: []string{"Car", "Car Brand"}, + expectedClientCall: []string{"car", "car brand"}, + }, + { + name: "with camel cased words", + input: []string{"Car", "CarBrand"}, + expectedClientCall: []string{"car", "car brand"}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + client := &fakeClient{} + v := New(client) + + res, err := v.Corpi(context.Background(), test.input) + + require.Nil(t, err) + assert.Equal(t, []float32{0, 1, 2, 3}, res) + assert.ElementsMatch(t, test.expectedClientCall, client.lastInput) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/databricks.go 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/databricks.go new file mode 100644 index 0000000000000000000000000000000000000000..34083971a1a93a7fdcc380508c169e040bd2d429 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/databricks.go @@ -0,0 +1,225 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2vec-databricks/ent" +) + +type embeddingsRequest struct { + Input []string `json:"input"` + Instruction string `json:"instruction,omitempty"` +} + +type embedding struct { + Object string `json:"object"` + Data []embeddingData `json:"data,omitempty"` + ErrorCode string `json:"error_code,omitempty"` + Message string `json:"message,omitempty"` + Error struct { + Message string `json:"message"` + Type string `json:"type"` + } `json:"error,omitempty"` +} + +type embeddingData struct { + Object string `json:"object"` + Index int `json:"index"` + Embedding []float32 `json:"embedding"` +} + +type client struct { + databricksToken string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(databricksToken string, timeout time.Duration, logger logrus.FieldLogger) *client { + return &client{ + databricksToken: databricksToken, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (v *client) Vectorize(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) 
(*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + config := v.getVectorizationConfig(cfg) + res, limits, err := v.vectorize(ctx, input, config) + return res, limits, 0, err +} + +func (v *client) VectorizeQuery(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + config := v.getVectorizationConfig(cfg) + res, _, err := v.vectorize(ctx, input, config) + return res, err +} + +func (v *client) vectorize(ctx context.Context, input []string, config ent.VectorizationConfig) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, error) { + body, err := json.Marshal(v.getEmbeddingsRequest(input, config.Instruction)) + if err != nil { + return nil, nil, errors.Wrap(err, "marshal body") + } + + endpoint, _ := v.buildURL(ctx, config) + + req, err := http.NewRequestWithContext(ctx, "POST", endpoint, + bytes.NewReader(body)) + if err != nil { + return nil, nil, errors.Wrap(err, "create POST request") + } + apiKey, err := v.getApiKey(ctx) + if err != nil { + return nil, nil, errors.Wrap(err, "API Key") + } + req.Header.Add(v.getApiKeyHeaderAndValue(apiKey)) + req.Header.Add("Content-Type", "application/json") + if userAgent := modulecomponents.GetValueFromContext(ctx, "X-Databricks-User-Agent"); userAgent != "" { + req.Header.Add("User-Agent", userAgent) + } + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, nil, errors.Wrap(err, "read response body") + } + + var resBody embedding + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if res.StatusCode != 200 || resBody.ErrorCode != "" { + return nil, nil, v.getError(res.StatusCode, resBody) + } + rateLimit := ent.GetRateLimitsFromHeader(res.Header) + + texts := make([]string, len(resBody.Data)) + embeddings := make([][]float32, len(resBody.Data)) + databrickserror := make([]error, len(resBody.Data)) + for i := range resBody.Data { + texts[i] = resBody.Data[i].Object + embeddings[i] = resBody.Data[i].Embedding + + } + + return &modulecomponents.VectorizationResult[[]float32]{ + Text: texts, + Dimensions: len(resBody.Data[0].Embedding), + Vector: embeddings, + Errors: databrickserror, + }, rateLimit, nil +} + +func (v *client) buildURL(ctx context.Context, config ent.VectorizationConfig) (string, error) { + endpoint := config.Endpoint + if headerEndpoint := modulecomponents.GetValueFromContext(ctx, "X-Databricks-Endpoint"); headerEndpoint != "" { + endpoint = headerEndpoint + } + return endpoint, nil +} + +func (v *client) getError(statusCode int, resBody embedding) error { + endpoint := "Databricks Foundation Model API" + + if resBody.ErrorCode != "" { + return fmt.Errorf("connection to: %s failed with error code: %s message: %v", endpoint, resBody.ErrorCode, resBody.Message) + } + + if resBody.Error.Message != "" { + return fmt.Errorf("connection to: %s failed with status: %d error: %v", endpoint, statusCode, resBody.Error.Message) + } + return fmt.Errorf("connection to: %s failed with status: %d", endpoint, statusCode) +} + +func (v *client) getEmbeddingsRequest(input []string, instruction string) embeddingsRequest { + return embeddingsRequest{Input: input, Instruction: instruction} +} + +func (v *client) getApiKeyHeaderAndValue(apiKey string) (string, string) { + return "Authorization", fmt.Sprintf("Bearer %s", apiKey) +} + +func (v *client) GetApiKeyHash(ctx context.Context, cfg moduletools.ClassConfig) [32]byte { + key, err := v.getApiKey(ctx) + if err != nil { + return [32]byte{} + } + return 
sha256.Sum256([]byte(key)) +} + +func (v *client) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + name := "Databricks" + + rpm, tpm := modulecomponents.GetRateLimitFromContext(ctx, name, 0, 0) + return &modulecomponents.RateLimits{ + RemainingTokens: tpm, + LimitTokens: tpm, + ResetTokens: time.Now().Add(61 * time.Second), + RemainingRequests: rpm, + LimitRequests: rpm, + ResetRequests: time.Now().Add(61 * time.Second), + } +} + +func (v *client) getApiKey(ctx context.Context) (string, error) { + var apiKey, envVarValue, envVar string + + apiKey = "X-Databricks-Token" + envVar = "DATABRICKS_TOKEN" + envVarValue = v.databricksToken + + return v.getApiKeyFromContext(ctx, apiKey, envVarValue, envVar) +} + +func (v *client) getApiKeyFromContext(ctx context.Context, apiKey, envVarValue, envVar string) (string, error) { + if apiKeyValue := modulecomponents.GetValueFromContext(ctx, apiKey); apiKeyValue != "" { + return apiKeyValue, nil + } + if envVarValue != "" { + return envVarValue, nil + } + return "", fmt.Errorf("no Databricks token found neither in request header: %s nor in environment variable under %s", apiKey, envVar) +} + +func (v *client) getVectorizationConfig(cfg moduletools.ClassConfig) ent.VectorizationConfig { + settings := ent.NewClassSettings(cfg) + return ent.VectorizationConfig{ + Endpoint: settings.Endpoint(), + Instruction: settings.Instruction(), + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/databricks_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/databricks_test.go new file mode 100644 index 0000000000000000000000000000000000000000..28891ab18b2f9fa43d4f822f9b033ad5aea60546 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/databricks_test.go @@ -0,0 +1,264 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ 
(_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/modules/text2vec-databricks/ent" +) + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + + c := New("databricksToken", 0, nullLogger()) + ctxWithValue := context.WithValue(context.Background(), + "X-Databricks-Endpoint", []string{server.URL}) + + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + Errors: []error{nil}, + } + res, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Type": "text", "Model": "ada"}}) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("databricksToken", 0, nullLogger()) + ctxWithValue := context.WithValue(context.Background(), + "X-Databricks-Endpoint", []string{server.URL}) + + ctx, cancel := context.WithDeadline(ctxWithValue, time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ 
+ t: t, + serverError: errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + c := New("databricksToken", 0, nullLogger()) + ctxWithValue := context.WithValue(context.Background(), + "X-Databricks-Endpoint", []string{server.URL}) + + _, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, + fakeClassConfig{}) + + require.NotNil(t, err) + assert.EqualError(t, err, "connection to: Databricks Foundation Model API failed with status: 500 error: nope, not gonna happen") + }) + + t.Run("when Databricks token is passed using X-Databricks-Token header", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Databricks-Token", []string{"some-key"}) + + ctxWithValue = context.WithValue(ctxWithValue, + "X-Databricks-Endpoint", []string{server.URL}) + + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + Errors: []error{nil}, + } + res, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, + fakeClassConfig{classConfig: map[string]interface{}{"Type": "text", "Model": "ada"}}) + + require.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when Databricks token is empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{}) + + require.NotNil(t, err) + assert.EqualError(t, err, "API Key: no Databricks token found "+ + "neither in request header: X-Databricks-Token "+ + "nor in environment variable under DATABRICKS_TOKEN") + }) + + t.Run("when X-Databricks-Token header is passed but empty", func(t *testing.T) { + server := 
httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Databricks-Token", []string{""}) + + _, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, + fakeClassConfig{classConfig: map[string]interface{}{"Type": "text", "Model": "ada"}}) + + require.NotNil(t, err) + assert.EqualError(t, err, "API Key: no Databricks token found "+ + "neither in request header: X-Databricks-Token "+ + "nor in environment variable under DATABRICKS_TOKEN") + }) + + t.Run("when X-Databricks-Endpoint header is passed", func(t *testing.T) { + c := New("", 0, nullLogger()) + + config := ent.VectorizationConfig{ + Endpoint: "http://serving-url-in-config.com", + } + + ctxWithValue := context.WithValue(context.Background(), + "X-Databricks-Endpoint", []string{"http://serving-url-passed-in-header.com"}) + + endpoint, err := c.buildURL(ctxWithValue, config) + require.NoError(t, err) + assert.Equal(t, "http://serving-url-passed-in-header.com", endpoint) + + endpoint, err = c.buildURL(context.TODO(), config) + require.NoError(t, err) + assert.Equal(t, "http://serving-url-in-config.com", endpoint) + }) + + t.Run("pass rate limit headers requests", func(t *testing.T) { + c := New("", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Databricks-Ratelimit-RequestPM-Embedding", []string{"50"}) + + rl := c.GetVectorizerRateLimit(ctxWithValue, fakeClassConfig{}) + assert.Equal(t, 50, rl.LimitRequests) + assert.Equal(t, 50, rl.RemainingRequests) + }) + + t.Run("pass rate limit headers tokens", func(t *testing.T) { + c := New("", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), "X-Databricks-Ratelimit-TokenPM-Embedding", []string{"60"}) + + rl := c.GetVectorizerRateLimit(ctxWithValue, fakeClassConfig{}) + assert.Equal(t, 60, rl.LimitTokens) + assert.Equal(t, 60, rl.RemainingTokens) + }) + + t.Run("when X-Databricks-User-Agent 
header is passed", func(t *testing.T) { + userAgent := "weaviate+spark_connector" + server := httptest.NewServer(&fakeHandler{t: t, userAgent: userAgent}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Databricks-Token", []string{"some-key"}) + + ctxWithValue = context.WithValue(ctxWithValue, + "X-Databricks-Endpoint", []string{server.URL}) + + ctxWithValue = context.WithValue(ctxWithValue, + "X-Databricks-User-Agent", []string{userAgent}) + + _, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, + fakeClassConfig{classConfig: map[string]interface{}{"Type": "text", "Model": "ada"}}) + + require.NoError(t, err) + }) +} + +type fakeHandler struct { + t *testing.T + serverError error + userAgent string +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.userAgent != "" { + assert.Equal(f.t, f.userAgent, r.UserAgent()) + } + if f.serverError != nil { + embeddingError := map[string]interface{}{ + "message": f.serverError.Error(), + "type": "invalid_request_error", + } + embedding := map[string]interface{}{ + "error": embeddingError, + } + outBytes, err := json.Marshal(embedding) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + textInputArray := b["input"].([]interface{}) + textInput := textInputArray[0].(string) + assert.Greater(f.t, len(textInput), 0) + + embeddingData := map[string]interface{}{ + "object": textInput, + "index": 0, + "embedding": []float32{0.1, 0.2, 0.3}, + } + embedding := map[string]interface{}{ + "object": "list", + "data": []interface{}{embeddingData}, + } + + outBytes, err := json.Marshal(embedding) + require.Nil(f.t, err) + + w.Write(outBytes) +} 
+ +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/databricks_tokens.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/databricks_tokens.go new file mode 100644 index 0000000000000000000000000000000000000000..b2252944d88152f17825a0bbee0c318ffe547fbe --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/databricks_tokens.go @@ -0,0 +1,29 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "strings" + + "github.com/weaviate/tiktoken-go" +) + +func GetTokensCount(model string, input string, tke *tiktoken.Tiktoken) int { + tokensPerMessage := 3 + if strings.HasPrefix(model, "gpt-3.5-turbo") { + tokensPerMessage = 4 + } + + tokensCount := tokensPerMessage + tokensCount += len(tke.Encode(input, nil, nil)) + return tokensCount +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/databricks_tokens_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/databricks_tokens_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5fba1f5dff856138e77d83a64bab963a80b543be --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/databricks_tokens_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/tiktoken-go" +) + +func Test_getTokensCount(t *testing.T) { + shortTestText := "I am a short message. Teddy is the best and biggest dog ever." + + tests := []struct { + name string + model string + messages string + want int + wantErr string + }{ + { + name: "text-davinci-002", + model: "text-davinci-002", + messages: shortTestText, + want: 18, + }, + { + name: "gpt-3.5-turbo", + model: "gpt-3.5-turbo", + messages: shortTestText, + want: 19, + }, + { + name: "gpt-4", + model: "gpt-4", + messages: shortTestText, + want: 18, + }, + { + name: "non-existent-model", + model: "non-existent-model", + messages: shortTestText, + wantErr: "no encoding for model non-existent-model", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tke, err := tiktoken.EncodingForModel(tt.model) + if err != nil { + assert.EqualError(t, err, tt.wantErr) + } else { + assert.Nil(t, err) + assert.Equal(t, tt.want, GetTokensCount(tt.model, tt.messages, tke)) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..66b3424d989ced1b66c11b705780a757ec395d48 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/fakes_for_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..ffd98f0b8305c702db6fd142d421a7dd3de45298 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *client) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Databricks Foundation Models Module - Embeddings", + "documentationHref": "https://docs.databricks.com/en/machine-learning/foundation-models/api-reference.html#embedding-task", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/config.go new file mode 100644 index 0000000000000000000000000000000000000000..eb22833d6f2c2d2fd521667137ec24e2a5f7c0c5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/config.go @@ -0,0 +1,47 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package moddatabricks + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2vec-databricks/ent" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *DatabricksModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": ent.DefaultVectorizeClassName, + } +} + +func (m *DatabricksModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !ent.DefaultPropertyIndexed, + "vectorizePropertyName": ent.DefaultVectorizePropertyName, + } +} + +func (m *DatabricksModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := ent.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = 
modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..c2a2fdb9c0a1c23d084f26b09386436710b5fbe0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/class_settings.go @@ -0,0 +1,64 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + DefaultVectorizeClassName = true + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + LowerCaseInput = false +) + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, LowerCaseInput)} +} + +func (cs *classSettings) Endpoint() string { + return cs.BaseClassSettings.GetPropertyAsString("endpoint", "") +} + +func (cs *classSettings) Instruction() string { + return cs.BaseClassSettings.GetPropertyAsString("instruction", "") +} + +func (cs *classSettings) Validate(class *models.Class) error { + if err := cs.BaseClassSettings.Validate(class); err != nil { + return err + } + + endpoint := cs.Endpoint() + if err := cs.ValidateEndpoint(endpoint); err != nil { + return err + } + + return nil +} + +func (cs *classSettings) ValidateEndpoint(endpoint string) error { + 
if endpoint == "" { + return errors.New("endpoint cannot be empty") + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a6698d06e9326c0c07b8e5a20e6d3bba48682f8c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/class_settings_test.go @@ -0,0 +1,111 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/modules" +) + +func Test_classSettings_Validate(t *testing.T) { + class := &models.Class{ + Class: "test", + Properties: []*models.Property{ + { + DataType: []string{schema.DataTypeText.String()}, + Name: "test", + }, + }, + } + tests := []struct { + name string + cfg moduletools.ClassConfig + wantErr error + }{ + { + name: "user supplied serving url", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "endpoint": "https://foo.databricks.com/serving-endpoints/databricks-gte-large-en/invocations", + }, + }, + }, + { + name: "user did not supply serving url", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantErr: errors.New("endpoint cannot be empty"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cs := NewClassSettings(tt.cfg) + err := cs.Validate(class) + if tt.wantErr != nil { + assert.EqualError(t, 
err, tt.wantErr.Error()) + } else { + assert.NoError(t, err) + } + }) + } +} + +func Test_classSettings(t *testing.T) { + t.Run("with target vector and properties", func(t *testing.T) { + targetVector := "targetVector" + propertyToIndex := "someProp" + class := &models.Class{ + Class: "MyClass", + VectorConfig: map[string]models.VectorConfig{ + targetVector: { + Vectorizer: map[string]interface{}{ + "my-module": map[string]interface{}{ + "vectorizeClassName": false, + "properties": []interface{}{propertyToIndex}, + }, + }, + VectorIndexType: "hnsw", + }, + }, + Properties: []*models.Property{ + { + Name: propertyToIndex, + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "skip": true, + "vectorizePropertyName": true, + }, + }, + }, + { + Name: "otherProp", + }, + }, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", targetVector, nil) + ic := NewClassSettings(cfg) + + assert.True(t, ic.PropertyIndexed(propertyToIndex)) + assert.True(t, ic.VectorizePropertyName(propertyToIndex)) + assert.False(t, ic.PropertyIndexed("otherProp")) + assert.False(t, ic.VectorizePropertyName("otherProp")) + assert.False(t, ic.VectorizeClassName()) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5a6b0275a637b585b5623f751a53f67d473dfee7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/fakes_for_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/vectorization_config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/vectorization_config.go new file mode 100644 index 0000000000000000000000000000000000000000..20e07b8ace0ba5dd840c23dfbbb0306a62cea6d8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/vectorization_config.go @@ -0,0 +1,17 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationConfig struct { + Endpoint string + Instruction string +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/vectorization_result.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/vectorization_result.go new file mode 100644 index 0000000000000000000000000000000000000000..757fdaebb4079145c5a0f0ce8729ac8e88637f6c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/ent/vectorization_result.go @@ -0,0 +1,65 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "net/http" + "strconv" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" +) + +const dummyLimit = 10000000 + +func GetRateLimitsFromHeader(header http.Header) *modulecomponents.RateLimits { + requestsReset, err := time.ParseDuration(header.Get("x-ratelimit-reset-requests")) + if err != nil { + requestsReset = 0 + } + tokensReset, err := time.ParseDuration(header.Get("x-ratelimit-reset-tokens")) + if err != nil { + tokensReset = 0 + } + limitRequests := getHeaderInt(header, "x-ratelimit-limit-requests") + limitTokens := getHeaderInt(header, "x-ratelimit-limit-tokens") + remainingRequests := getHeaderInt(header, "x-ratelimit-remaining-requests") + remainingTokens := getHeaderInt(header, "x-ratelimit-remaining-tokens") + + // azure returns 0 as limit, make sure this does not block anything by setting a high value + if limitTokens == 0 && remainingTokens > 0 { + limitTokens = dummyLimit + } + if limitRequests == 0 && remainingRequests > 0 { + limitRequests = dummyLimit + } + return &modulecomponents.RateLimits{ + LimitRequests: limitRequests, + LimitTokens: limitTokens, + 
RemainingRequests: remainingRequests, + RemainingTokens: remainingTokens, + ResetRequests: time.Now().Add(requestsReset), + ResetTokens: time.Now().Add(tokensReset), + } +} + +func getHeaderInt(header http.Header, key string) int { + value := header.Get(key) + if value == "" { + return 0 + } + i, err := strconv.Atoi(value) + if err != nil { + return 0 + } + return i +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/module.go new file mode 100644 index 0000000000000000000000000000000000000000..85fc76afdb334872e9dda3ea2fdff7113f437892 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/module.go @@ -0,0 +1,160 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package moddatabricks + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-databricks/clients" + "github.com/weaviate/weaviate/modules/text2vec-databricks/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" +) + +const ( + Name = "text2vec-databricks" +) + +var batchSettings = batch.Settings{ + TokenMultiplier: 0, + MaxTimePerBatch: float64(10), + MaxObjectsPerBatch: 2000, + MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 500000 }, + HasTokenLimit: false, + ReturnsRateLimit: false, +} + +func New() *DatabricksModule { + return &DatabricksModule{} +} + +type DatabricksModule struct { + vectorizer text2vecbase.TextVectorizerBatch[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func (m *DatabricksModule) Name() string { + return Name +} + +func (m *DatabricksModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2ManyVec +} + +func (m *DatabricksModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init 
additional properties provider") + } + + return nil +} + +func (m *DatabricksModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *DatabricksModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + databricksToken := os.Getenv("DATABRICKS_TOKEN") + + client := clients.New(databricksToken, timeout, logger) + + m.vectorizer = text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, batchSettings, + logger, m.Name()), + batch.ReturnBatchTokenizer(batchSettings.TokenMultiplier, m.Name(), ent.LowerCaseInput), + ) + m.metaProvider = client + + return nil +} + +func (m *DatabricksModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m *DatabricksModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + icheck := ent.NewClassSettings(cfg) + return m.vectorizer.Object(ctx, obj, cfg, icheck) +} + +func (m *DatabricksModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + vecs, errs := m.vectorizer.ObjectBatch(ctx, objs, skipObject, cfg) + return vecs, nil, errs +} + +func (m *DatabricksModule) MetaInfo() (map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *DatabricksModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return 
m.additionalPropertiesProvider.AdditionalProperties() +} + +func (m *DatabricksModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +func (m *DatabricksModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, nil, nil +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[]float32](New()) + _ = modulecapabilities.GraphQLArguments(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..49e7ff8603030e5cce3c7f622ef05827adc15226 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package moddatabricks + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *DatabricksModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *DatabricksModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *DatabricksModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/vectorizer/batch_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/vectorizer/batch_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8f7ccb42ffe102cf292bcec1c922ca583ae7d5fe --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/vectorizer/batch_test.go @@ -0,0 +1,103 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +func TestBatch(t *testing.T) { + client := &fakeBatchClient{} + cfg := &FakeClassConfig{vectorizePropertyName: false, classConfig: map[string]interface{}{"vectorizeClassName": false}} + logger, _ := test.NewNullLogger() + cases := []struct { + name string + objects []*models.Object + skip []bool + wantErrors map[int]error + deadline time.Duration + }{ + {name: "skip all", objects: []*models.Object{{Class: "Car"}}, skip: []bool{true}}, + {name: "skip first", objects: []*models.Object{{Class: "Car"}, {Class: "Car", Properties: map[string]interface{}{"test": "test"}}}, skip: []bool{true, false}}, + {name: "one object errors", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "test"}}, {Class: "Car", Properties: map[string]interface{}{"test": "error something"}}}, skip: []bool{false, false}, wantErrors: map[int]error{1: fmt.Errorf("something")}}, + {name: "first object errors", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "error something"}}, {Class: "Car", Properties: map[string]interface{}{"test": "test"}}}, skip: []bool{false, false}, wantErrors: map[int]error{0: fmt.Errorf("something")}}, + {name: "vectorize all", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "test"}}, {Class: "Car", Properties: map[string]interface{}{"test": "something"}}}, skip: []bool{false, false}}, + {name: "multiple vectorizer batches", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 25"}}, // set 
limit so next 3 objects are one batch + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "first object second batch"}}, // rate is 100 again + {Class: "Car", Properties: map[string]interface{}{"test": "second object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "fourth object second batch"}}, + }, skip: []bool{false, false, false, false, false, false, false, false}}, + {name: "multiple vectorizer batches with skips and errors", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 25"}}, // set limit so next 3 objects are one batch + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "error something"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "first object second batch"}}, // rate is 100 again + {Class: "Car", Properties: map[string]interface{}{"test": "second object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "fourth object second batch"}}, + }, skip: []bool{false, true, false, false, false, true, false, false}, wantErrors: map[int]error{3: fmt.Errorf("something")}}, + {name: "skip last item", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "fir test object"}}, // set limit + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + 
{Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + }, skip: []bool{false, false, true}}, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + v := text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, batch.Settings{MaxObjectsPerBatch: 100, MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 500000 }, MaxTimePerBatch: 10}, + logger, "test"), + batch.ReturnBatchTokenizer(1, "", false), + ) + deadline := time.Now().Add(10 * time.Second) + if tt.deadline != 0 { + deadline = time.Now().Add(tt.deadline) + } + + ctx, cancl := context.WithDeadline(context.Background(), deadline) + vecs, errs := v.ObjectBatch( + ctx, tt.objects, tt.skip, cfg, + ) + + require.Len(t, errs, len(tt.wantErrors)) + require.Len(t, vecs, len(tt.objects)) + + for i := range tt.objects { + if tt.wantErrors[i] != nil { + require.Equal(t, tt.wantErrors[i], errs[i]) + } else if tt.skip[i] { + require.Nil(t, vecs[i]) + } else { + require.NotNil(t, vecs[i]) + } + } + cancl() + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..04f4ca478ecd7e80cbcc5754ae3cac57e2bb009f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-databricks/vectorizer/fakes_for_test.go @@ -0,0 +1,150 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +type fakeBatchClient struct { + defaultResetRate int +} + +func (c *fakeBatchClient) Vectorize(ctx context.Context, + text []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + if c.defaultResetRate == 0 { + c.defaultResetRate = 60 + } + + vectors := make([][]float32, len(text)) + errors := make([]error, len(text)) + rateLimit := &modulecomponents.RateLimits{RemainingTokens: 100, RemainingRequests: 100, LimitTokens: 200, ResetTokens: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second), ResetRequests: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second)} + for i := range text { + if len(text[i]) >= len("error ") && text[i][:6] == "error " { + errors[i] = fmt.Errorf("%v", text[i][6:]) + continue + } + + tok := len("tokens ") + if len(text[i]) >= tok && text[i][:tok] == "tokens " { + rate, _ := strconv.Atoi(text[i][tok:]) + rateLimit.RemainingTokens = rate + rateLimit.LimitTokens = 2 * rate + } + + azureTok := len("azureTokens ") + if len(text[i]) >= azureTok && text[i][:azureTok] == "azureTokens " { + rate, _ := strconv.Atoi(text[i][tok:]) + rateLimit.RemainingTokens = rate + rateLimit.LimitTokens = 0 + } + + req := len("requests ") + if len(text[i]) >= req && text[i][:req] == "requests " { + reqs, _ := strconv.Atoi(strings.Split(text[i][req:], " ")[0]) + rateLimit.RemainingRequests = reqs + rateLimit.LimitRequests = 2 * reqs + } + + if len(text[i]) >= len("wait ") && text[i][:5] == "wait " { + wait, _ := strconv.Atoi(text[i][5:]) + time.Sleep(time.Duration(wait) * time.Millisecond) + } + vectors[i] = []float32{0, 1, 2, 
3} + } + + return &modulecomponents.VectorizationResult[[]float32]{ + Vector: vectors, + Dimensions: 4, + Text: text, + Errors: errors, + }, rateLimit, 0, nil +} + +func (c *fakeBatchClient) VectorizeQuery(ctx context.Context, + text []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + return &modulecomponents.VectorizationResult[[]float32]{ + Vector: [][]float32{{0.1, 1.1, 2.1, 3.1}}, + Dimensions: 4, + Text: text, + }, nil +} + +func (c *fakeBatchClient) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + return &modulecomponents.RateLimits{RemainingTokens: 0, RemainingRequests: 0, LimitTokens: 0, ResetTokens: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second), ResetRequests: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second)} +} + +func (c *fakeBatchClient) GetApiKeyHash(ctx context.Context, cfg moduletools.ClassConfig) [32]byte { + return [32]byte{} +} + +type FakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f FakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f FakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f FakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f FakeClassConfig) Tenant() string { + return "" +} + +func (f FakeClassConfig) TargetVector() string { + return "" +} + +func (f FakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f 
FakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-google/clients/google.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-google/clients/google.go new file mode 100644 index 0000000000000000000000000000000000000000..e2b19f06e3cafce0fa8da82e4ce43c745f73b0f3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-google/clients/google.go @@ -0,0 +1,405 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/apikey" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2vec-google/ent" +) + +type taskType string + +// Retrieval Use cases +var ( + // Document Task Type: + // Specifies the given text is a query in a search/retrieval setting + retrievalDocument taskType = "RETRIEVAL_DOCUMENT" + // Query Task Types: + // Standard search query where you want to find relevant documents + retrievalQuery taskType = "RETRIEVAL_QUERY" + // Queries are expected to be proper questions + questionAnswering taskType = "QUESTION_ANSWERING" + // Retrieve a document from your corpus that proves or disproves a statement + factVerification taskType = "FACT_VERIFICATION" + // Retrieve relevant code blocks using plain text queries + retrievalCode taskType = "CODE_RETRIEVAL_QUERY" +) + +// Single-input Use Cases +var ( + classification taskType = "CLASSIFICATION" + clustering taskType = "CLUSTERING" + semanticSimilarity taskType = "SEMANTIC_SIMILARITY" +) + +func buildURL(useGenerativeAI bool, apiEndpoint, projectID, modelID string) string { + if 
useGenerativeAI { + if isLegacyModel(modelID) { + // legacy PaLM API + return "https://generativelanguage.googleapis.com/v1beta3/models/embedding-gecko-001:batchEmbedText" + } + return fmt.Sprintf("https://generativelanguage.googleapis.com/v1beta/models/%s:batchEmbedContents", modelID) + } + urlTemplate := "https://%s/v1/projects/%s/locations/us-central1/publishers/google/models/%s:predict" + return fmt.Sprintf(urlTemplate, apiEndpoint, projectID, modelID) +} + +type google struct { + apiKey string + googleApiKey *apikey.GoogleApiKey + useGoogleAuth bool + httpClient *http.Client + urlBuilderFn func(useGenerativeAI bool, apiEndpoint, projectID, modelID string) string + logger logrus.FieldLogger +} + +func New(apiKey string, useGoogleAuth bool, timeout time.Duration, logger logrus.FieldLogger) *google { + return &google{ + apiKey: apiKey, + useGoogleAuth: useGoogleAuth, + googleApiKey: apikey.NewGoogleApiKey(), + httpClient: &http.Client{ + Timeout: timeout, + }, + urlBuilderFn: buildURL, + logger: logger, + } +} + +func (v *google) Vectorize(ctx context.Context, input []string, + config ent.VectorizationConfig, titlePropertyValue string, +) (*ent.VectorizationResult, error) { + return v.vectorize(ctx, input, v.getDocumentTaskType(config.TaskType), titlePropertyValue, config) +} + +func (v *google) VectorizeQuery(ctx context.Context, input []string, + config ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + return v.vectorize(ctx, input, v.getQueryTaskType(config.TaskType), "", config) +} + +func (v *google) vectorize(ctx context.Context, input []string, taskType taskType, + titlePropertyValue string, config ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + useGenerativeAIEndpoint := v.useGenerativeAIEndpoint(config) + + payload := v.getPayload(useGenerativeAIEndpoint, input, taskType, titlePropertyValue, config) + body, err := json.Marshal(payload) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + endpointURL 
:= v.urlBuilderFn(useGenerativeAIEndpoint, + v.getApiEndpoint(config), v.getProjectID(config), v.getModel(config)) + + req, err := http.NewRequestWithContext(ctx, "POST", endpointURL, + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + + apiKey, err := v.getApiKey(ctx, useGenerativeAIEndpoint) + if err != nil { + return nil, errors.Wrapf(err, "Google API Key") + } + req.Header.Add("Content-Type", "application/json") + if useGenerativeAIEndpoint { + req.Header.Add("x-goog-api-key", apiKey) + } else { + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey)) + } + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + if useGenerativeAIEndpoint { + return v.parseGenerativeAIApiResponse(res.StatusCode, bodyBytes, input, config) + } + return v.parseEmbeddingsResponse(res.StatusCode, bodyBytes, input) +} + +func (v *google) useGenerativeAIEndpoint(config ent.VectorizationConfig) bool { + return v.getApiEndpoint(config) == "generativelanguage.googleapis.com" +} + +func (v *google) getPayload(useGenerativeAI bool, input []string, + taskType taskType, title string, config ent.VectorizationConfig, +) any { + if useGenerativeAI { + if v.isLegacy(config) { + return batchEmbedTextRequestLegacy{Texts: input} + } + parts := make([]part, len(input)) + for i := range input { + parts[i] = part{Text: input[i]} + } + req := batchEmbedContents{ + Requests: []embedContentRequest{ + { + Model: fmt.Sprintf("models/%s", config.Model), + Content: content{ + Parts: parts, + }, + TaskType: taskType, + Title: title, + OutputDimensionality: config.Dimensions, + }, + }, + } + return req + } + instances := make([]instance, len(input)) + for i := range input { + instances[i] = instance{Content: input[i], TaskType: taskType, Title: title} + 
} + if config.Dimensions != nil { + return embeddingsRequest{Instances: instances, Parameters: ¶meters{OutputDimensionality: config.Dimensions}} + } + return embeddingsRequest{Instances: instances} +} + +func (v *google) checkResponse(statusCode int, googleApiError *googleApiError) error { + if statusCode != 200 || googleApiError != nil { + if googleApiError != nil { + return fmt.Errorf("connection to Google failed with status: %v error: %v", + statusCode, googleApiError.Message) + } + return fmt.Errorf("connection to Google failed with status: %d", statusCode) + } + return nil +} + +func (v *google) getApiKey(ctx context.Context, useGenerativeAIEndpoint bool) (string, error) { + return v.googleApiKey.GetApiKey(ctx, v.apiKey, useGenerativeAIEndpoint, v.useGoogleAuth) +} + +func (v *google) parseGenerativeAIApiResponse(statusCode int, + bodyBytes []byte, input []string, config ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + var resBody batchEmbedTextResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if err := v.checkResponse(statusCode, resBody.Error); err != nil { + return nil, err + } + + if len(resBody.Embeddings) == 0 { + return nil, errors.Errorf("empty embeddings response") + } + + vectors := make([][]float32, len(resBody.Embeddings)) + for i := range resBody.Embeddings { + if v.isLegacy(config) { + vectors[i] = resBody.Embeddings[i].Value + } else { + vectors[i] = resBody.Embeddings[i].Values + } + } + dimensions := len(resBody.Embeddings[0].Values) + if v.isLegacy(config) { + dimensions = len(resBody.Embeddings[0].Value) + } + + return v.getResponse(input, dimensions, vectors) +} + +func (v *google) parseEmbeddingsResponse(statusCode int, + bodyBytes []byte, input []string, +) (*ent.VectorizationResult, error) { + var resBody embeddingsResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + + if err := v.checkResponse(statusCode, resBody.Error); err != nil { + return nil, err + } + + if len(resBody.Predictions) == 0 { + return nil, errors.Errorf("empty embeddings response") + } + + vectors := make([][]float32, len(resBody.Predictions)) + for i := range resBody.Predictions { + vectors[i] = resBody.Predictions[i].Embeddings.Values + } + dimensions := len(resBody.Predictions[0].Embeddings.Values) + + return v.getResponse(input, dimensions, vectors) +} + +func (v *google) getResponse(input []string, dimensions int, vectors [][]float32) (*ent.VectorizationResult, error) { + return &ent.VectorizationResult{ + Texts: input, + Dimensions: dimensions, + Vectors: vectors, + }, nil +} + +func (v *google) getApiEndpoint(config ent.VectorizationConfig) string { + return config.ApiEndpoint +} + +func (v *google) getProjectID(config ent.VectorizationConfig) string { + return config.ProjectID +} + +func (v *google) getModel(config ent.VectorizationConfig) string { + return config.Model +} + +func (v *google) 
isLegacy(config ent.VectorizationConfig) bool { + return isLegacyModel(config.Model) +} + +func (v *google) getQueryTaskType(in string) taskType { + switch taskType(in) { + // Retrieval Use cases + case retrievalCode: + return retrievalCode + case questionAnswering: + return questionAnswering + case factVerification: + return factVerification + // Single-input Use Cases + case classification: + return classification + case clustering: + return clustering + case semanticSimilarity: + return semanticSimilarity + default: + return retrievalQuery + } +} + +func (v *google) getDocumentTaskType(in string) taskType { + switch taskType(in) { + case classification: + return classification + case clustering: + return clustering + case semanticSimilarity: + return semanticSimilarity + default: + // default are retrieval use cases + return retrievalDocument + } +} + +func isLegacyModel(model string) bool { + // Check if we are using legacy model which runs on deprecated PaLM API + return model == "embedding-gecko-001" +} + +type embeddingsRequest struct { + Instances []instance `json:"instances,omitempty"` + Parameters *parameters `json:"parameters,omitempty"` +} + +type parameters struct { + OutputDimensionality *int64 `json:"outputDimensionality,omitempty"` +} + +type instance struct { + Content string `json:"content"` + TaskType taskType `json:"task_type,omitempty"` + Title string `json:"title,omitempty"` +} + +type embeddingsResponse struct { + Predictions []prediction `json:"predictions,omitempty"` + Error *googleApiError `json:"error,omitempty"` + DeployedModelId string `json:"deployedModelId,omitempty"` + Model string `json:"model,omitempty"` + ModelDisplayName string `json:"modelDisplayName,omitempty"` + ModelVersionId string `json:"modelVersionId,omitempty"` +} + +type prediction struct { + Embeddings embeddings `json:"embeddings,omitempty"` + SafetyAttributes *safetyAttributes `json:"safetyAttributes,omitempty"` +} + +type embeddings struct { + Values []float32 
// safetyAttributes carries Vertex AI safety scoring attached to a prediction.
type safetyAttributes struct {
	Scores     []float64 `json:"scores,omitempty"`
	Blocked    *bool     `json:"blocked,omitempty"`
	Categories []string  `json:"categories,omitempty"`
}

// googleApiError is the standard Google API error envelope.
type googleApiError struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
	Status  string `json:"status"`
}

// batchEmbedTextResponse is the AI Studio batch embeddings response body.
type batchEmbedTextResponse struct {
	Embeddings []embedding     `json:"embeddings,omitempty"`
	Error      *googleApiError `json:"error,omitempty"`
}

// embedding holds one embedding vector; newer models populate "values",
// legacy PaLM responses populate "value".
type embedding struct {
	Values []float32 `json:"values,omitempty"`
	// Legacy PaLM API
	Value []float32 `json:"value,omitempty"`
}

// batchEmbedContents is the AI Studio batchEmbedContents request body.
type batchEmbedContents struct {
	Requests []embedContentRequest `json:"requests,omitempty"`
}

// embedContentRequest is one embedding request within a batchEmbedContents call.
type embedContentRequest struct {
	Model                string   `json:"model"`
	Content              content  `json:"content"`
	TaskType             taskType `json:"taskType,omitempty"`
	Title                string   `json:"title,omitempty"`
	OutputDimensionality *int64   `json:"outputDimensionality,omitempty"`
}

// content groups the text parts of one embedding request.
type content struct {
	Parts []part `json:"parts,omitempty"`
	Role  string `json:"role,omitempty"`
}

// part is a single text fragment.
type part struct {
	Text string `json:"text,omitempty"`
}

// Legacy PaLM API
type batchEmbedTextRequestLegacy struct {
	Texts []string `json:"texts,omitempty"`
}
// TestClient exercises the google vectorizer client against a fake HTTP
// backend, covering the happy path, context expiry, server-side errors,
// and the various ways an API key can (fail to) be resolved.
func TestClient(t *testing.T) {
	t.Run("when all is fine", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &google{
			apiKey:       "apiKey",
			httpClient:   &http.Client{},
			googleApiKey: apikey.NewGoogleApiKey(),
			// urlBuilderFn also asserts the config values are threaded through.
			urlBuilderFn: func(useGenerativeAI bool, apiEndpoint, projectID, modelID string) string {
				assert.Equal(t, "endpoint", apiEndpoint)
				assert.Equal(t, "project", projectID)
				assert.Equal(t, "model", modelID)
				return server.URL
			},
			logger: nullLogger(),
		}
		expected := &ent.VectorizationResult{
			Texts:      []string{"This is my text"},
			Vectors:    [][]float32{{0.1, 0.2, 0.3}},
			Dimensions: 3,
		}
		res, err := c.Vectorize(context.Background(), []string{"This is my text"},
			ent.VectorizationConfig{
				ApiEndpoint: "endpoint",
				ProjectID:   "project",
				Model:       "model",
			}, "")

		assert.Nil(t, err)
		assert.Equal(t, expected, res)
	})

	t.Run("when the context is expired", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &google{
			apiKey:       "apiKey",
			httpClient:   &http.Client{},
			googleApiKey: apikey.NewGoogleApiKey(),
			urlBuilderFn: func(useGenerativeAI bool, apiEndpoint, projectID, modelID string) string {
				return server.URL
			},
			logger: nullLogger(),
		}
		// Deadline of "now" guarantees the request is already expired.
		ctx, cancel := context.WithDeadline(context.Background(), time.Now())
		defer cancel()

		_, err := c.Vectorize(ctx, []string{"This is my text"}, ent.VectorizationConfig{}, "")

		require.NotNil(t, err)
		assert.Contains(t, err.Error(), "context deadline exceeded")
	})

	t.Run("when the server returns an error", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{
			t:           t,
			serverError: errors.Errorf("nope, not gonna happen"),
		})
		defer server.Close()
		c := &google{
			apiKey:       "apiKey",
			httpClient:   &http.Client{},
			googleApiKey: apikey.NewGoogleApiKey(),
			urlBuilderFn: func(useGenerativeAI bool, apiEndpoint, projectID, modelID string) string {
				return server.URL
			},
			logger: nullLogger(),
		}
		_, err := c.Vectorize(context.Background(), []string{"This is my text"},
			ent.VectorizationConfig{}, "")

		require.NotNil(t, err)
		assert.EqualError(t, err, "connection to Google failed with status: 500 error: nope, not gonna happen")
	})

	t.Run("when Palm key is passed using X-Palm-Api-Key header", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &google{
			apiKey:       "",
			httpClient:   &http.Client{},
			googleApiKey: apikey.NewGoogleApiKey(),
			urlBuilderFn: func(useGenerativeAI bool, apiEndpoint, projectID, modelID string) string {
				return server.URL
			},
			logger: nullLogger(),
		}
		// Key comes from the request context instead of the client config.
		ctxWithValue := context.WithValue(context.Background(),
			"X-Palm-Api-Key", []string{"some-key"})

		expected := &ent.VectorizationResult{
			Texts:      []string{"This is my text"},
			Vectors:    [][]float32{{0.1, 0.2, 0.3}},
			Dimensions: 3,
		}
		res, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, ent.VectorizationConfig{}, "")

		require.Nil(t, err)
		assert.Equal(t, expected, res)
	})

	t.Run("when Palm key is empty", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &google{
			apiKey:       "",
			httpClient:   &http.Client{},
			googleApiKey: apikey.NewGoogleApiKey(),
			urlBuilderFn: func(useGenerativeAI bool, apiEndpoint, projectID, modelID string) string {
				return server.URL
			},
			logger: nullLogger(),
		}
		ctx, cancel := context.WithDeadline(context.Background(), time.Now())
		defer cancel()

		_, err := c.Vectorize(ctx, []string{"This is my text"}, ent.VectorizationConfig{}, "")

		require.NotNil(t, err)
		assert.Equal(t, err.Error(), "Google API Key: no api key found "+
			"neither in request header: X-Palm-Api-Key or X-Goog-Api-Key or X-Goog-Vertex-Api-Key or X-Goog-Studio-Api-Key "+
			"nor in environment variable under PALM_APIKEY or GOOGLE_APIKEY")
	})

	t.Run("when X-Palm-Api-Key header is passed but empty", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := &google{
			apiKey:       "",
			googleApiKey: apikey.NewGoogleApiKey(),
			httpClient:   &http.Client{},
			urlBuilderFn: buildURL,
			logger:       nullLogger(),
		}
		// An empty header value must be treated the same as no key at all.
		ctxWithValue := context.WithValue(context.Background(),
			"X-Palm-Api-Key", []string{""})

		_, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, ent.VectorizationConfig{}, "")

		require.NotNil(t, err)
		assert.Equal(t, "Google API Key: no api key found "+
			"neither in request header: X-Palm-Api-Key or X-Goog-Api-Key or X-Goog-Vertex-Api-Key or X-Goog-Studio-Api-Key "+
			"nor in environment variable under PALM_APIKEY or GOOGLE_APIKEY", err.Error())
	})
}

// fakeHandler fakes the Google embeddings endpoint; when serverError is set
// it replies 500 with the error wrapped in the Google error envelope,
// otherwise it returns a fixed 3-dimensional embedding.
type fakeHandler struct {
	t           *testing.T
	serverError error
}

func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	assert.Equal(f.t, http.MethodPost, r.Method)

	if f.serverError != nil {
		embeddingResponse := &embeddingsResponse{
			Error: &googleApiError{
				Code:    http.StatusInternalServerError,
				Status:  "error",
				Message: f.serverError.Error(),
			},
		}

		outBytes, err := json.Marshal(embeddingResponse)
		require.Nil(f.t, err)

		w.WriteHeader(http.StatusInternalServerError)
		w.Write(outBytes)
		return
	}

	bodyBytes, err := io.ReadAll(r.Body)
	require.Nil(f.t, err)
	defer r.Body.Close()

	var req embeddingsRequest
	require.Nil(f.t, json.Unmarshal(bodyBytes, &req))

	require.NotNil(f.t, req)
	require.Len(f.t, req.Instances, 1)

	textInput := req.Instances[0].Content
	assert.Greater(f.t, len(textInput), 0)

	// Every successful request embeds to the same fixed vector.
	embeddingResponse := &embeddingsResponse{
		Predictions: []prediction{
			{
				Embeddings: embeddings{
					Values: []float32{0.1, 0.2, 0.3},
				},
			},
		},
	}

	outBytes, err := json.Marshal(embeddingResponse)
	require.Nil(f.t, err)

	w.Write(outBytes)
}

// nullLogger returns a logger that discards all output.
func nullLogger() logrus.FieldLogger {
	l, _ := test.NewNullLogger()
	return l
}
// MetaInfo returns static module metadata exposed via the meta endpoint.
func (v *google) MetaInfo() (map[string]interface{}, error) {
	return map[string]interface{}{
		"name":              "Google Module",
		"documentationHref": "https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings",
	}, nil
}

// ClassConfigDefaults returns the module-level defaults applied to a class config.
func (m *GoogleModule) ClassConfigDefaults() map[string]interface{} {
	return map[string]interface{}{
		"vectorizeClassName": vectorizer.DefaultVectorizeClassName,
	}
}

// PropertyConfigDefaults returns the per-property defaults applied to a
// class config; dt is currently unused (same defaults for all data types).
func (m *GoogleModule) PropertyConfigDefaults(
	dt *schema.DataType,
) map[string]interface{} {
	return map[string]interface{}{
		"skip":                  !vectorizer.DefaultPropertyIndexed,
		"vectorizePropertyName": vectorizer.DefaultVectorizePropertyName,
	}
}

// ValidateClass validates the module-specific settings of a class.
func (m *GoogleModule) ValidateClass(ctx context.Context,
	class *models.Class, cfg moduletools.ClassConfig,
) error {
	settings := vectorizer.NewClassSettings(cfg)
	return settings.Validate(class)
}

var _ = modulecapabilities.ClassConfigurator(New())
// VectorizationConfig carries the per-request settings resolved from the
// class configuration.
type VectorizationConfig struct {
	ApiEndpoint string // Vertex AI or AI Studio endpoint host
	ProjectID   string // GCP project ID (Vertex AI only)
	Model       string // embedding model name
	Dimensions  *int64 // optional output dimensionality; nil means provider default
	TaskType    string // embedding task type, e.g. RETRIEVAL_QUERY
}
// VectorizationResult pairs the input texts with their embedding vectors.
type VectorizationResult struct {
	Texts      []string
	Dimensions int
	Vectors    [][]float32
}

const (
	Name       = "text2vec-google"
	LegacyName = "text2vec-palm"
)

// New constructs an uninitialized module; Init must run before use.
func New() *GoogleModule {
	return &GoogleModule{}
}

// GoogleModule is a text2vec vectorizer module backed by Google's
// Vertex AI / AI Studio embedding APIs.
type GoogleModule struct {
	vectorizer                   text2vecbase.TextVectorizer[[]float32]
	metaProvider                 text2vecbase.MetaProvider
	graphqlProvider              modulecapabilities.GraphQLArguments
	searcher                     modulecapabilities.Searcher[[]float32]
	nearTextTransformer          modulecapabilities.TextTransform
	logger                       logrus.FieldLogger
	additionalPropertiesProvider modulecapabilities.AdditionalProperties
}

func (m *GoogleModule) Name() string {
	return Name
}

// AltNames returns legacy module names this module also answers to.
func (m *GoogleModule) AltNames() []string {
	return []string{LegacyName}
}

func (m *GoogleModule) Type() modulecapabilities.ModuleType {
	return modulecapabilities.Text2Vec
}

// Init wires up the vectorizer client and additional-properties provider.
func (m *GoogleModule) Init(ctx context.Context,
	params moduletools.ModuleInitParams,
) error {
	m.logger = params.GetLogger()

	if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil {
		return errors.Wrap(err, "init vectorizer")
	}

	if err := m.initAdditionalPropertiesProvider(); err != nil {
		return errors.Wrap(err, "init additional properties provider")
	}

	return nil
}

// InitExtension picks up the nearText transformer from sibling modules and
// initializes the nearText GraphQL provider.
func (m *GoogleModule) InitExtension(modules []modulecapabilities.Module) error {
	for _, module := range modules {
		if module.Name() == m.Name() {
			continue
		}
		if arg, ok := module.(modulecapabilities.TextTransformers); ok {
			if arg != nil && arg.TextTransformers() != nil {
				m.nearTextTransformer = arg.TextTransformers()["nearText"]
			}
		}
	}

	if err := m.initNearText(); err != nil {
		return errors.Wrap(err, "init graphql provider")
	}
	return nil
}

// initVectorizer builds the HTTP client; the API key comes from
// GOOGLE_APIKEY with PALM_APIKEY as a legacy fallback.
func (m *GoogleModule) initVectorizer(ctx context.Context, timeout time.Duration,
	logger logrus.FieldLogger,
) error {
	apiKey := os.Getenv("GOOGLE_APIKEY")
	if apiKey == "" {
		apiKey = os.Getenv("PALM_APIKEY")
	}

	useGoogleAuth := entcfg.Enabled(os.Getenv("USE_GOOGLE_AUTH"))
	client := clients.New(apiKey, useGoogleAuth, timeout, logger)

	m.vectorizer = vectorizer.New(client)
	m.metaProvider = client

	return nil
}

func (m *GoogleModule) initAdditionalPropertiesProvider() error {
	m.additionalPropertiesProvider = additional.NewText2VecProvider()
	return nil
}

// VectorizeObject vectorizes a single object via the configured vectorizer.
func (m *GoogleModule) VectorizeObject(ctx context.Context,
	obj *models.Object, cfg moduletools.ClassConfig,
) ([]float32, models.AdditionalProperties, error) {
	return m.vectorizer.Object(ctx, obj, cfg)
}

// VectorizeBatch vectorizes a batch of objects, skipping those flagged in skipObject.
func (m *GoogleModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) {
	return batch.VectorizeBatch(ctx, objs, skipObject, cfg, m.logger, m.vectorizer.Object)
}

func (m *GoogleModule) MetaInfo() (map[string]interface{}, error) {
	return m.metaProvider.MetaInfo()
}

func (m *GoogleModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty {
	return m.additionalPropertiesProvider.AdditionalProperties()
}

// VectorizeInput vectorizes a single free-form text input.
func (m *GoogleModule) VectorizeInput(ctx context.Context,
	input string, cfg moduletools.ClassConfig,
) ([]float32, error) {
	return m.vectorizer.Texts(ctx, []string{input}, cfg)
}

// VectorizableProperties reports that all properties are vectorizable.
func (m *GoogleModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) {
	return true, nil, nil
}

// verify we implement the modules.Module interface
var (
	_ = modulecapabilities.Module(New())
	_ = modulecapabilities.Vectorizer[[]float32](New())
	_ = modulecapabilities.MetaProvider(New())
	_ = modulecapabilities.Searcher[[]float32](New())
	_ = modulecapabilities.GraphQLArguments(New())
	_ = modulecapabilities.ModuleHasAltNames(New())
)
// initNearText wires the nearText searcher and GraphQL argument provider.
func (m *GoogleModule) initNearText() error {
	m.searcher = nearText.NewSearcher(m.vectorizer)
	m.graphqlProvider = nearText.New(m.nearTextTransformer)
	return nil
}

// Arguments exposes the module's GraphQL arguments (nearText).
func (m *GoogleModule) Arguments() map[string]modulecapabilities.GraphQLArgument {
	return m.graphqlProvider.Arguments()
}

// VectorSearches exposes the module's vector search providers.
func (m *GoogleModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] {
	return m.searcher.VectorSearches()
}

// compile-time interface checks
var (
	_ = modulecapabilities.GraphQLArguments(New())
	_ = modulecapabilities.Searcher[[]float32](New())
)
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "fmt" + "slices" + "strings" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + apiEndpointProperty = "apiEndpoint" + projectIDProperty = "projectId" + modelIDProperty = "modelId" + modelProperty = "model" + titleProperty = "titleProperty" + dimensionsProperty = "dimensions" + taskTypeProperty = "taskType" +) + +const ( + DefaultVectorizeClassName = false + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + DefaultApiEndpoint = "us-central1-aiplatform.googleapis.com" + DefaultModel = "gemini-embedding-001" + DefaultAIStudioEndpoint = "generativelanguage.googleapis.com" + DefaulAIStudioModel = "gemini-embedding-001" + DefaultTaskType = "RETRIEVAL_QUERY" +) + +// default dimensions are set to 768 bc of being backward compatible with earlier models +// textembedding-gecko@001 and embedding-001 that were default ones +var DefaultDimensions int64 = 768 + +var defaultModelDimensions = map[string]*int64{ + "gemini-embedding-001": &DefaultDimensions, +} + +var availableGoogleModels = []string{ + "textembedding-gecko@001", + "textembedding-gecko@latest", + "textembedding-gecko-multilingual@latest", + "textembedding-gecko@003", + "textembedding-gecko@002", + "textembedding-gecko-multilingual@001", + "textembedding-gecko@001", + "text-embedding-preview-0409", + "text-multilingual-embedding-preview-0409", + DefaultModel, + "text-embedding-005", + "text-multilingual-embedding-002", +} + +var availableGenerativeAIModels = []string{ + "embedding-001", + "text-embedding-004", + DefaulAIStudioModel, + "text-embedding-005", + "text-multilingual-embedding-002", +} + +var availableTaskTypes = []string{ + DefaultTaskType, + "QUESTION_ANSWERING", + "FACT_VERIFICATION", + "CODE_RETRIEVAL_QUERY", + "CLASSIFICATION", + "CLUSTERING", + 
"SEMANTIC_SIMILARITY", +} + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{ + cfg: cfg, + BaseClassSettings: *basesettings.NewBaseClassSettingsWithAltNames(cfg, false, "text2vec-google", []string{"text2vec-palm"}, []string{modelIDProperty}), + } +} + +func (ic *classSettings) Validate(class *models.Class) error { + var errorMessages []string + if err := ic.BaseClassSettings.Validate(class); err != nil { + errorMessages = append(errorMessages, err.Error()) + } + + apiEndpoint := ic.ApiEndpoint() + model := ic.Model() + if apiEndpoint == DefaultAIStudioEndpoint { + if model != "" && !ic.validateGoogleSetting(model, availableGenerativeAIModels) { + errorMessages = append(errorMessages, fmt.Sprintf("wrong %s available AI Studio model names are: %v", modelIDProperty, availableGenerativeAIModels)) + } + } else { + projectID := ic.ProjectID() + if projectID == "" { + errorMessages = append(errorMessages, fmt.Sprintf("%s cannot be empty", projectIDProperty)) + } + if model != "" && !ic.validateGoogleSetting(model, availableGoogleModels) { + errorMessages = append(errorMessages, fmt.Sprintf("wrong %s available model names are: %v", modelIDProperty, availableGoogleModels)) + } + } + + if !slices.Contains(availableTaskTypes, ic.TaskType()) { + errorMessages = append(errorMessages, fmt.Sprintf("wrong taskType supported task types are: %v", availableTaskTypes)) + } + + if len(errorMessages) > 0 { + return fmt.Errorf("%s", strings.Join(errorMessages, ", ")) + } + + return nil +} + +func (ic *classSettings) validateGoogleSetting(value string, availableValues []string) bool { + return slices.Contains(availableValues, value) +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) string { + return ic.BaseClassSettings.GetPropertyAsString(name, defaultValue) +} + +func (ic *classSettings) getDefaultModel(apiEndpoint 
string) string { + if apiEndpoint == DefaultAIStudioEndpoint { + return DefaulAIStudioModel + } + return DefaultModel +} + +// Google params +func (ic *classSettings) ApiEndpoint() string { + return ic.getStringProperty(apiEndpointProperty, DefaultApiEndpoint) +} + +func (ic *classSettings) ProjectID() string { + return ic.getStringProperty(projectIDProperty, "") +} + +func (ic *classSettings) Model() string { + if model := ic.getStringProperty(modelProperty, ""); model != "" { + return model + } + return ic.getStringProperty(modelIDProperty, ic.getDefaultModel(ic.ApiEndpoint())) +} + +func (ic *classSettings) TitleProperty() string { + return ic.getStringProperty(titleProperty, "") +} + +func (ic *classSettings) Dimensions() *int64 { + return ic.GetPropertyAsInt64(dimensionsProperty, defaultModelDimensions[ic.Model()]) +} + +func (ic *classSettings) TaskType() string { + return ic.getStringProperty(taskTypeProperty, DefaultTaskType) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-google/vectorizer/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-google/vectorizer/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..403ea441b13e913f9d0366c079b8ddfbb06f828d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-google/vectorizer/class_settings_test.go @@ -0,0 +1,195 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
// Test_classSettings_Validate covers validation and accessor defaults for
// text2vec-google class settings on both Vertex AI and AI Studio endpoints.
// Error cases assert the exact accumulated message produced by Validate.
func Test_classSettings_Validate(t *testing.T) {
	tests := []struct {
		name            string
		cfg             moduletools.ClassConfig
		wantApiEndpoint string
		wantProjectID   string
		wantModelID     string
		wantTitle       string
		wantTaskType    string
		wantDimensions  *int64
		wantErr         error
	}{
		{
			name: "happy flow",
			cfg: fakeClassConfig{
				classConfig: map[string]interface{}{
					"projectId": "projectId",
				},
			},
			wantApiEndpoint: "us-central1-aiplatform.googleapis.com",
			wantProjectID:   "projectId",
			wantModelID:     "gemini-embedding-001",
			wantTaskType:    DefaultTaskType,
			wantDimensions:  &DefaultDimensions,
			wantErr:         nil,
		},
		{
			name: "custom values",
			cfg: fakeClassConfig{
				classConfig: map[string]interface{}{
					"apiEndpoint":   "google.com",
					"projectId":     "projectId",
					"titleProperty": "title",
					"taskType":      "CODE_RETRIEVAL_QUERY",
				},
			},
			wantApiEndpoint: "google.com",
			wantProjectID:   "projectId",
			wantModelID:     "gemini-embedding-001",
			wantTitle:       "title",
			wantTaskType:    "CODE_RETRIEVAL_QUERY",
			wantDimensions:  &DefaultDimensions,
			wantErr:         nil,
		},
		{
			name: "empty projectId",
			cfg: fakeClassConfig{
				classConfig: map[string]interface{}{
					"projectId": "",
				},
			},
			wantErr: errors.Errorf("projectId cannot be empty"),
		},
		{
			// The expected message reflects availableGoogleModels verbatim,
			// including its duplicate "textembedding-gecko@001" entry.
			name: "wrong modelId",
			cfg: fakeClassConfig{
				classConfig: map[string]interface{}{
					"projectId": "projectId",
					"modelId":   "wrong-model",
				},
			},
			wantErr: errors.Errorf("wrong modelId available model names are: " +
				"[textembedding-gecko@001 textembedding-gecko@latest " +
				"textembedding-gecko-multilingual@latest textembedding-gecko@003 " +
				"textembedding-gecko@002 textembedding-gecko-multilingual@001 textembedding-gecko@001 " +
				"text-embedding-preview-0409 text-multilingual-embedding-preview-0409 " +
				"gemini-embedding-001 text-embedding-005 text-multilingual-embedding-002]"),
		},
		{
			name: "all wrong",
			cfg: fakeClassConfig{
				classConfig: map[string]interface{}{
					"projectId": "",
					"modelId":   "wrong-model",
				},
			},
			wantErr: errors.Errorf("projectId cannot be empty, " +
				"wrong modelId available model names are: " +
				"[textembedding-gecko@001 textembedding-gecko@latest " +
				"textembedding-gecko-multilingual@latest textembedding-gecko@003 " +
				"textembedding-gecko@002 textembedding-gecko-multilingual@001 textembedding-gecko@001 " +
				"text-embedding-preview-0409 text-multilingual-embedding-preview-0409 " +
				"gemini-embedding-001 text-embedding-005 text-multilingual-embedding-002]"),
		},
		{
			name: "Generative AI",
			cfg: fakeClassConfig{
				classConfig: map[string]interface{}{
					"apiEndpoint": "generativelanguage.googleapis.com",
				},
			},
			wantApiEndpoint: "generativelanguage.googleapis.com",
			wantProjectID:   "",
			wantModelID:     "gemini-embedding-001",
			wantTaskType:    DefaultTaskType,
			wantDimensions:  &DefaultDimensions,
			wantErr:         nil,
		},
		{
			name: "Generative AI with model",
			cfg: fakeClassConfig{
				classConfig: map[string]interface{}{
					"apiEndpoint": "generativelanguage.googleapis.com",
					"modelId":     "embedding-gecko-001",
				},
			},
			wantApiEndpoint: "generativelanguage.googleapis.com",
			wantProjectID:   "",
			wantModelID:     "embedding-gecko-001",
			wantTaskType:    DefaultTaskType,
			wantDimensions:  nil,
			wantErr:         nil,
		},
		{
			name: "Generative AI with wrong model",
			cfg: fakeClassConfig{
				classConfig: map[string]interface{}{
					"apiEndpoint": "generativelanguage.googleapis.com",
					"modelId":     "textembedding-gecko@001",
				},
			},
			wantErr: errors.Errorf("wrong modelId available AI Studio model names are: [embedding-001 text-embedding-004 gemini-embedding-001 text-embedding-005 text-multilingual-embedding-002]"),
		},
		{
			name: "wrong properties",
			cfg: fakeClassConfig{
				classConfig: map[string]interface{}{
					"projectId": "projectId",
				},
				properties: "wrong-properties",
			},
			wantApiEndpoint: "us-central1-aiplatform.googleapis.com",
			wantProjectID:   "projectId",
			wantModelID:     "textembedding-gecko@001",
			wantTaskType:    DefaultTaskType,
			wantDimensions:  nil,
			wantErr:         errors.New("properties field needs to be of array type, got: string"),
		},
		{
			name: "wrong taskType",
			cfg: fakeClassConfig{
				classConfig: map[string]interface{}{
					"projectId": "projectId",
					"taskType":  "wrong-task-type",
				},
			},
			wantErr: errors.Errorf("wrong taskType supported task types are: " +
				"[RETRIEVAL_QUERY QUESTION_ANSWERING FACT_VERIFICATION CODE_RETRIEVAL_QUERY CLASSIFICATION CLUSTERING SEMANTIC_SIMILARITY]"),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ic := NewClassSettings(tt.cfg)
			if tt.wantErr != nil {
				assert.EqualError(t, ic.Validate(&models.Class{Class: "Test", Properties: []*models.Property{
					{
						Name:     "test",
						DataType: []string{schema.DataTypeText.String()},
					},
				}}), tt.wantErr.Error())
			} else {
				// Success cases assert every accessor's resolved value.
				assert.Equal(t, tt.wantApiEndpoint, ic.ApiEndpoint())
				assert.Equal(t, tt.wantProjectID, ic.ProjectID())
				assert.Equal(t, tt.wantModelID, ic.Model())
				assert.Equal(t, tt.wantTitle, ic.TitleProperty())
				assert.Equal(t, tt.wantTaskType, ic.TaskType())
				assert.Equal(t, tt.wantDimensions, ic.Dimensions())
			}
		})
	}
}
/ __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/text2vec-google/ent" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClient struct { + lastInput []string + lastConfig ent.VectorizationConfig +} + +func (c *fakeClient) Vectorize(ctx context.Context, + text []string, cfg ent.VectorizationConfig, titlePropertyValue string, +) (*ent.VectorizationResult, error) { + c.lastInput = text + c.lastConfig = cfg + return &ent.VectorizationResult{ + Vectors: [][]float32{{0, 1, 2, 3}}, + Dimensions: 4, + Texts: text, + }, nil +} + +func (c *fakeClient) VectorizeQuery(ctx context.Context, + text []string, cfg ent.VectorizationConfig, +) (*ent.VectorizationResult, error) { + c.lastInput = text + c.lastConfig = cfg + return &ent.VectorizationResult{ + Vectors: [][]float32{{0.1, 1.1, 2.1, 3.1}}, + Dimensions: 4, + Texts: text, + }, nil +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizeClassName bool + vectorizePropertyName bool + skippedProperty string + excludedProperty string + apiEndpoint string + projectID string + endpointID string + modelID string + properties interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + classSettings := map[string]interface{}{ + "vectorizeClassName": f.vectorizeClassName, + } + if f.apiEndpoint != "" { + classSettings["apiEndpoint"] = f.apiEndpoint + } + if f.projectID != "" { + classSettings["projectID"] = f.projectID + } + if f.endpointID != "" { + classSettings["endpointID"] = f.endpointID + } + if f.modelID != "" { + classSettings["modelID"] = f.modelID + } + if f.properties != nil { + classSettings["properties"] = f.properties + } + for k, v := range f.classConfig { + classSettings[k] = v + } + return 
classSettings +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.Class() +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-google/vectorizer/objects.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-google/vectorizer/objects.go new file mode 100644 index 0000000000000000000000000000000000000000..7220272abb0aa2d8386a4961c315505cc24ffda6 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-google/vectorizer/objects.go @@ -0,0 +1,85 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-google/ent" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +type Vectorizer struct { + client Client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +type Client interface { + Vectorize(ctx context.Context, input []string, + config ent.VectorizationConfig, titlePropertyValue string) (*ent.VectorizationResult, error) + VectorizeQuery(ctx context.Context, input []string, + config ent.VectorizationConfig) (*ent.VectorizationResult, error) +} + +// IndexCheck returns whether a property of a class should be indexed +type ClassSettings interface { + PropertyIndexed(property string) bool + VectorizePropertyName(propertyName string) bool + VectorizeClassName() bool + ApiEndpoint() string + ProjectID() string + ModelID() string + TitleProperty() string +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, error) { + icheck := NewClassSettings(cfg) + + corpi, titlePropertyValue := v.objectVectorizer.TextsWithTitleProperty(ctx, object, icheck, icheck.TitleProperty()) + // vectorize text + res, err := v.client.Vectorize(ctx, []string{corpi}, ent.VectorizationConfig{ + ApiEndpoint: icheck.ApiEndpoint(), + ProjectID: icheck.ProjectID(), + Model: icheck.Model(), + Dimensions: icheck.Dimensions(), + 
TaskType: icheck.TaskType(), + }, titlePropertyValue) + if err != nil { + return nil, err + } + if len(res.Vectors) == 0 { + return nil, fmt.Errorf("no vectors generated") + } + + if len(res.Vectors) > 1 { + return libvectorizer.CombineVectors(res.Vectors), nil + } + return res.Vectors[0], nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-google/vectorizer/texts.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-google/vectorizer/texts.go new file mode 100644 index 0000000000000000000000000000000000000000..d20adc99c5582b4425123feaa48a36c59a25fe15 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-google/vectorizer/texts.go @@ -0,0 +1,38 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-google/ent" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + settings := NewClassSettings(cfg) + res, err := v.client.VectorizeQuery(ctx, inputs, ent.VectorizationConfig{ + ApiEndpoint: settings.ApiEndpoint(), + ProjectID: settings.ProjectID(), + Model: settings.Model(), + Dimensions: settings.Dimensions(), + TaskType: settings.TaskType(), + }) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + return libvectorizer.CombineVectors(res.Vectors), nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-google/vectorizer/texts_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-google/vectorizer/texts_test.go new file mode 
100644 index 0000000000000000000000000000000000000000..0601d8a47a0d9e80c598d38f0ff321e95061788a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-google/vectorizer/texts_test.go @@ -0,0 +1,97 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// as used in the nearText searcher +func TestVectorizingTexts(t *testing.T) { + type testCase struct { + name string + input []string + expectedGoogleModel string + googleModel string + } + + tests := []testCase{ + { + name: "single word", + input: []string{"hello"}, + googleModel: DefaultModel, + expectedGoogleModel: DefaultModel, + }, + { + name: "multiple words", + input: []string{"hello world, this is me!"}, + googleModel: DefaultModel, + expectedGoogleModel: DefaultModel, + }, + { + name: "multiple sentences (joined with a dot)", + input: []string{"this is sentence 1", "and here's number 2"}, + googleModel: DefaultModel, + expectedGoogleModel: DefaultModel, + }, + { + name: "multiple sentences already containing a dot", + input: []string{"this is sentence 1.", "and here's number 2"}, + googleModel: DefaultModel, + expectedGoogleModel: DefaultModel, + }, + { + name: "multiple sentences already containing a question mark", + input: []string{"this is sentence 1?", "and here's number 2"}, + googleModel: DefaultModel, + expectedGoogleModel: DefaultModel, + }, + { + name: "multiple sentences already containing an exclamation mark", + input: []string{"this is sentence 1!", "and here's number 2"}, + googleModel: DefaultModel, + expectedGoogleModel: DefaultModel, + }, + { + name: "multiple sentences already containing comma", + 
input: []string{"this is sentence 1,", "and here's number 2"}, + googleModel: DefaultModel, + expectedGoogleModel: DefaultModel, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + client := &fakeClient{} + + v := New(client) + + cfg := &fakeClassConfig{ + apiEndpoint: "", + projectID: "", + endpointID: "", + modelID: test.googleModel, + } + vec, err := v.Texts(context.Background(), test.input, cfg) + + require.Nil(t, err) + assert.Equal(t, []float32{0.1, 1.1, 2.1, 3.1}, vec) + assert.Equal(t, test.input, client.lastInput) + assert.Equal(t, DefaultApiEndpoint, client.lastConfig.ApiEndpoint) + assert.Equal(t, test.googleModel, client.lastConfig.Model) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/gpt4all.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/gpt4all.go new file mode 100644 index 0000000000000000000000000000000000000000..8fdd2bcdd5cd7d70fa822c2b8805787e36d3752c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/gpt4all.go @@ -0,0 +1,99 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2vec-gpt4all/ent" +) + +type client struct { + origin string + httpClient *http.Client + logger logrus.FieldLogger +} + +func New(origin string, timeout time.Duration, logger logrus.FieldLogger) *client { + return &client{ + origin: origin, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (c *client) Vectorize(ctx context.Context, text string) (*ent.VectorizationResult, error) { + body, err := json.Marshal(vecRequest{text}) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", c.url("/vectorize"), + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + + res, err := c.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + var resBody vecResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes))) + } + + if res.StatusCode != 200 { + if resBody.Error != "" { + return nil, errors.Errorf("failed with status: %d error: %v", res.StatusCode, resBody.Error) + } + return nil, errors.Errorf("failed with status: %d", res.StatusCode) + } + + return &ent.VectorizationResult{ + Vector: resBody.Vector, + Dimensions: resBody.Dim, + Text: resBody.Text, + }, nil +} + +func (c *client) url(path string) string { + return fmt.Sprintf("%s%s", c.origin, path) +} + +type vecRequest struct { + Text string `json:"text"` +} + +type vecResponse struct { + Text string `json:"text"` + Vector []float32 `json:"vector"` + Dim int `json:"dim"` + Error string `json:"error"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..838eaf3ac435f5b59e3a26dca36b670413b1c80c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/meta.go @@ -0,0 +1,45 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + + "github.com/pkg/errors" +) + +func (c *client) MetaInfo() (map[string]interface{}, error) { + req, err := http.NewRequestWithContext(context.Background(), "GET", c.url("/meta"), nil) + if err != nil { + return nil, errors.Wrap(err, "create GET meta request") + } + + res, err := c.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send GET meta request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read meta response body") + } + + var resBody map[string]interface{} + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, errors.Wrap(err, "unmarshal meta response body") + } + return resBody, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/meta_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..947b346acc033dffb198948310908047d4027d55 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/meta_test.go @@ -0,0 +1,74 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGetMeta(t *testing.T) { + t.Run("when the server is providing meta", func(t *testing.T) { + server := httptest.NewServer(&testMetaHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + meta, err := c.MetaInfo() + + assert.Nil(t, err) + assert.NotNil(t, meta) + metaModel := meta["name"] + require.NotNil(t, metaModel) + assert.Equal(t, "Bert", metaModel) + }) +} + +type testMetaHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/meta", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.Write([]byte(f.metaInfo())) +} + +func (f *testMetaHandler) metaInfo() string { + return `{ + "description": "Sbert
  • For embeddings", + "disableGUI": "true", + "filename": "ggml-all-MiniLM-L6-v2-f16.bin", + "filesize": "45521167", + "md5sum": "031bb5d5722c08d13e3e8eaf55c37391", + "name": "Bert", + "order": "t", + "parameters": "1 million", + "path": "/Users/marcin/.cache/gpt4all/ggml-all-MiniLM-L6-v2-f16.bin", + "promptTemplate": "### Human: \n{0}\n### Assistant:\n", + "quant": "f16", + "ramrequired": "1", + "requires": "2.4.14", + "systemPrompt": "", + "type": "Bert" +}` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/startup.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/startup.go new file mode 100644 index 0000000000000000000000000000000000000000..d54894ba080084b7fb304e89f19c86678d91293b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/startup.go @@ -0,0 +1,68 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "time" + + "github.com/pkg/errors" +) + +func (c *client) WaitForStartup(initCtx context.Context, + interval time.Duration, +) error { + t := time.NewTicker(interval) + defer t.Stop() + expired := initCtx.Done() + var lastErr error + for { + select { + case <-t.C: + lastErr = c.checkReady(initCtx) + if lastErr == nil { + return nil + } + c.logger. + WithField("action", "gpt4all_remote_wait_for_startup"). 
+ WithError(lastErr).Warnf("gpt4all remote service not ready") + case <-expired: + return errors.Wrapf(lastErr, "init context expired before remote was ready") + } + } +} + +func (c *client) checkReady(initCtx context.Context) error { + // spawn a new context (derived on the overall context) which is used to + // consider an individual request timed out + requestCtx, cancel := context.WithTimeout(initCtx, 500*time.Millisecond) + defer cancel() + + req, err := http.NewRequestWithContext(requestCtx, http.MethodGet, + c.url("/.well-known/ready"), nil) + if err != nil { + return errors.Wrap(err, "create check ready request") + } + + res, err := c.httpClient.Do(req) + if err != nil { + return errors.Wrap(err, "send check ready request") + } + + defer res.Body.Close() + if res.StatusCode > 299 { + return errors.Errorf("not ready: status %d", res.StatusCode) + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/startup_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/startup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ff65558b1e87f7f143c6c43afdaf422dd3829c34 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/clients/startup_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWaitForStartup(t *testing.T) { + t.Run("when the server is immediately ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + err := c.WaitForStartup(context.Background(), 50*time.Millisecond) + + assert.Nil(t, err) + }) + + t.Run("when the server is down", func(t *testing.T) { + c := New("http://nothing-running-at-this-url", 0, nullLogger()) + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 150*time.Millisecond) + + require.NotNil(t, err, nullLogger()) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is alive, but not ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(1 * time.Minute), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "expired before remote was ready") + }) + + t.Run("when the server is initially not ready, but then becomes ready", + func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(100 * time.Millisecond), + }) + c := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := c.WaitForStartup(ctx, 50*time.Millisecond) + + require.Nil(t, err) + }) +} + +type testReadyHandler 
struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testReadyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/.well-known/ready", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } + + w.WriteHeader(http.StatusNoContent) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/config.go new file mode 100644 index 0000000000000000000000000000000000000000..4f8d66290a018aaf6b835956e8cfebf3118877d2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/config.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modtransformers + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *GPT4AllModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GPT4AllModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{} +} + +func (m *GPT4AllModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/ent/vectorization_result.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/ent/vectorization_result.go new file mode 100644 index 0000000000000000000000000000000000000000..ba40dffb40d1f2d0bc1507ba7a839452c9485986 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/ent/vectorization_result.go @@ -0,0 +1,18 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationResult struct { + Text string + Dimensions int + Vector []float32 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/module.go new file mode 100644 index 0000000000000000000000000000000000000000..ecbb8cb0cd92fcb3cb1003974af8e2a64fa053ea --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/module.go @@ -0,0 +1,155 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modtransformers + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + entcfg "github.com/weaviate/weaviate/entities/config" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-gpt4all/clients" + "github.com/weaviate/weaviate/modules/text2vec-gpt4all/vectorizer" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" +) + +const Name = "text2vec-gpt4all" + +func New() *GPT4AllModule { + return &GPT4AllModule{} +} + +type GPT4AllModule struct { + vectorizer text2vecbase.TextVectorizer[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func (m 
*GPT4AllModule) Name() string { + return Name +} + +func (m *GPT4AllModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2Vec +} + +func (m *GPT4AllModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init additional properties provider") + } + + return nil +} + +func (m *GPT4AllModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *GPT4AllModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + uri := os.Getenv("GPT4ALL_INFERENCE_API") + if uri == "" { + return errors.New("required variable GPT4ALL_INFERENCE_API is not set") + } + + waitForStartup := true + if envWaitForStartup := os.Getenv("GPT4ALL_WAIT_FOR_STARTUP"); envWaitForStartup != "" { + waitForStartup = entcfg.Enabled(envWaitForStartup) + } + + client := clients.New(uri, timeout, logger) + if waitForStartup { + if err := client.WaitForStartup(ctx, 1*time.Second); err != nil { + return errors.Wrap(err, "init remote vectorizer") + } + } + + m.vectorizer = vectorizer.New(client) + m.metaProvider = client + + return nil +} + +func (m *GPT4AllModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m *GPT4AllModule) VectorizeObject(ctx 
context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg) +} + +func (m *GPT4AllModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + return batch.VectorizeBatch(ctx, objs, skipObject, cfg, m.logger, m.vectorizer.Object) +} + +func (m *GPT4AllModule) MetaInfo() (map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *GPT4AllModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +func (m *GPT4AllModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, nil, nil +} + +func (m *GPT4AllModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..f204516a2d262bcd57c2a9bafcc3ce93def0f4bf --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modtransformers + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *GPT4AllModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *GPT4AllModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *GPT4AllModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..a370da024230a257598f99c8e1db06bb6d4dd5b4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/class_settings.go @@ -0,0 +1,37 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + DefaultPropertyIndexed = true + DefaultVectorizeClassName = true + DefaultVectorizePropertyName = false +) + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, false)} +} + +func (cs *classSettings) Validate(class *models.Class) error { + return cs.BaseClassSettings.ValidateClassSettings() +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..7d6c876ce2d32fed8ad030275b88859eb8dc24e9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/class_settings_test.go @@ -0,0 +1,106 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/modules" +) + +func TestClassSettings(t *testing.T) { + t.Run("with all defaults", func(t *testing.T) { + class := &models.Class{ + Class: "MyClass", + Properties: []*models.Property{{ + Name: "someProp", + }}, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", "", nil) + ic := NewClassSettings(cfg) + + assert.True(t, ic.PropertyIndexed("someProp")) + assert.False(t, ic.VectorizePropertyName("someProp")) + assert.True(t, ic.VectorizeClassName()) + }) + + t.Run("with a nil config", func(t *testing.T) { + // this is the case if we were running in a situation such as a + // cross-class vectorization of search time, as is the case with Explore + // {}, we then expect all default values + + ic := NewClassSettings(nil) + + assert.True(t, ic.PropertyIndexed("someProp")) + assert.False(t, ic.VectorizePropertyName("someProp")) + assert.True(t, ic.VectorizeClassName()) + }) + + t.Run("with all explicit config matching the defaults", func(t *testing.T) { + class := &models.Class{ + Class: "MyClass", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "vectorizeClassName": true, + "poolingStrategy": "masked_mean", + }, + }, + Properties: []*models.Property{{ + Name: "someProp", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "skip": false, + "vectorizePropertyName": false, + }, + }, + }}, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", "", nil) + ic := NewClassSettings(cfg) + + assert.True(t, ic.PropertyIndexed("someProp")) + assert.False(t, ic.VectorizePropertyName("someProp")) + assert.True(t, ic.VectorizeClassName()) + }) + + t.Run("with all explicit config using non-default values", func(t *testing.T) { + class := &models.Class{ + Class: 
"MyClass", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "vectorizeClassName": false, + "poolingStrategy": "cls", + }, + }, + Properties: []*models.Property{{ + Name: "someProp", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "skip": true, + "vectorizePropertyName": true, + }, + }, + }}, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", "", nil) + ic := NewClassSettings(cfg) + + assert.False(t, ic.PropertyIndexed("someProp")) + assert.True(t, ic.VectorizePropertyName("someProp")) + assert.False(t, ic.VectorizeClassName()) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d41ad3f3c1ca8fef01279d85ffb9f4861f4904ed --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/fakes_for_test.go @@ -0,0 +1,89 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/text2vec-gpt4all/ent" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClient struct { + lastInput string +} + +func (c *fakeClient) Vectorize(ctx context.Context, + text string, +) (*ent.VectorizationResult, error) { + c.lastInput = text + return &ent.VectorizationResult{ + Vector: []float32{0, 1, 2, 3}, + Dimensions: 4, + Text: text, + }, nil +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizeClassName bool + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + classSettings := map[string]interface{}{ + "vectorizeClassName": f.vectorizeClassName, + } + return classSettings +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/objects.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/objects.go new file mode 100644 index 
0000000000000000000000000000000000000000..fc7c94ee598db569d32d6f756b5b0504684faab8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/objects.go @@ -0,0 +1,60 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-gpt4all/ent" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" +) + +type Vectorizer struct { + client Client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +type Client interface { + Vectorize(ctx context.Context, text string) (*ent.VectorizationResult, error) +} + +type ClassSettings interface { + PropertyIndexed(property string) bool + VectorizeClassName() bool + VectorizePropertyName(propertyName string) bool +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, error) { + text := v.objectVectorizer.Texts(ctx, object, NewClassSettings(cfg)) + res, err := v.client.Vectorize(ctx, text) + if err != nil { + return nil, err + } + + return res.Vector, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/texts.go 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/texts.go new file mode 100644 index 0000000000000000000000000000000000000000..5d4f4d326180e55acc9a8aa32932726ac1ddbe33 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/texts.go @@ -0,0 +1,34 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + vectors := make([][]float32, len(inputs)) + for i := range inputs { + res, err := v.client.Vectorize(ctx, inputs[i]) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + vectors[i] = res.Vector + } + return libvectorizer.CombineVectors(vectors), nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/texts_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/texts_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a0631f94717391290cc4deedb108fcfbb8b6b299 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-gpt4all/vectorizer/texts_test.go @@ -0,0 +1,63 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// as used in the nearText searcher +func TestVectorizingTexts(t *testing.T) { + type testCase struct { + name string + input []string + expectedClientCall string + } + + tests := []testCase{ + { + name: "single word", + input: []string{"hello"}, + expectedClientCall: "hello", + }, + { + name: "multiple words", + input: []string{"hello world, this is me!"}, + expectedClientCall: "hello world, this is me!", + }, + + { + name: "multiple sentences", + input: []string{"this is sentence 1", "and here's number 2"}, + expectedClientCall: "and here's number 2", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + client := &fakeClient{} + + v := New(client) + + settings := &fakeClassConfig{} + vec, err := v.Texts(context.Background(), test.input, settings) + + require.Nil(t, err) + assert.Equal(t, []float32{0, 1, 2, 3}, vec) + assert.Equal(t, test.expectedClientCall, client.lastInput) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/bert_embeddings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/bert_embeddings.go new file mode 100644 index 0000000000000000000000000000000000000000..ae2a6cdc08417b76bfcca37b3e57cef9bbb88a33 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/bert_embeddings.go @@ -0,0 +1,41 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import "errors" + +type bertEmbeddingsDecoder struct{} + +func newBertEmbeddingsDecoder() *bertEmbeddingsDecoder { + return &bertEmbeddingsDecoder{} +} + +func (d bertEmbeddingsDecoder) calculateVector(embeddings [][]float32) ([]float32, error) { + if len(embeddings) > 0 { + vectorLen := len(embeddings[0]) + sumEmbeddings := make([]float32, vectorLen) + embeddingsLen := len(embeddings) + var sum float32 + for i := 0; i < vectorLen; i++ { + sum = 0 + for j := 0; j < embeddingsLen; j++ { + sum += embeddings[j][i] + } + sumEmbeddings[i] = sum + } + for i := range sumEmbeddings { + sumEmbeddings[i] = sumEmbeddings[i] / float32(embeddingsLen) + } + return sumEmbeddings, nil + } + return nil, errors.New("missing embeddings") +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/bert_embeddings_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/bert_embeddings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ee757a2ed292413031625214d3bd088c5db49b62 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/bert_embeddings_test.go @@ -0,0 +1,64 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "reflect" + "testing" +) + +func Test_bertEmbeddingsDecoder_calculateVector(t *testing.T) { + tests := []struct { + name string + embeddings [][]float32 + want []float32 + wantErr bool + }{ + { + name: "nil", + embeddings: nil, + wantErr: true, + }, + { + name: "empty", + embeddings: [][]float32{}, + wantErr: true, + }, + { + name: "just one vector", + embeddings: [][]float32{{-0.17978577315807343}}, + want: []float32{-0.17978577315807343}, + }, + { + name: "distilbert-base-uncased", + embeddings: [][]float32{ + {-0.17978577315807343, -0.0678672045469284, 0.1706605851650238, -0.1639413982629776, -0.12804915010929108, 0.017568372189998627, 0.1610901951789856, 0.19909054040908813, -0.26103103160858154, -0.14505508542060852}, + {-0.25516796112060547, -0.054695576429367065, 0.13527897000312805, -0.3919253945350647, 0.1900954395532608, 0.5994636416435242, 0.5798457264900208, 0.6522972583770752, -0.08617493510246277, -0.35053199529647827}, + {0.930827260017395, 0.3315476179122925, -0.323006272315979, 0.18198077380657196, -0.3299236297607422, -0.5998684763908386, 0.3299814462661743, -0.6352149844169617, 0.5154204368591309, 0.11740084737539291}, + }, + want: []float32{0.1652911752462387, 0.06966160982847214, -0.005688905715942383, -0.12462866306304932, -0.08929244428873062, 0.005721171852201223, 0.35697245597839355, 0.07205760478973389, 0.05607149004936218, -0.1260620802640915}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + d := bertEmbeddingsDecoder{} + got, err := d.calculateVector(tt.embeddings) + if (err != nil) != tt.wantErr { + t.Errorf("bertEmbeddingsDecoder.calculateVector() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("bertEmbeddingsDecoder.calculateVector() = %v, want %v", got, tt.want) + } + }) + } +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..66b3424d989ced1b66c11b705780a757ec395d48 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/fakes_for_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/huggingface.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/huggingface.go new file mode 100644 index 0000000000000000000000000000000000000000..4b9bd5bcb4ea0f1e3381d3aa4a3390032f9679d6 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/huggingface.go @@ -0,0 +1,293 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2vec-huggingface/ent" +) + +const ( + DefaultOrigin = "https://router.huggingface.co/hf-inference/models" + DefaultPath = "pipeline/feature-extraction" +) + +// there are no explicit rate limits: https://huggingface.co/docs/inference-providers/providers/hf-inference#feature-extraction +// so we set values that work and leave it up to the users to increase these values +const ( + DefaultRPM = 100 // + DefaultTPM = 10000000 // no token limit +) + +type embeddingsRequest struct { + Inputs []string `json:"inputs"` + Options *options `json:"options,omitempty"` +} + +type options struct { + WaitForModel bool `json:"wait_for_model,omitempty"` + UseGPU bool `json:"use_gpu,omitempty"` + UseCache bool `json:"use_cache,omitempty"` +} + +type embedding [][]float32 + +type embeddingBert [][][][]float32 + +type embeddingObject struct { + Embeddings embedding `json:"embeddings"` +} + +type huggingFaceApiError struct { + Error string `json:"error"` + 
EstimatedTime *float32 `json:"estimated_time,omitempty"` + Warnings []string `json:"warnings"` +} + +type vectorizer struct { + apiKey string + httpClient *http.Client + bertEmbeddingsDecoder *bertEmbeddingsDecoder + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + apiKey: apiKey, + httpClient: &http.Client{ + Timeout: timeout, + }, + bertEmbeddingsDecoder: newBertEmbeddingsDecoder(), + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + config := v.getVectorizationConfig(cfg) + res, err := v.vectorize(ctx, v.getURL(config), input, v.getOptions(config)) + return res, nil, 0, err +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + config := v.getVectorizationConfig(cfg) + return v.vectorize(ctx, v.getURL(config), input, v.getOptions(config)) +} + +func (v *vectorizer) getVectorizationConfig(cfg moduletools.ClassConfig) ent.VectorizationConfig { + icheck := ent.NewClassSettings(cfg) + return ent.VectorizationConfig{ + EndpointURL: icheck.EndpointURL(), + Model: icheck.PassageModel(), + WaitForModel: icheck.OptionWaitForModel(), + UseGPU: icheck.OptionUseGPU(), + UseCache: icheck.OptionUseCache(), + } +} + +func (v *vectorizer) vectorize(ctx context.Context, url string, + input []string, options options, +) (*modulecomponents.VectorizationResult[[]float32], error) { + body, err := json.Marshal(embeddingsRequest{ + Inputs: input, + Options: &options, + }) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", url, + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + 
if apiKey, err := v.getApiKey(ctx); apiKey != "" && err == nil { + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey)) + } + req.Header.Add("Content-Type", "application/json") + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + if err := checkResponse(res, bodyBytes); err != nil { + return nil, err + } + + vector, errs, err := v.decodeVector(bodyBytes) + if err != nil { + return nil, errors.Wrap(err, "cannot decode vector") + } + + return &modulecomponents.VectorizationResult[[]float32]{ + Text: input, + Dimensions: len(vector[0]), + Vector: vector, + Errors: errs, + }, nil +} + +func checkResponse(res *http.Response, bodyBytes []byte) error { + if res.StatusCode < 400 { + return nil + } + + var resBody huggingFaceApiError + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return fmt.Errorf("unmarshal error response body: %v", string(bodyBytes)) + } + + message := fmt.Sprintf("failed with status: %d", res.StatusCode) + if resBody.Error != "" { + message = fmt.Sprintf("%s error: %v", message, resBody.Error) + if resBody.EstimatedTime != nil { + message = fmt.Sprintf("%s estimated time: %v", message, *resBody.EstimatedTime) + } + if len(resBody.Warnings) > 0 { + message = fmt.Sprintf("%s warnings: %v", message, resBody.Warnings) + } + } + + if res.StatusCode == http.StatusInternalServerError { + message = fmt.Sprintf("connection to HuggingFace %v", message) + } + + return errors.New(message) +} + +func (v *vectorizer) decodeVector(bodyBytes []byte) ([][]float32, []error, error) { + var emb embedding + if err := json.Unmarshal(bodyBytes, &emb); err != nil { + var embObject embeddingObject + if err := json.Unmarshal(bodyBytes, &embObject); err != nil { + var embBert embeddingBert + if err := json.Unmarshal(bodyBytes, &embBert); err != nil { 
+ return nil, nil, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + + if len(embBert) == 1 && len(embBert[0]) > 0 { + vectors := make([][]float32, len(embBert[0])) + errs := make([]error, len(embBert[0])) + for i, embBer := range embBert[0] { + vectors[i], errs[i] = v.bertEmbeddingsDecoder.calculateVector(embBer) + } + return vectors, errs, nil + } + + return nil, nil, errors.New("unprocessable response body") + } + if len(embObject.Embeddings) > 0 { + return embObject.Embeddings, nil, nil + } + + return nil, nil, errors.New("unprocessable response body") + } + + if len(emb) > 0 { + return emb, nil, nil + } + + return nil, nil, errors.New("unprocessable response body") +} + +func (v *vectorizer) GetApiKeyHash(ctx context.Context, config moduletools.ClassConfig) [32]byte { + key, err := v.getApiKey(ctx) + if err != nil { + return [32]byte{} + } + return sha256.Sum256([]byte(key)) +} + +func (v *vectorizer) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + rpm, _ := modulecomponents.GetRateLimitFromContext(ctx, "Cohere", DefaultRPM, 0) + + execAfterRequestFunction := func(limits *modulecomponents.RateLimits, tokensUsed int, deductRequest bool) { + // refresh is after 60 seconds but leave a bit of room for errors. 
Otherwise, we only deduct the request that just happened + if limits.LastOverwrite.Add(61 * time.Second).After(time.Now()) { + if deductRequest { + limits.RemainingRequests -= 1 + } + return + } + + limits.RemainingRequests = rpm + limits.ResetRequests = time.Now().Add(time.Duration(61) * time.Second) + limits.LimitRequests = rpm + limits.LastOverwrite = time.Now() + + // high dummy values + limits.RemainingTokens = DefaultTPM + limits.LimitTokens = DefaultTPM + limits.ResetTokens = time.Now().Add(time.Duration(1) * time.Second) + } + + initialRL := &modulecomponents.RateLimits{AfterRequestFunction: execAfterRequestFunction, LastOverwrite: time.Now().Add(-61 * time.Minute)} + initialRL.ResetAfterRequestFunction(0) // set initial values + + return initialRL +} + +func (v *vectorizer) getApiKey(ctx context.Context) (string, error) { + if apiKey := modulecomponents.GetValueFromContext(ctx, "X-Huggingface-Api-Key"); apiKey != "" { + return apiKey, nil + } + if v.apiKey != "" { + return v.apiKey, nil + } + return "", errors.New("no api key found " + + "neither in request header: X-Huggingface-Api-Key " + + "nor in environment variable under HUGGINGFACE_APIKEY") +} + +func (v *vectorizer) getOptions(config ent.VectorizationConfig) options { + return options{ + WaitForModel: config.WaitForModel, + UseGPU: config.UseGPU, + UseCache: config.UseCache, + } +} + +func (v *vectorizer) getURL(config ent.VectorizationConfig) string { + if config.EndpointURL != "" { + return config.EndpointURL + } + + return fmt.Sprintf("%s/%s/%s", DefaultOrigin, config.Model, DefaultPath) +} + +func (v *vectorizer) HasTokenLimit() bool { return false } + +func (v *vectorizer) ReturnsRateLimit() bool { return false } diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/huggingface_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/huggingface_test.go new file mode 100644 index 
0000000000000000000000000000000000000000..c73f16e86989d634e700610c255884b2269db63c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/huggingface_test.go @@ -0,0 +1,278 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/modules/text2vec-huggingface/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents" +) + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "apiKey", + httpClient: &http.Client{}, + logger: nullLogger(), + } + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + res, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, + fakeClassConfig{classConfig: map[string]interface{}{ + "Model": "sentence-transformers/gtr-t5-xxl", + "endpointURL": server.URL, + "WaitForModel": false, + "UseGPU": false, + "UseCache": true, + }}) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "apiKey", + httpClient: &http.Client{}, + logger: nullLogger(), + } + ctx, cancel := context.WithDeadline(context.Background(), 
time.Now()) + defer cancel() + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, + fakeClassConfig{classConfig: map[string]interface{}{"endpointURL": server.URL}}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + c := &vectorizer{ + apiKey: "apiKey", + httpClient: &http.Client{}, + logger: nullLogger(), + } + _, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, + fakeClassConfig{classConfig: map[string]interface{}{"endpointURL": server.URL}}) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "connection to HuggingFace failed with status: 500 error: nope, not gonna happen estimated time: 20") + }) + + t.Run("when HuggingFace key is passed using X-Huggingface-Api-Key header", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "", + httpClient: &http.Client{}, + logger: nullLogger(), + } + ctxWithValue := context.WithValue(context.Background(), + "X-Huggingface-Api-Key", []string{"some-key"}) + + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + + res, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, + fakeClassConfig{classConfig: map[string]interface{}{ + "Model": "sentence-transformers/gtr-t5-xxl", + "endpointURL": server.URL, + "WaitForModel": true, + "UseGPU": false, + "UseCache": true, + }}) + + require.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when a request requires an API KEY", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: errors.Errorf("A valid user or organization token is required"), + }) + defer 
server.Close() + c := &vectorizer{ + apiKey: "", + httpClient: &http.Client{}, + logger: nullLogger(), + } + ctxWithValue := context.WithValue(context.Background(), + "X-Huggingface-Api-Key", []string{""}) + + _, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, + fakeClassConfig{classConfig: map[string]interface{}{ + "Model": "sentence-transformers/gtr-t5-xxl", + "endpointURL": server.URL, + }}) + require.NotNil(t, err) + assert.Equal(t, err.Error(), "failed with status: 401 error: A valid user or organization token is required") + }) + + t.Run("when the server returns an error with warnings", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: errors.Errorf("with warnings"), + }) + defer server.Close() + c := &vectorizer{ + apiKey: "apiKey", + httpClient: &http.Client{}, + logger: nullLogger(), + } + _, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, + fakeClassConfig{classConfig: map[string]interface{}{"endpointURL": server.URL}}) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "connection to HuggingFace failed with status: 500 error: with warnings "+ + "warnings: [There was an inference error: CUDA error: all CUDA-capable devices are busy or unavailable\n"+ + "CUDA kernel errors might be asynchronously reported at some other API call,so the stacktrace below might be incorrect.\n"+ + "For debugging consider passing CUDA_LAUNCH_BLOCKING=1.]") + }) +} + +type fakeHandler struct { + t *testing.T + serverError error +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.serverError != nil { + switch f.serverError.Error() { + case "with warnings": + embeddingError := map[string]interface{}{ + "error": f.serverError.Error(), + "warnings": []string{ + "There was an inference error: CUDA error: all CUDA-capable devices are busy or unavailable\n" + + "CUDA kernel errors might be asynchronously 
reported at some other API call,so the stacktrace below might be incorrect.\n" + + "For debugging consider passing CUDA_LAUNCH_BLOCKING=1.", + }, + } + outBytes, err := json.Marshal(embeddingError) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + case "A valid user or organization token is required": + embeddingError := map[string]interface{}{ + "error": "A valid user or organization token is required", + } + outBytes, err := json.Marshal(embeddingError) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusUnauthorized) + w.Write(outBytes) + return + default: + embeddingError := map[string]interface{}{ + "error": f.serverError.Error(), + "estimated_time": 20.0, + } + outBytes, err := json.Marshal(embeddingError) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + textInputs := b["inputs"].([]interface{}) + assert.Greater(f.t, len(textInputs), 0) + textInput := textInputs[0].(string) + assert.Greater(f.t, len(textInput), 0) + + // TODO: fix this + embedding := [][]float32{{0.1, 0.2, 0.3}} + outBytes, err := json.Marshal(embedding) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func Test_getURL(t *testing.T) { + v := &vectorizer{} + + tests := []struct { + name string + config ent.VectorizationConfig + want string + }{ + { + name: "Facebook DPR model", + config: ent.VectorizationConfig{ + Model: "sentence-transformers/facebook-dpr-ctx_encoder-multiset-base", + }, + want: "https://router.huggingface.co/hf-inference/models/sentence-transformers/facebook-dpr-ctx_encoder-multiset-base/pipeline/feature-extraction", + }, + { + name: "BERT base model (uncased)", + config: ent.VectorizationConfig{ + 
Model: "bert-base-uncased", + }, + want: "https://router.huggingface.co/hf-inference/models/bert-base-uncased/pipeline/feature-extraction", + }, + { + name: "BERT base model (uncased)", + config: ent.VectorizationConfig{ + EndpointURL: "https://self-hosted-instance.com/bert-base-uncased", + }, + want: "https://self-hosted-instance.com/bert-base-uncased", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, v.getURL(tt.config)) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..26b4e97ed3acbf1ea4d23f746f92b2d5493bbf34 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Hugging Face Module", + "documentationHref": "https://huggingface.co/docs/api-inference/detailed_parameters#feature-extraction-task", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/config.go new file mode 100644 index 0000000000000000000000000000000000000000..121c713a6c459a0b863218d315cf1198c18c9627 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/config.go @@ -0,0 +1,51 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modhuggingface + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2vec-huggingface/ent" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *HuggingFaceModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": ent.DefaultVectorizeClassName, + "model": ent.DefaultHuggingFaceModel, + "waitForModel": ent.DefaultOptionWaitForModel, + "useGPU": ent.DefaultOptionUseGPU, + "useCache": ent.DefaultOptionUseCache, + } +} + +func (m *HuggingFaceModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !ent.DefaultPropertyIndexed, + "vectorizePropertyName": ent.DefaultVectorizePropertyName, + } +} + +func (m *HuggingFaceModule) ValidateClass(ctx context.Context, + class 
*models.Class, cfg moduletools.ClassConfig, +) error { + settings := ent.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..b990dea49464be57ee27d6b16ec69b13bde1eddb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/ent/class_settings.go @@ -0,0 +1,134 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + // Default values for model, WaitForModel, useGPU and useCache cannot be changed before we solve how old classes + // that have the defaults NOT set will handle the change + DefaultHuggingFaceModel = "sentence-transformers/msmarco-bert-base-dot-v5" + DefaultOptionWaitForModel = false + DefaultOptionUseGPU = false + DefaultOptionUseCache = true + DefaultVectorizeClassName = true + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + LowerCaseInput = false +) + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettingsWithCustomModel(cfg, LowerCaseInput, "passageModel")} +} + +func (cs *classSettings) EndpointURL() string { + 
return cs.getEndpointURL()
}

// PassageModel returns the configured model, falling back to the package
// default when neither "model" nor "passageModel" is set.
func (cs *classSettings) PassageModel() string {
	if model := cs.getPassageModel(); model != "" {
		return model
	}
	return DefaultHuggingFaceModel
}

// OptionWaitForModel reports whether the vectorizer should wait for the
// remote model to load instead of failing fast.
func (cs *classSettings) OptionWaitForModel() bool {
	return cs.getOptionOrDefault("waitForModel", DefaultOptionWaitForModel)
}

// OptionUseGPU reports whether GPU-backed inference is requested.
func (cs *classSettings) OptionUseGPU() bool {
	return cs.getOptionOrDefault("useGPU", DefaultOptionUseGPU)
}

// OptionUseCache reports whether the remote inference cache should be used.
func (cs *classSettings) OptionUseCache() bool {
	return cs.getOptionOrDefault("useCache", DefaultOptionUseCache)
}

// Validate checks the class settings. When an endpoint URL is configured,
// it is used for feature extraction directly and all other settings are
// irrelevant; otherwise "model" and "passageModel" are mutually exclusive.
func (cs *classSettings) Validate(class *models.Class) error {
	if err := cs.BaseClassSettings.Validate(class); err != nil {
		return err
	}

	if cs.getEndpointURL() != "" {
		// endpoint is set, should be used for feature extraction;
		// all other settings are not relevant
		return nil
	}

	if cs.getProperty("model") != "" && cs.getProperty("passageModel") != "" {
		return errors.New("only one setting must be set either 'model' or 'passageModel'")
	}

	return nil
}

// getPassageModel prefers the "model" setting and falls back to
// "passageModel" when "model" is empty.
func (cs *classSettings) getPassageModel() string {
	if model := cs.getProperty("model"); model != "" {
		return model
	}
	return cs.getProperty("passageModel")
}

// getEndpointURL accepts both the "endpointUrl" and "endpointURL" spellings.
func (cs *classSettings) getEndpointURL() string {
	if url := cs.getProperty("endpointUrl"); url != "" {
		return url
	}
	return cs.getProperty("endpointURL")
}

// getOption reads a boolean flag from the class config's "options" map.
// It returns nil when the config, the map, the key, or the bool cast is
// missing, so callers can distinguish "unset" from an explicit value.
func (cs *classSettings) getOption(option string) *bool {
	if cs.cfg == nil {
		return nil
	}
	options, ok := cs.cfg.Class()["options"]
	if !ok {
		return nil
	}
	asMap, ok := options.(map[string]interface{})
	if !ok {
		return nil
	}
	value, ok := asMap[option]
	if !ok {
		return nil
	}
	if asBool, ok := value.(bool); ok {
		return &asBool
	}
	return nil
}

// getOptionOrDefault resolves a boolean option, using defaultValue when the
// option is unset.
func (cs *classSettings) getOptionOrDefault(option string, defaultValue bool) bool {
	if v := cs.getOption(option); v != nil {
		return *v
	}
return defaultValue +} + +func (cs *classSettings) getProperty(name string) string { + return cs.BaseClassSettings.GetPropertyAsString(name, "") +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/ent/vectorization_config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/ent/vectorization_config.go new file mode 100644 index 0000000000000000000000000000000000000000..aafaa416d9da180c828cc0673debc0f9c635d660 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/ent/vectorization_config.go @@ -0,0 +1,18 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationConfig struct { + EndpointURL string + Model string + WaitForModel, UseGPU, UseCache bool +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/module.go new file mode 100644 index 0000000000000000000000000000000000000000..2e25926917e442217c1f379350e5954bb56f8dd6 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/module.go @@ -0,0 +1,156 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modhuggingface + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-huggingface/clients" + "github.com/weaviate/weaviate/modules/text2vec-huggingface/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" +) + +const Name = "text2vec-huggingface" + +var batchSettings = batch.Settings{ + TokenMultiplier: 0, // no token limit + MaxTimePerBatch: float64(10), + MaxObjectsPerBatch: 100, + MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 500000 }, // there does not seem to be a limit + HasTokenLimit: false, + ReturnsRateLimit: false, +} + +func New() *HuggingFaceModule { + return &HuggingFaceModule{} +} + +type HuggingFaceModule struct { + vectorizer text2vecbase.TextVectorizerBatch[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func (m *HuggingFaceModule) Name() string { + return Name +} + +func (m *HuggingFaceModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2ManyVec +} + +func (m *HuggingFaceModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := 
m.initAdditionalPropertiesProvider(); err != nil {
		return errors.Wrap(err, "init additional properties provider")
	}

	return nil
}

// InitExtension wires in the nearText transformer provided by another
// module (if any) and initializes the graphql provider.
func (m *HuggingFaceModule) InitExtension(modules []modulecapabilities.Module) error {
	for _, mod := range modules {
		if mod.Name() == m.Name() {
			continue
		}
		arg, ok := mod.(modulecapabilities.TextTransformers)
		if !ok || arg == nil || arg.TextTransformers() == nil {
			continue
		}
		m.nearTextTransformer = arg.TextTransformers()["nearText"]
	}

	if err := m.initNearText(); err != nil {
		return errors.Wrap(err, "init graphql provider")
	}
	return nil
}

// initVectorizer constructs the Hugging Face client (API key taken from the
// HUGGINGFACE_APIKEY environment variable) together with its batching
// wrapper, and registers it as both the vectorizer and the meta provider.
func (m *HuggingFaceModule) initVectorizer(ctx context.Context, timeout time.Duration,
	logger logrus.FieldLogger,
) error {
	client := clients.New(os.Getenv("HUGGINGFACE_APIKEY"), timeout, logger)

	batcher := batch.NewBatchVectorizer(client, 50*time.Second, batchSettings, logger, m.Name())
	tokenizer := batch.ReturnBatchTokenizer(batchSettings.TokenMultiplier, m.Name(), ent.LowerCaseInput)
	m.vectorizer = text2vecbase.New(client, batcher, tokenizer)
	m.metaProvider = client

	return nil
}

// initAdditionalPropertiesProvider registers the standard text2vec
// additional-properties provider.
func (m *HuggingFaceModule) initAdditionalPropertiesProvider() error {
	m.additionalPropertiesProvider = additional.NewText2VecProvider()
	return nil
}

// VectorizeObject vectorizes a single object using class settings derived
// from cfg.
func (m *HuggingFaceModule) VectorizeObject(ctx context.Context,
	obj *models.Object, cfg moduletools.ClassConfig,
) ([]float32, models.AdditionalProperties, error) {
	return m.vectorizer.Object(ctx, obj, cfg, ent.NewClassSettings(cfg))
}

// VectorizableProperties reports that every property may be vectorized; no
// per-property restriction applies.
func (m *HuggingFaceModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) {
	return true, nil, nil
}

// VectorizeBatch vectorizes a batch of objects; entries flagged in
// skipObject are left out. Per-object errors are returned keyed by index.
func (m *HuggingFaceModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) {
	vecs, errs := m.vectorizer.ObjectBatch(ctx, objs, skipObject, cfg)
	return vecs, nil, errs
}

// MetaInfo exposes the underlying client's meta information.
func (m *HuggingFaceModule) MetaInfo()
(map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *HuggingFaceModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +func (m *HuggingFaceModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[]float32](New()) + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.InputVectorizer[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..30b1cad7f213ed8ed60e37c40a5d3d3282301690 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-huggingface/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modhuggingface + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *HuggingFaceModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *HuggingFaceModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *HuggingFaceModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/clients/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/clients/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..66b3424d989ced1b66c11b705780a757ec395d48 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/clients/fakes_for_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/clients/jinaai.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/clients/jinaai.go new file mode 100644 index 0000000000000000000000000000000000000000..09af60f9d0bab32dde7837cd52e4e2394983470a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/clients/jinaai.go @@ -0,0 +1,76 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2vec-jinaai/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/jinaai" +) + +const ( + defaultRPM = 500 // from https://jina.ai/embeddings/ + defaultTPM = 1_000_000 +) + +type vectorizer struct { + client *jinaai.Client[[]float32] + logger logrus.FieldLogger +} + +func New(jinaAIApiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + client: jinaai.New[[]float32](jinaAIApiKey, timeout, defaultRPM, defaultTPM, jinaai.EmbeddingsBuildUrlFn, logger), + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + settings := ent.NewClassSettings(cfg) + res, _, usage, err := v.client.Vectorize(ctx, input, jinaai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Dimensions: settings.Dimensions(), + Task: jinaai.RetrievalPassage, + }) + return res, nil, usage, err +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + settings := ent.NewClassSettings(cfg) + res, _, _, err := v.client.Vectorize(ctx, input, jinaai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Dimensions: settings.Dimensions(), + Task: jinaai.RetrievalQuery, + }) + return res, err +} + +func (v *vectorizer) GetApiKeyHash(ctx context.Context, config moduletools.ClassConfig) [32]byte { + return v.client.GetApiKeyHash(ctx, config) +} + +func (v *vectorizer) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) 
*modulecomponents.RateLimits { + return v.client.GetVectorizerRateLimit(ctx, cfg) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/clients/jinaai_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/clients/jinaai_test.go new file mode 100644 index 0000000000000000000000000000000000000000..3ee2ddf7a3994de3fa0e1389681b587bec548549 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/clients/jinaai_test.go @@ -0,0 +1,180 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + + c := New("apiKey", 0, nullLogger()) + + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + res, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "jina-embedding-v2", "baseURL": server.URL}}) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("apiKey", 0, nullLogger()) + + ctx, cancel := 
context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{}}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + c := New("apiKey", 0, nullLogger()) + + _, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"baseURL": server.URL}}) + + require.NotNil(t, err) + assert.EqualError(t, err, "connection to: JinaAI API failed with status: 500 error: nope, not gonna happen") + }) + + t.Run("when JinaAI key is passed using X-Jinaai-Api-Key header", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Jinaai-Api-Key", []string{"some-key"}) + + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + res, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "jina-embedding-v2", "baseURL": server.URL}}) + + require.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when JinaAI key is empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{}}) + + require.NotNil(t, err) + assert.EqualError(t, err, "API Key: no api key 
found "+ + "neither in request header: X-Jinaai-Api-Key "+ + "nor in environment variable under JINAAI_APIKEY") + }) + + t.Run("when X-Jinaai-Api-Key header is passed but empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Jinaai-Api-Key", []string{""}) + + _, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "jina-embedding-v2"}}) + + require.NotNil(t, err) + assert.EqualError(t, err, "API Key: no api key found "+ + "neither in request header: X-Jinaai-Api-Key "+ + "nor in environment variable under JINAAI_APIKEY") + }) +} + +type fakeHandler struct { + t *testing.T + serverError error +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.serverError != nil { + embedding := map[string]interface{}{ + "detail": f.serverError.Error(), + } + outBytes, err := json.Marshal(embedding) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + textInputArray := b["input"].([]interface{}) + textInput := textInputArray[0].(string) + assert.Greater(f.t, len(textInput), 0) + + embeddingData := map[string]interface{}{ + "object": textInput, + "index": 0, + "embedding": []float32{0.1, 0.2, 0.3}, + } + embedding := map[string]interface{}{ + "object": "list", + "data": []interface{}{embeddingData}, + } + + outBytes, err := json.Marshal(embedding) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..9a7784456d04bfb8a39a1c533cbbe4ffaf76cb63 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "JinaAI Module", + "documentationHref": "https://jina.ai/embeddings/", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..3bb5b29f8d3aea3a50d3179396dc809122295c43 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/config.go @@ -0,0 +1,49 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modjinaai + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2vec-jinaai/ent" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *JinaAIModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": ent.DefaultVectorizeClassName, + "model": ent.DefaultJinaAIModel, + "baseURL": ent.DefaultBaseURL, + } +} + +func (m *JinaAIModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !ent.DefaultPropertyIndexed, + "vectorizePropertyName": ent.DefaultVectorizePropertyName, + } +} + +func (m *JinaAIModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := ent.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..5b1e66ac06614d443c4e98f55678e87c76277ce9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/ent/class_settings.go @@ -0,0 +1,54 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + // Default values for URL (model is ok) cannot be changed before we solve how old classes that have the defaults + // NOT set will handle the change + DefaultJinaAIModel = "jina-embeddings-v4" + DefaultVectorizeClassName = false + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + DefaultBaseURL = "https://api.jina.ai" + LowerCaseInput = false +) + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, LowerCaseInput)} +} + +func (cs *classSettings) Model() string { + return cs.BaseClassSettings.GetPropertyAsString("model", DefaultJinaAIModel) +} + +func (cs *classSettings) BaseURL() string { + return cs.BaseClassSettings.GetPropertyAsString("baseURL", DefaultBaseURL) +} + +func (cs *classSettings) Dimensions() *int64 { + return cs.BaseClassSettings.GetPropertyAsInt64("dimensions", nil) +} + +func (cs *classSettings) Validate(class *models.Class) error { + return cs.BaseClassSettings.Validate(class) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..49201e0515064593e253570a2cdcc3b540f244e1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/module.go @@ -0,0 +1,159 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 
Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modjinaai + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-jinaai/clients" + "github.com/weaviate/weaviate/modules/text2vec-jinaai/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" +) + +const Name = "text2vec-jinaai" + +var batchSettings = batch.Settings{ + // the encoding is different than OpenAI, but the code is not available in Go and too complicated to port. + // using 30% more than the OpenAI model is a rough estimate but seems to work + TokenMultiplier: 1.3, + MaxTimePerBatch: float64(10), + MaxObjectsPerBatch: 512, // Info from jina + // real limit is 8192, but the vectorization times go up by A LOT if the batches are larger + MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 2500 }, + HasTokenLimit: true, + ReturnsRateLimit: false, +} + +func New() *JinaAIModule { + return &JinaAIModule{} +} + +type JinaAIModule struct { + vectorizer text2vecbase.TextVectorizerBatch[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func (m *JinaAIModule) Name() string { + return Name +} + +func (m *JinaAIModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2ManyVec +} + +func (m *JinaAIModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { 
+ m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init additional properties provider") + } + + return nil +} + +func (m *JinaAIModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *JinaAIModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + jinaAIApiKey := os.Getenv("JINAAI_APIKEY") + + client := clients.New(jinaAIApiKey, timeout, logger) + + m.vectorizer = text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, batchSettings, logger, m.Name()), + batch.ReturnBatchTokenizer(batchSettings.TokenMultiplier, m.Name(), ent.LowerCaseInput), + ) + m.metaProvider = client + + return nil +} + +func (m *JinaAIModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m *JinaAIModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg, ent.NewClassSettings(cfg)) +} + +func (m *JinaAIModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, nil, nil +} + +func (m *JinaAIModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, 
[]models.AdditionalProperties, map[int]error) { + vecs, errs := m.vectorizer.ObjectBatch(ctx, objs, skipObject, cfg) + return vecs, nil, errs +} + +func (m *JinaAIModule) MetaInfo() (map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *JinaAIModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +func (m *JinaAIModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[]float32](New()) + _ = modulecapabilities.GraphQLArguments(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..7710c0cc174446caa41639720d345d89ae2f9d95 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-jinaai/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modjinaai + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *JinaAIModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *JinaAIModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *JinaAIModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/clients/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/clients/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..66b3424d989ced1b66c11b705780a757ec395d48 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/clients/fakes_for_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..906239b6d3c8cc3933b0b98c95e0043d680736a1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Mistral Module", + "documentationHref": "https://docs.mistral.ai/api/#operation/createEmbedding", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/clients/mistral.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/clients/mistral.go new file mode 100644 index 0000000000000000000000000000000000000000..d53d2360c02039b8b5a23b2256656898384411e1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/clients/mistral.go @@ -0,0 +1,226 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "golang.org/x/time/rate" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2vec-mistral/ent" +) + +type embeddingsRequest struct { + Input []string `json:"input"` + Model string `json:"model"` +} + +type embeddingsDataResponse struct { + Embedding []float32 `json:"embedding"` + Index int64 `json:"index,omitempty"` + Object string `json:"object,omitempty"` +} + +type embeddingsResponse struct { + Data []embeddingsDataResponse `json:"data,omitempty"` + Model string `json:"model,omitempty"` + Message string `json:"message,omitempty"` + Usage *modulecomponents.Usage `json:"usage,omitempty"` +} + +type vectorizer struct { + apiKey string + httpClient *http.Client + logger logrus.FieldLogger + // Mistral has a requests per 
second limit, but tokens limits are per minute. As all other vectorizers have + // a per minute limit we will handle this special behaviour in here and not add it to the shared logic + rateLimiterPerSecond *rate.Limiter +} + +// info from mistral devs +const ( + defaultRPM = 300 // 5 req per second + defaultTPM = 20_000_000 +) + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + apiKey: apiKey, + httpClient: &http.Client{ + Timeout: timeout, + }, + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, input []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + if v.rateLimiterPerSecond != nil { + err := v.rateLimiterPerSecond.Wait(ctx) + if err != nil { + return nil, nil, 0, err + } + } + + config := v.getVectorizationConfig(cfg) + res, usage, err := v.vectorize(ctx, input, config.Model, config.BaseURL) + return res, nil, usage, err +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + config := v.getVectorizationConfig(cfg) + res, _, err := v.vectorize(ctx, input, config.Model, config.BaseURL) + return res, err +} + +func (v *vectorizer) vectorize(ctx context.Context, input []string, + model string, url string, +) (*modulecomponents.VectorizationResult[[]float32], int, error) { + body, err := json.Marshal(embeddingsRequest{ + Input: input, + Model: model, + }) + if err != nil { + return nil, 0, errors.Wrapf(err, "marshal body") + } + + req, err := http.NewRequestWithContext(ctx, "POST", url, + bytes.NewReader(body)) + if err != nil { + return nil, 0, errors.Wrap(err, "create POST request") + } + apiKey, err := v.getApiKey(ctx) + if err != nil { + return nil, 0, errors.Wrapf(err, "Mistral API Key") + } + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey)) + 
req.Header.Add("Content-Type", "application/json") + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, 0, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, 0, errors.Wrap(err, "read response body") + } + var resBody embeddingsResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, 0, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + + if res.StatusCode != 200 { + if resBody.Message != "" { + errorMessage := getErrorMessage(res.StatusCode, resBody.Message, "connection to Mistral failed with status: %d error: %v") + return nil, 0, errors.New(errorMessage) + } + errorMessage := getErrorMessage(res.StatusCode, "", "connection to Mistral failed with status: %d") + return nil, 0, errors.New(errorMessage) + } + + if len(resBody.Data) == 0 || len(resBody.Data[0].Embedding) == 0 { + return nil, 0, errors.Errorf("empty embeddings response") + } + + vectors := make([][]float32, len(resBody.Data)) + for i, data := range resBody.Data { + vectors[i] = data.Embedding + } + + return &modulecomponents.VectorizationResult[[]float32]{ + Text: input, + Dimensions: len(resBody.Data[0].Embedding), + Vector: vectors, + }, modulecomponents.GetTotalTokens(resBody.Usage), nil +} + +func getErrorMessage(statusCode int, resBodyError string, errorTemplate string) string { + if resBodyError != "" { + return fmt.Sprintf(errorTemplate, statusCode, resBodyError) + } + return fmt.Sprintf(errorTemplate, statusCode) +} + +func (v *vectorizer) getApiKey(ctx context.Context) (string, error) { + if apiKey := modulecomponents.GetValueFromContext(ctx, "X-Mistral-Api-Key"); apiKey != "" { + return apiKey, nil + } + if v.apiKey != "" { + return v.apiKey, nil + } + return "", errors.New("no api key found " + + "neither in request header: X-Mistral-Api-Key " + + "nor in environment variable under MISTRAL_APIKEY") +} + +func (v 
*vectorizer) GetApiKeyHash(ctx context.Context, cfg moduletools.ClassConfig) [32]byte { + key, err := v.getApiKey(ctx) + if err != nil { + return [32]byte{} + } + return sha256.Sum256([]byte(key)) +} + +func (v *vectorizer) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + rpm, tpm := modulecomponents.GetRateLimitFromContext(ctx, "Mistral", defaultRPM, defaultTPM) + rps := rpm / 60 + // use a bit less than theoretically possible to not run into the rate limit + v.rateLimiterPerSecond = rate.NewLimiter(rate.Limit(rps)-1.5, max(rps-3, 1)) + + execAfterRequestFunction := func(limits *modulecomponents.RateLimits, tokensUsed int, deductRequest bool) { + // refresh is after 60 seconds but leave a bit of room for errors. Otherwise, we only deduct the request that just happened + if limits.LastOverwrite.Add(61 * time.Second).After(time.Now()) { + if deductRequest { + limits.RemainingRequests -= 1 + } + limits.RemainingTokens -= tokensUsed + return + } + + limits.RemainingRequests = rpm + limits.ResetRequests = time.Now().Add(time.Duration(61) * time.Second) + limits.LimitRequests = rpm + limits.LastOverwrite = time.Now() + + limits.RemainingTokens = tpm + limits.LimitTokens = tpm + limits.ResetTokens = time.Now().Add(time.Duration(61) * time.Second) + } + + initialRL := &modulecomponents.RateLimits{AfterRequestFunction: execAfterRequestFunction, LastOverwrite: time.Now().Add(-61 * time.Minute)} + initialRL.ResetAfterRequestFunction(0) // set initial values + + return initialRL +} + +func (v *vectorizer) getVectorizationConfig(cfg moduletools.ClassConfig) ent.VectorizationConfig { + settings := ent.NewClassSettings(cfg) + return ent.VectorizationConfig{ + Model: settings.Model(), BaseURL: settings.BaseURL(), + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/clients/mistral_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/clients/mistral_test.go new file mode 100644 
index 0000000000000000000000000000000000000000..f5bcff55dbd47d531787d41f9b5fd48351977ef1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/clients/mistral_test.go @@ -0,0 +1,188 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "apiKey", + httpClient: &http.Client{}, + logger: nullLogger(), + } + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + res, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "mistral-embed", "baseURL": server.URL}}) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "apiKey", + httpClient: &http.Client{}, + logger: nullLogger(), + } + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": 
"mistral-embed"}}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + c := &vectorizer{ + apiKey: "apiKey", + httpClient: &http.Client{}, + logger: nullLogger(), + } + _, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "mistral-embed", "baseURL": server.URL}}) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "connection to Mistral failed with status: 500 error: nope, not gonna happen") + }) + + t.Run("when Mistral key is passed using Mistral-Api-Key header", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "", + httpClient: &http.Client{}, + logger: nullLogger(), + } + ctxWithValue := context.WithValue(context.Background(), + "X-Mistral-Api-Key", []string{"some-key"}) + + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + res, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "mistral-embed", "baseURL": server.URL}}) + + require.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when Mistral key is empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "", + httpClient: &http.Client{}, + logger: nullLogger(), + } + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "mistral-embed"}}) + + require.NotNil(t, err) + 
assert.Equal(t, err.Error(), "Mistral API Key: no api key found "+ + "neither in request header: X-Mistral-Api-Key "+ + "nor in environment variable under MISTRAL_APIKEY") + }) + + t.Run("when X-Mistral-Api-Key header is passed but empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "", + httpClient: &http.Client{}, + logger: nullLogger(), + } + ctxWithValue := context.WithValue(context.Background(), + "X-Mistral-Api-Key", []string{""}) + + _, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "mistral-embed"}}) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "Mistral API Key: no api key found "+ + "neither in request header: X-Mistral-Api-Key "+ + "nor in environment variable under MISTRAL_APIKEY") + }) +} + +type fakeHandler struct { + t *testing.T + serverError error +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.serverError != nil { + resp := embeddingsResponse{Message: "nope, not gonna happen"} + outBytes, err := json.Marshal(resp) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var req embeddingsRequest + require.Nil(f.t, json.Unmarshal(bodyBytes, &req)) + + assert.NotNil(f.t, req) + assert.NotEmpty(f.t, req.Input) + + resp := embeddingsResponse{ + Data: []embeddingsDataResponse{{Embedding: []float32{0.1, 0.2, 0.3}}}, + Model: "model", + } + outBytes, err := json.Marshal(resp) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/config.go 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/config.go new file mode 100644 index 0000000000000000000000000000000000000000..829e1abaa4c8fe47852666a5ef9d7979c7682276 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/config.go @@ -0,0 +1,49 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modmistral + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2vec-mistral/ent" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *MistralModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": ent.DefaultVectorizeClassName, + "baseURL": ent.DefaultBaseURL, + "model": ent.DefaultMistralModel, + } +} + +func (m *MistralModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !ent.DefaultPropertyIndexed, + "vectorizePropertyName": ent.DefaultVectorizePropertyName, + } +} + +func (m *MistralModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := ent.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..bbc5e56af49db26c1acf1b8a5c842030868189ee --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/ent/class_settings.go @@ -0,0 +1,53 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + // Default values for URL and model cannot be changed before we solve how old classes that have the defaults + // NOT set will handle the change + DefaultMistralModel = "mistral-embed" + DefaultBaseURL = "https://api.mistral.ai/v1/embeddings" + DefaultVectorizeClassName = true + DefaultPropertyIndexed = false + DefaultVectorizePropertyName = false + LowerCaseInput = false +) + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, LowerCaseInput)} +} + +func (cs *classSettings) Model() string { + return cs.BaseClassSettings.GetPropertyAsString("model", DefaultMistralModel) +} + +func (cs *classSettings) Validate(class *models.Class) error { + if err := cs.BaseClassSettings.Validate(class); err != nil { + return err + } + return nil +} + +func (cs *classSettings) BaseURL() string { + return cs.BaseClassSettings.GetPropertyAsString("baseURL", DefaultBaseURL) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/ent/vectorization_config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/ent/vectorization_config.go new file mode 100644 index 0000000000000000000000000000000000000000..9ca94386dfb28c945c6a2bac9df68e89b0b3e8ac 
--- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/ent/vectorization_config.go @@ -0,0 +1,17 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationConfig struct { + Model string + BaseURL string +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/module.go new file mode 100644 index 0000000000000000000000000000000000000000..3b5258cc9267678ddee2a8fb5e22373fea4accb4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/module.go @@ -0,0 +1,159 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modmistral + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-mistral/clients" + "github.com/weaviate/weaviate/modules/text2vec-mistral/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" +) + +const Name = "text2vec-mistral" + +var batchSettings = batch.Settings{ + // the encoding is different than OpenAI, but the code is not available in Go and too complicated to port. 
+ // using 30% more than the OpenAI model is a rough estimate but seems to work + TokenMultiplier: 1.3, + MaxTimePerBatch: float64(10), + MaxObjectsPerBatch: 1000000, // dummy value, there is only a token limit + MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 8192 }, + HasTokenLimit: true, + ReturnsRateLimit: false, +} + +func New() *MistralModule { + return &MistralModule{} +} + +type MistralModule struct { + vectorizer text2vecbase.TextVectorizerBatch[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func (m *MistralModule) Name() string { + return Name +} + +func (m *MistralModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2ManyVec +} + +func (m *MistralModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init additional properties provider") + } + + return nil +} + +func (m *MistralModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *MistralModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error 
{ + apiKey := os.Getenv("MISTRAL_APIKEY") + client := clients.New(apiKey, timeout, logger) + + m.vectorizer = text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, batchSettings, logger, m.Name()), + batch.ReturnBatchTokenizer(batchSettings.TokenMultiplier, m.Name(), ent.LowerCaseInput), + ) + m.metaProvider = client + + return nil +} + +func (m *MistralModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m *MistralModule) VectorizeObject(ctx context.Context, obj *models.Object, + cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg, ent.NewClassSettings(cfg)) +} + +func (m *MistralModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + vecs, errs := m.vectorizer.ObjectBatch(ctx, objs, skipObject, cfg) + return vecs, nil, errs +} + +func (m *MistralModule) VectorizableProperties(cfg moduletools.ClassConfig, +) (bool, []string, error) { + return true, nil, nil +} + +func (m *MistralModule) MetaInfo() (map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *MistralModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +func (m *MistralModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[]float32](New()) + _ = modulecapabilities.GraphQLArguments(New()) + _ = 
modulecapabilities.InputVectorizer[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..aca79f1cb3ade1f923db9f5ea664b958f36c5a1f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modmistral + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *MistralModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *MistralModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *MistralModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/vectorizer/batch_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/vectorizer/batch_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1bfb368ab22f7aacf00378214a945579a4ecaa08 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/vectorizer/batch_test.go @@ -0,0 +1,104 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ 
V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +func TestBatch(t *testing.T) { + client := &fakeBatchClient{} + cfg := &fakeClassConfig{vectorizePropertyName: false, classConfig: map[string]interface{}{"vectorizeClassName": false}} + logger, _ := test.NewNullLogger() + cases := []struct { + name string + objects []*models.Object + skip []bool + wantErrors map[int]error + deadline time.Duration + }{ + {name: "skip all", objects: []*models.Object{{Class: "Car"}}, skip: []bool{true}}, + {name: "skip first", objects: []*models.Object{{Class: "Car"}, {Class: "Car", Properties: map[string]interface{}{"test": "test"}}}, skip: []bool{true, false}}, + {name: "one object errors", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "test"}}, {Class: "Car", Properties: map[string]interface{}{"test": "error something"}}}, skip: []bool{false, false}, wantErrors: map[int]error{1: fmt.Errorf("something")}}, + {name: "first object errors", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "error something"}}, {Class: "Car", Properties: map[string]interface{}{"test": "test"}}}, skip: []bool{false, false}, wantErrors: map[int]error{0: fmt.Errorf("something")}}, + {name: "vectorize all", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "test"}}, {Class: "Car", Properties: map[string]interface{}{"test": "something"}}}, skip: []bool{false, false}}, + {name: "multiple 
vectorizer batches", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 25"}}, // set limit so next 3 objects are one batch + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "first object second batch"}}, // rate is 100 again + {Class: "Car", Properties: map[string]interface{}{"test": "second object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "fourth object second batch"}}, + }, skip: []bool{false, false, false, false, false, false, false, false}}, + {name: "multiple vectorizer batches with skips and errors", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 25"}}, // set limit so next 3 objects are one batch + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "error something"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "first object second batch"}}, // rate is 100 again + {Class: "Car", Properties: map[string]interface{}{"test": "second object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "fourth object second batch"}}, + }, skip: []bool{false, true, false, false, false, true, false, false}, wantErrors: map[int]error{3: fmt.Errorf("something")}}, + {name: "skip last item", objects: []*models.Object{ + {Class: "Car", Properties: 
map[string]interface{}{"test": "fir test object"}}, // set limit + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + }, skip: []bool{false, false, true}}, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + v := text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, + batch.Settings{MaxObjectsPerBatch: 100, MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 500000 }, MaxTimePerBatch: 10}, + logger, "test"), + batch.ReturnBatchTokenizer(1.3, "", false), + ) + deadline := time.Now().Add(10 * time.Second) + if tt.deadline != 0 { + deadline = time.Now().Add(tt.deadline) + } + + ctx, cancl := context.WithDeadline(context.Background(), deadline) + vecs, errs := v.ObjectBatch( + ctx, tt.objects, tt.skip, cfg, + ) + + require.Len(t, errs, len(tt.wantErrors)) + require.Len(t, vecs, len(tt.objects)) + + for i := range tt.objects { + if tt.wantErrors[i] != nil { + require.Equal(t, tt.wantErrors[i], errs[i]) + } else if tt.skip[i] { + require.Nil(t, vecs[i]) + } else { + require.NotNil(t, vecs[i]) + } + } + cancl() + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..20efe91828e961d9032a75d9b0b2477f5f7c6da4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-mistral/vectorizer/fakes_for_test.go @@ -0,0 +1,160 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" +) + +type fakeBatchClient struct { + defaultResetRate int +} + +func (c *fakeBatchClient) Vectorize(ctx context.Context, + text []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + if c.defaultResetRate == 0 { + c.defaultResetRate = 60 + } + + vectors := make([][]float32, len(text)) + errors := make([]error, len(text)) + rateLimit := &modulecomponents.RateLimits{RemainingTokens: 100, RemainingRequests: 100, LimitTokens: 200, ResetTokens: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second), ResetRequests: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second)} + for i := range text { + if len(text[i]) >= len("error ") && text[i][:6] == "error " { + errors[i] = fmt.Errorf("%s", text[i][6:]) + continue + } + + req := len("requests ") + if len(text[i]) >= req && text[i][:req] == "requests " { + reqs, _ := strconv.Atoi(strings.Split(text[i][req:], " ")[0]) + rateLimit.RemainingRequests = reqs + rateLimit.LimitRequests = 2 * reqs + } + + if len(text[i]) >= len("wait ") && text[i][:5] == "wait " { + wait, _ := strconv.Atoi(text[i][5:]) + time.Sleep(time.Duration(wait) * time.Millisecond) + } + vectors[i] = []float32{0, 1, 2, 3} + } + + return &modulecomponents.VectorizationResult[[]float32]{ + Vector: vectors, + Dimensions: 4, + Text: text, + Errors: errors, + }, rateLimit, 0, nil +} + +func (c *fakeBatchClient) VectorizeQuery(ctx context.Context, + text []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + return &modulecomponents.VectorizationResult[[]float32]{ + Vector: 
[][]float32{{0.1, 1.1, 2.1, 3.1}}, + Dimensions: 4, + Text: text, + }, nil +} + +func (c *fakeBatchClient) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + return &modulecomponents.RateLimits{RemainingTokens: 100, RemainingRequests: 100, LimitTokens: 200, ResetTokens: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second), ResetRequests: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second)} +} + +func (c *fakeBatchClient) GetApiKeyHash(ctx context.Context, cfg moduletools.ClassConfig) [32]byte { + return [32]byte{} +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizeClassName bool + vectorizePropertyName bool + skippedProperty string + excludedProperty string + // module specific settings + Model string + baseURL string +} + +func (f fakeClassConfig) PropertyIndexed(property string) bool { + return !((property == f.skippedProperty) || (property == f.excludedProperty)) +} + +func (f fakeClassConfig) Class() map[string]interface{} { + classSettings := map[string]interface{}{ + "vectorizeClassName": f.vectorizeClassName, + "model": f.Model, + "baseURL": f.baseURL, + } + return classSettings +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) VectorizeClassName() bool { + return f.classConfig["vectorizeClassName"].(bool) +} + +func (f 
fakeClassConfig) VectorizePropertyName(propertyName string) bool { + return f.vectorizePropertyName +} + +func (f fakeClassConfig) Properties() []string { + return nil +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..8f7ddc24234d6000ff89caafa8ba34e3805292e1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/meta.go @@ -0,0 +1,21 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/transformers" +) + +func (v *vectorizer) MetaInfo() (map[string]any, error) { + endpoint := v.urlBuilder.GetPassageURL("/meta", transformers.VectorizationConfig{}) + return v.client.MetaInfo(endpoint) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/model2vec.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/model2vec.go new file mode 100644 index 0000000000000000000000000000000000000000..0d44c34fe8885390c043ec6b0abf677a0b2cfa5c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/model2vec.go @@ -0,0 +1,49 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "time" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/transformers" +) + +type vectorizer struct { + url string + client *transformers.Client + urlBuilder *transformers.URLBuilder + logger logrus.FieldLogger +} + +func New(url string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + urlBuilder := transformers.NewURLBuilder(url, url) + return &vectorizer{ + url: url, + urlBuilder: urlBuilder, + client: transformers.New(urlBuilder, timeout, logger), + logger: logger, + } +} + +func (v *vectorizer) VectorizeObject(ctx context.Context, input string, + config transformers.VectorizationConfig, +) (*transformers.VectorizationResult, error) { + return v.client.VectorizeObject(ctx, input, config) +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, input string, + config transformers.VectorizationConfig, +) (*transformers.VectorizationResult, error) { + return v.client.VectorizeQuery(ctx, input, config) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/model2vec_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/model2vec_test.go new file mode 100644 index 0000000000000000000000000000000000000000..cd08b12c44f729671182b9821a29ab13328805e1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/model2vec_test.go @@ -0,0 +1,121 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/transformers" +) + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + expected := &transformers.VectorizationResult{ + Text: "This is my text", + Vector: []float32{0.1, 0.2, 0.3}, + Dimensions: 3, + } + res, err := c.VectorizeObject(context.Background(), "This is my text", + transformers.VectorizationConfig{ + PoolingStrategy: "masked_mean", + }) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, err := c.VectorizeObject(ctx, "This is my text", transformers.VectorizationConfig{}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + c := New(server.URL, 0, nullLogger()) + _, err := c.VectorizeObject(context.Background(), "This is my text", + transformers.VectorizationConfig{}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "nope, not gonna happen") + }) +} + +type fakeHandler struct { + t *testing.T + serverError error +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, 
r *http.Request) { + assert.Equal(f.t, "/vectors", r.URL.String()) + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.serverError != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(fmt.Sprintf(`{"error":"%s"}`, f.serverError.Error()))) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + textInput := b["text"].(string) + assert.Greater(f.t, len(textInput), 0) + + pooling := b["config"].(map[string]interface{})["pooling_strategy"].(string) + assert.Equal(f.t, "masked_mean", pooling) + + out := map[string]interface{}{ + "text": textInput, + "dims": 3, + "vector": []float32{0.1, 0.2, 0.3}, + } + outBytes, err := json.Marshal(out) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/startup.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/startup.go new file mode 100644 index 0000000000000000000000000000000000000000..9c347fe3216ed0d7a044ef1e4e263ba1fdd14773 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/startup.go @@ -0,0 +1,44 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "fmt" + "time" + + "github.com/pkg/errors" +) + +func (v *vectorizer) WaitForStartup(initCtx context.Context, + interval time.Duration, +) error { + wellKnownReadyEndpoint := fmt.Sprintf("%s/.well-known/ready", v.url) + t := time.NewTicker(interval) + defer t.Stop() + expired := initCtx.Done() + var lastErr error + for { + select { + case <-t.C: + lastErr = v.client.CheckReady(initCtx, wellKnownReadyEndpoint) + if lastErr == nil { + return nil + } + v.logger. + WithField("action", "text2vec_model2vec_remote_wait_for_startup"). + WithError(lastErr).Warnf("text2vev-model2vec inference service not ready") + case <-expired: + return errors.Wrapf(lastErr, "init context expired before remote was ready") + } + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/startup_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/startup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..0f87f6b1aeb15baa88fbb8e295510420667a9698 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/clients/startup_test.go @@ -0,0 +1,104 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "net/http" + "net/http/httptest" + "regexp" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWaitForStartup(t *testing.T) { + t.Run("when common server is immediately ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{t: t}) + defer server.Close() + v := New(server.URL, 0, nullLogger()) + err := v.WaitForStartup(context.Background(), 150*time.Millisecond) + + assert.Nil(t, err) + }) + + t.Run("when common server is down", func(t *testing.T) { + url := "http://nothing-running-at-this-url" + v := New(url, 0, nullLogger()) + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := v.WaitForStartup(ctx, 50*time.Millisecond) + + require.NotNil(t, err, nullLogger()) + assert.Contains(t, err.Error(), "init context expired before remote was ready: send check ready request") + assertContainsEither(t, err.Error(), "dial tcp", "context deadline exceeded") + assert.NotContains(t, err.Error(), "[passage]") + assert.NotContains(t, err.Error(), "[query]") + }) + + t.Run("when common server is alive, but not ready", func(t *testing.T) { + server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(time.Hour), + }) + defer server.Close() + v := New(server.URL, 0, nullLogger()) + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := v.WaitForStartup(ctx, 50*time.Millisecond) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "init context expired before remote was ready") + assertContainsEither(t, err.Error(), "not ready: status 503", "context deadline exceeded") + assert.NotContains(t, err.Error(), "[passage]") + assert.NotContains(t, err.Error(), "[query]") + }) + + t.Run("when common server is initially not ready, but then becomes ready", func(t *testing.T) { + 
server := httptest.NewServer(&testReadyHandler{ + t: t, + readyTime: time.Now().Add(100 * time.Millisecond), + }) + v := New(server.URL, 0, nullLogger()) + defer server.Close() + ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond) + defer cancel() + err := v.WaitForStartup(ctx, 50*time.Millisecond) + + require.Nil(t, err) + }) +} + +type testReadyHandler struct { + t *testing.T + // the test handler will report as not ready before the time has passed + readyTime time.Time +} + +func (f *testReadyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, "/.well-known/ready", r.URL.String()) + assert.Equal(f.t, http.MethodGet, r.Method) + + if time.Since(f.readyTime) < 0 { + w.WriteHeader(http.StatusServiceUnavailable) + } else { + w.WriteHeader(http.StatusNoContent) + } +} + +func assertContainsEither(t *testing.T, str string, contains ...string) { + reg := regexp.MustCompile(strings.Join(contains, "|")) + assert.Regexp(t, reg, str) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/config.go new file mode 100644 index 0000000000000000000000000000000000000000..e2253b9f1aecfcbd867bf18c02b9ac66d9d6b164 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/config.go @@ -0,0 +1,49 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modtransformers + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/text2vec-model2vec/vectorizer" +) + +func (m *Model2VecModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": vectorizer.DefaultVectorizeClassName, + } +} + +func (m *Model2VecModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !vectorizer.DefaultPropertyIndexed, + "vectorizePropertyName": vectorizer.DefaultVectorizePropertyName, + } +} + +func (m *Model2VecModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := vectorizer.NewClassSettings(cfg) + if err := settings.Validate(class); err != nil { + return err + } + return nil +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/config_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/config_test.go new file mode 100644 index 0000000000000000000000000000000000000000..750e2edd020a9942f9d75913a1bafd48b4650d16 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/config_test.go @@ -0,0 +1,34 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modtransformers + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/schema" +) + +func TestConfigDefaults(t *testing.T) { + t.Run("for properties", func(t *testing.T) { + def := New().ClassConfigDefaults() + + assert.Equal(t, false, def["vectorizeClassName"]) + }) + + t.Run("for the class", func(t *testing.T) { + dt := schema.DataTypeText + def := New().PropertyConfigDefaults(&dt) + assert.Equal(t, false, def["vectorizePropertyName"]) + assert.Equal(t, false, def["skip"]) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/module.go new file mode 100644 index 0000000000000000000000000000000000000000..882254b772ee7ac6bc6e74d6ca64a258db385d8e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/module.go @@ -0,0 +1,169 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modtransformers + +import ( + "context" + "os" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + entcfg "github.com/weaviate/weaviate/entities/config" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-model2vec/clients" + "github.com/weaviate/weaviate/modules/text2vec-model2vec/vectorizer" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" +) + +const Name = "text2vec-model2vec" + +func New() *Model2VecModule { + return &Model2VecModule{} +} + +type Model2VecModule struct { + vectorizer text2vecbase.TextVectorizer[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func (m *Model2VecModule) Name() string { + return Name +} + +func (m *Model2VecModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2Vec +} + +func (m *Model2VecModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init additional properties provider") + } + + return nil +} + +func (m *Model2VecModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := 
module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *Model2VecModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + waitForStartup := true + if envWaitForStartup := os.Getenv("MODEL2VEC_WAIT_FOR_STARTUP"); envWaitForStartup != "" { + waitForStartup = entcfg.Enabled(envWaitForStartup) + } + + url := os.Getenv("MODEL2VEC_INFERENCE_API") + + client := clients.New(url, timeout, logger) + if waitForStartup { + if err := client.WaitForStartup(ctx, 1*time.Second); err != nil { + return errors.Wrap(err, "init remote vectorizer") + } + } + + m.vectorizer = vectorizer.New(client) + m.metaProvider = client + + return nil +} + +func (m *Model2VecModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m *Model2VecModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg) +} + +// VectorizeBatch is _slower_ if many requests are done in parallel. 
So do all objects sequentially +func (m *Model2VecModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + vecs := make([][]float32, len(objs)) + addProps := make([]models.AdditionalProperties, len(objs)) + // error should be the exception so dont preallocate + errs := make(map[int]error, 0) + for i, obj := range objs { + if skipObject[i] { + continue + } + vec, addProp, err := m.vectorizer.Object(ctx, obj, cfg) + if err != nil { + errs[i] = err + continue + } + addProps[i] = addProp + vecs[i] = vec + } + + return vecs, addProps, errs +} + +func (m *Model2VecModule) MetaInfo() (map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *Model2VecModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +func (m *Model2VecModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +func (m *Model2VecModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, nil, nil +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..15c5a161d97a99df226785c4cfae13d03eca1df5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || 
__/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modtransformers + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *Model2VecModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *Model2VecModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *Model2VecModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/vectorizer/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/vectorizer/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..a7373b91d3059589cce2e18e6e8227e5c32ed7c3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/vectorizer/class_settings.go @@ -0,0 +1,44 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + DefaultPropertyIndexed = true + DefaultVectorizeClassName = false + DefaultVectorizePropertyName = false +) + +type classSettings struct { + *basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: basesettings.NewBaseClassSettings(cfg, false)} +} + +func (ic *classSettings) InferenceURL() string { + return ic.BaseClassSettings.GetPropertyAsString("inferenceUrl", "") +} + +func (ic *classSettings) Validate(class *models.Class) error { + if err := ic.BaseClassSettings.Validate(class); err != nil { + return err + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/vectorizer/objects_texts.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/vectorizer/objects_texts.go new file mode 100644 index 0000000000000000000000000000000000000000..d5918c07c8e0650c82b3dfb99af204e5b8d2b4b7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-model2vec/vectorizer/objects_texts.go @@ -0,0 +1,88 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/transformers" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +type Vectorizer struct { + client Client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +type Client interface { + VectorizeObject(ctx context.Context, input string, + cfg transformers.VectorizationConfig) (*transformers.VectorizationResult, error) + VectorizeQuery(ctx context.Context, input string, + cfg transformers.VectorizationConfig) (*transformers.VectorizationResult, error) +} + +// IndexCheck returns whether a property of a class should be indexed +type ClassSettings interface { + PropertyIndexed(property string) bool + VectorizeClassName() bool + VectorizePropertyName(propertyName string) bool + PoolingStrategy() string +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, error) { + icheck := NewClassSettings(cfg) + text := v.objectVectorizer.Texts(ctx, object, icheck) + res, err := v.client.VectorizeObject(ctx, text, v.getVectorizationConfig(cfg)) + if err != nil { + return nil, err + } + + return res.Vector, nil +} + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + vectors := make([][]float32, 
len(inputs)) + for i := range inputs { + res, err := v.client.VectorizeQuery(ctx, inputs[i], v.getVectorizationConfig(cfg)) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + vectors[i] = res.Vector + } + + return libvectorizer.CombineVectors(vectors), nil +} + +func (v *Vectorizer) getVectorizationConfig(cfg moduletools.ClassConfig) transformers.VectorizationConfig { + settings := NewClassSettings(cfg) + return transformers.VectorizationConfig{InferenceURL: settings.InferenceURL()} +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..9b7694796fe9034a27b93f3a498242cf21e4fec0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "NVIDIA Module", + "documentationHref": "https://docs.api.nvidia.com/nim/reference/retrieval-apis", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/clients/nvidia.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/clients/nvidia.go new file mode 100644 index 0000000000000000000000000000000000000000..ddee08822f541510a6c751abf9bf8b193f9816b0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/clients/nvidia.go @@ -0,0 +1,70 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/nvidia" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2vec-nvidia/ent" +) + +type vectorizer struct { + client *nvidia.Client + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + client: nvidia.New(apiKey, timeout, logger), + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + settings := ent.NewClassSettings(cfg) + res, err := v.client.Vectorize(ctx, input, nvidia.Settings{ + Model: settings.Model(), + BaseURL: settings.BaseURL(), + Truncate: settings.Truncate(), + InputType: &nvidia.Passage, + 
}) + return res, nil, 0, err +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + settings := ent.NewClassSettings(cfg) + return v.client.Vectorize(ctx, input, nvidia.Settings{ + Model: settings.Model(), + BaseURL: settings.BaseURL(), + Truncate: settings.Truncate(), + InputType: &nvidia.Query, + }) +} + +func (v *vectorizer) GetApiKeyHash(ctx context.Context, config moduletools.ClassConfig) [32]byte { + return v.client.GetApiKeyHash(ctx, config) +} + +func (v *vectorizer) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + return v.client.GetVectorizerRateLimit(ctx, cfg) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/clients/nvidia_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/clients/nvidia_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8e36c68ecbb8cc058355ab8f6de11c755428f33f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/clients/nvidia_test.go @@ -0,0 +1,255 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/nvidia" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + client: nvidia.New("apiKey", 1*time.Minute, nullLogger()), + logger: nullLogger(), + } + cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}} + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + res, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, cfg) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + client: nvidia.New("apiKey", 1*time.Minute, nullLogger()), + logger: nullLogger(), + } + cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}} + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, cfg) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: 
errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}} + c := &vectorizer{ + client: nvidia.New("apiKey", 1*time.Minute, nullLogger()), + logger: nullLogger(), + } + _, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, cfg) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "connection to NVIDIA API failed with status: 400 error: nope, not gonna happen") + }) + + t.Run("when Nvidia key is passed using X-Nvidia-Api-Key header", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + client: nvidia.New("apiKey", 1*time.Minute, nullLogger()), + logger: nullLogger(), + } + cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}} + + ctxWithValue := context.WithValue(context.Background(), + "X-Nvidia-Api-Key", []string{"some-key"}) + + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + res, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, cfg) + + require.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when Nvidia key is empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + client: nvidia.New("", 1*time.Minute, nullLogger()), + logger: nullLogger(), + } + cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}} + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, cfg) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "Nvidia API Key: no api key found "+ + "neither in request header: X-Nvidia-Api-Key "+ + "nor in environment variable under NVIDIA_APIKEY") + }) 
+ + t.Run("when X-Nvidia-Api-Key header is passed but empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + client: nvidia.New("", 1*time.Minute, nullLogger()), + logger: nullLogger(), + } + cfg := fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}} + + ctxWithValue := context.WithValue(context.Background(), + "X-Nvidia-Api-Key", []string{""}) + + _, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, cfg) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "Nvidia API Key: no api key found "+ + "neither in request header: X-Nvidia-Api-Key "+ + "nor in environment variable under NVIDIA_APIKEY") + }) +} + +type fakeHandler struct { + t *testing.T + serverError error +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.serverError != nil { + embeddingError := map[string]interface{}{ + "message": f.serverError.Error(), + } + embeddingResponse := map[string]interface{}{ + "message": embeddingError["message"], + } + outBytes, err := json.Marshal(embeddingResponse) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusBadRequest) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + textInput := b["input"].([]interface{}) + assert.Greater(f.t, len(textInput), 0) + + embeddingResponse := map[string]interface{}{ + "object": "list", + "data": []interface{}{ + map[string]interface{}{ + "index": 0, + "object": "embedding", + "embedding": []float32{0.1, 0.2, 0.3}, + }, + }, + } + + outBytes, err := json.Marshal(embeddingResponse) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +type fakeClassConfig struct { + classConfig 
map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/config.go new file mode 100644 index 0000000000000000000000000000000000000000..d20249d57cd2bd26a7ee5ae7e435f974d3a6d43d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/config.go @@ -0,0 +1,50 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modnvidia + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2vec-nvidia/ent" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *NvidiaModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": ent.DefaultVectorizeClassName, + "model": ent.DefaultNvidiaModel, + "truncate": ent.DefaultTruncate, + "baseUrl": ent.DefaultBaseURL, + } +} + +func (m *NvidiaModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !ent.DefaultPropertyIndexed, + "vectorizePropertyName": ent.DefaultVectorizePropertyName, + } +} + +func (m *NvidiaModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := ent.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..9ffd91c7326c3b86c7c8f09c5526cf4326cf88f4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/ent/class_settings.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + DefaultBaseURL = "https://integrate.api.nvidia.com" + DefaultNvidiaModel = "nvidia/nv-embed-v1" + DefaultTruncate = "NONE" + DefaultVectorizeClassName = false + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + LowerCaseInput = false +) + +var availableTruncates = []string{"NONE", "START", "END"} + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, LowerCaseInput)} +} + +func (cs *classSettings) BaseURL() string { + return cs.BaseClassSettings.GetPropertyAsString("baseURL", DefaultBaseURL) +} + +func (cs *classSettings) Model() string { + return cs.BaseClassSettings.GetPropertyAsString("model", DefaultNvidiaModel) +} + +func (cs *classSettings) Truncate() *string { + truncate := cs.BaseClassSettings.GetPropertyAsString("truncate", DefaultTruncate) + return &truncate +} + +func (cs *classSettings) Validate(class *models.Class) error { + if err := cs.BaseClassSettings.Validate(class); err != nil { + return err + } + + truncate := cs.Truncate() + if truncate != nil && !basesettings.ValidateSetting[string](*truncate, availableTruncates) { + return errors.Errorf("wrong truncate type, available types are: %v", availableTruncates) + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/module.go new file mode 100644 index 0000000000000000000000000000000000000000..fe08a6c81b8a51f9af7dc6572718ae61e70291b7 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/module.go @@ -0,0 +1,159 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modnvidia + +import ( + "context" + "os" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + + "github.com/weaviate/weaviate/modules/text2vec-nvidia/ent" + + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-nvidia/clients" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" +) + +const Name = "text2vec-nvidia" + +var batchSettings = batch.Settings{ + TokenMultiplier: 0, + MaxObjectsPerBatch: 10, + MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 2500 }, + MaxTimePerBatch: float64(10), + HasTokenLimit: false, + ReturnsRateLimit: false, +} + +func New() *NvidiaModule { + return &NvidiaModule{} +} + +type NvidiaModule struct { + vectorizer text2vecbase.TextVectorizerBatch[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func (m *NvidiaModule) Name() string { + return Name +} + +func (m *NvidiaModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2ManyVec +} + +func (m *NvidiaModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) 
error { + m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init additional properties provider") + } + + return nil +} + +func (m *NvidiaModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *NvidiaModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("NVIDIA_APIKEY") + client := clients.New(apiKey, timeout, logger) + + m.vectorizer = text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, batchSettings, logger, m.Name()), + batch.ReturnBatchTokenizer(batchSettings.TokenMultiplier, m.Name(), ent.LowerCaseInput), + ) + m.metaProvider = client + + return nil +} + +func (m *NvidiaModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m *NvidiaModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg, ent.NewClassSettings(cfg)) +} + +func (m *NvidiaModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + vecs, errs := m.vectorizer.ObjectBatch(ctx, objs, skipObject, cfg) + + return vecs, nil, errs +} + +func (m 
*NvidiaModule) MetaInfo() (map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *NvidiaModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, nil, nil +} + +func (m *NvidiaModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +func (m *NvidiaModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[]float32](New()) + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.InputVectorizer[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..57c9eb6a678f7a7fdd4fcab83b379838a887aefa --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-nvidia/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modnvidia + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *NvidiaModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *NvidiaModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *NvidiaModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/clients/octoai.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/clients/octoai.go new file mode 100644 index 0000000000000000000000000000000000000000..90e39e9f60068d2e0340342367183153a2339b0e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/clients/octoai.go @@ -0,0 +1,57 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "errors" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/sirupsen/logrus" +) + +type vectorizer struct{} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{} +} + +func (v *vectorizer) Vectorize(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + return nil, nil, 0, errors.New("OctoAI is permanently shut down") +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + return nil, errors.New("OctoAI is permanently shut down") +} + +func (v *vectorizer) GetApiKeyHash(ctx context.Context, config moduletools.ClassConfig) [32]byte { + return [32]byte{} +} + +func (v *vectorizer) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + return nil +} + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "OctoAI Module (deprecated)", + "documentationHref": "https://octo.ai/docs/text-gen-solution/getting-started", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..17d9c45ee5601855de02f7f776e1e75d24627f47 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/config.go @@ -0,0 +1,48 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 
Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modoctoai + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2vec-octoai/ent" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *OctoAIModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": ent.DefaultVectorizeClassName, + "model": ent.DefaultOctoAIModel, + } +} + +func (m *OctoAIModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !ent.DefaultPropertyIndexed, + "vectorizePropertyName": ent.DefaultVectorizePropertyName, + } +} + +func (m *OctoAIModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := ent.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..d763947726109525f3968d262551a1e7aa554707 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/ent/class_settings.go @@ -0,0 +1,48 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + DefaultOctoAIDocumentType = "text" + DefaultOctoAIModel = "thenlper/gte-large" + DefaultVectorizeClassName = true + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + DefaultBaseURL = "https://text.octoai.run" +) + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, false)} +} + +func (cs *classSettings) Model() string { + return cs.BaseClassSettings.GetPropertyAsString("model", DefaultOctoAIModel) +} + +func (cs *classSettings) BaseURL() string { + return cs.BaseClassSettings.GetPropertyAsString("baseURL", DefaultBaseURL) +} + +func (cs *classSettings) Validate(class *models.Class) error { + return cs.BaseClassSettings.Validate(class) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/ent/vectorization_config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/ent/vectorization_config.go new file mode 100644 index 0000000000000000000000000000000000000000..9ca94386dfb28c945c6a2bac9df68e89b0b3e8ac --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/ent/vectorization_config.go @@ -0,0 +1,17 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationConfig struct { + Model string + BaseURL string +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..a8ad0b504f440139d14b14f3962fe17f5d1bae0c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/module.go @@ -0,0 +1,149 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modoctoai + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-octoai/clients" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" +) + +const Name = "text2vec-octoai" + +func New() *OctoAIModule { + return &OctoAIModule{} +} + +type OctoAIModule struct { + vectorizer text2vecbase.TextVectorizerBatch[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func (m *OctoAIModule) Name() string { + return Name +} + +func (m *OctoAIModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2ManyVec +} + 
+func (m *OctoAIModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init additional properties provider") + } + + return nil +} + +func (m *OctoAIModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *OctoAIModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + octoAIApiKey := os.Getenv("OCTOAI_APIKEY") + + client := clients.New(octoAIApiKey, timeout, logger) + + m.vectorizer = text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, batch.Settings{}, logger, m.Name()), + batch.ReturnBatchTokenizer(0, m.Name(), false), + ) + m.metaProvider = client + + return nil +} + +func (m *OctoAIModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m *OctoAIModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return nil, nil, errors.New("OctoAI is permanently shut down") +} + +func (m *OctoAIModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, nil, nil +} + +func (m *OctoAIModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg 
moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + errs := make(map[int]error) + for i := range objs { + errs[i] = errors.New("OctoAI is permanently shut down") + } + return nil, nil, errs +} + +func (m *OctoAIModule) MetaInfo() (map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *OctoAIModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +func (m *OctoAIModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return nil, errors.New("OctoAI is permanently shut down") +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[]float32](New()) + _ = modulecapabilities.GraphQLArguments(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..c14c6aa6cf7bf9d5c1545031157c3dad3283faa2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-octoai/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modoctoai + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *OctoAIModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *OctoAIModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *OctoAIModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..5a491351115b574f0134537279b5598709cb0058 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *ollama) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Ollama Module", + "documentationHref": "https://github.com/ollama/ollama/blob/main/docs/api.md#generate-embeddings", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/clients/ollama.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/clients/ollama.go new file mode 100644 index 0000000000000000000000000000000000000000..ad84b83a1da072831da038aa3da836d08b659339 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/clients/ollama.go @@ -0,0 +1,151 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/modules/text2vec-ollama/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents" +) + +func buildURL(apiEndpoint string) string { + return fmt.Sprintf("%s/api/embed", apiEndpoint) +} + +type ollama struct { + httpClient *http.Client + urlBuilderFn func(apiEndpoint string) string + logger logrus.FieldLogger +} + +func New(timeout time.Duration, logger logrus.FieldLogger) *ollama { + return &ollama{ + httpClient: &http.Client{ + Timeout: timeout, + }, + urlBuilderFn: buildURL, + logger: logger, + } +} + +func (v *ollama) Vectorize(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + res, err := v.vectorize(ctx, 
input, cfg) + return res, nil, 0, err +} + +func (v *ollama) VectorizeQuery(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + return v.vectorize(ctx, input, cfg) +} + +func (v *ollama) GetApiKeyHash(ctx context.Context, config moduletools.ClassConfig) [32]byte { + return sha256.Sum256([]byte("ollama")) +} + +func (v *ollama) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + return &modulecomponents.RateLimits{ + LimitRequests: 100, + LimitTokens: 1000000, + RemainingRequests: 100, + RemainingTokens: 1000000, + ResetRequests: time.Now(), + ResetTokens: time.Now(), + AfterRequestFunction: func(limits *modulecomponents.RateLimits, tokensUsed int, deductRequest bool) {}, + } +} + +func (v *ollama) vectorize(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + settings := ent.NewClassSettings(cfg) + body, err := json.Marshal(embeddingsRequest{ + Model: settings.Model(), + Input: input, + }) + if err != nil { + return nil, errors.Wrapf(err, "marshal body") + } + + endpointURL := v.urlBuilderFn(settings.ApiEndpoint()) + + req, err := http.NewRequestWithContext(ctx, "POST", endpointURL, + bytes.NewReader(body)) + if err != nil { + return nil, errors.Wrap(err, "create POST request") + } + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "read response body") + } + + return v.parseEmbeddingsResponse(res.StatusCode, bodyBytes, input) +} + +func (v *ollama) parseEmbeddingsResponse(statusCode int, + bodyBytes []byte, input []string, +) (*modulecomponents.VectorizationResult[[]float32], error) { + var resBody embeddingsResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + 
return nil, errors.Wrapf(err, "unmarshal response body. Got: %v", string(bodyBytes)) + } + + if resBody.Error != "" { + return nil, errors.Errorf("connection to Ollama API failed with error: %s", resBody.Error) + } + + if statusCode != 200 { + return nil, errors.Errorf("connection to Ollama API failed with status: %d", statusCode) + } + + if len(resBody.Embeddings) == 0 { + return nil, errors.Errorf("empty embeddings response") + } + + return &modulecomponents.VectorizationResult[[]float32]{ + Text: input, + Vector: resBody.Embeddings, + Dimensions: len(resBody.Embeddings[0]), + }, nil +} + +type embeddingsRequest struct { + Model string `json:"model"` + Input []string `json:"input"` +} + +type embeddingsResponse struct { + Model string `json:"model"` + Embeddings [][]float32 `json:"embeddings,omitempty"` + Error string `json:"error,omitempty"` +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/clients/ollama_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/clients/ollama_test.go new file mode 100644 index 0000000000000000000000000000000000000000..59776fc96ca13877ca01363831de4834098da13a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/clients/ollama_test.go @@ -0,0 +1,213 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" +) + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &ollama{ + httpClient: &http.Client{}, + urlBuilderFn: func(apiEndpoint string) string { + assert.Equal(t, "endpoint", apiEndpoint) + return server.URL + }, + logger: nullLogger(), + } + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + cfg := fakeClassConfig{ + classConfig: map[string]interface{}{ + "apiEndpoint": "endpoint", + "model": "future-text-embed", + }, + } + res, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, cfg) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &ollama{ + httpClient: &http.Client{}, + urlBuilderFn: func(apiEndpoint string) string { + return server.URL + }, + logger: nullLogger(), + } + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: 
errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + c := &ollama{ + httpClient: &http.Client{}, + urlBuilderFn: func(apiEndpoint string) string { + return server.URL + }, + logger: nullLogger(), + } + _, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, fakeClassConfig{}) + + require.NotNil(t, err) + assert.EqualError(t, err, "connection to Ollama API failed with error: nope, not gonna happen") + }) +} + +type fakeHandler struct { + t *testing.T + serverError error +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.serverError != nil { + embeddingResponse := &embeddingsResponse{ + Error: f.serverError.Error(), + } + + outBytes, err := json.Marshal(embeddingResponse) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var req embeddingsRequest + require.Nil(f.t, json.Unmarshal(bodyBytes, &req)) + + require.NotNil(f.t, req) + + embeddingResponse := &embeddingsResponse{ + Embeddings: [][]float32{{0.1, 0.2, 0.3}}, + } + + outBytes, err := json.Marshal(embeddingResponse) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizeClassName bool + vectorizePropertyName bool + skippedProperty string + excludedProperty string + apiEndpoint string + modelID string + properties interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + classSettings := map[string]interface{}{ + "vectorizeClassName": f.vectorizeClassName, + } + if f.apiEndpoint != "" { + classSettings["apiEndpoint"] = f.apiEndpoint + } + if f.modelID != "" { + classSettings["modelID"] = f.modelID + } + if f.properties != nil { + classSettings["properties"] = 
f.properties + } + for k, v := range f.classConfig { + classSettings[k] = v + } + return classSettings +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.Class() +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/config.go new file mode 100644 index 0000000000000000000000000000000000000000..a50502b9b2d3899fa74ca56ac88269428ff6a10b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/config.go @@ -0,0 +1,47 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modollama + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2vec-ollama/ent" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *OllamaModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": ent.DefaultVectorizeClassName, + } +} + +func (m *OllamaModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !ent.DefaultPropertyIndexed, + "vectorizePropertyName": ent.DefaultVectorizePropertyName, + } +} + +func (m *OllamaModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := ent.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..b451c0b5a224681d6eb718df4a05d47fd335c23f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/ent/class_settings.go @@ -0,0 +1,68 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + apiEndpointProperty = "apiEndpoint" + modelProperty = "model" +) + +const ( + DefaultVectorizeClassName = false + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + DefaultApiEndpoint = "http://localhost:11434" + DefaultModel = "nomic-embed-text" + LowerCaseInput = false +) + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, LowerCaseInput)} +} + +func (ic *classSettings) Validate(class *models.Class) error { + if err := ic.BaseClassSettings.Validate(class); err != nil { + return err + } + if ic.ApiEndpoint() == "" { + return errors.New("apiEndpoint cannot be empty") + } + if ic.Model() == "" { + return errors.New("model cannot be empty") + } + return nil +} + +func (ic *classSettings) getStringProperty(name, defaultValue string) string { + return ic.BaseClassSettings.GetPropertyAsString(name, defaultValue) +} + +func (ic *classSettings) ApiEndpoint() string { + return ic.getStringProperty(apiEndpointProperty, DefaultApiEndpoint) +} + +func (ic *classSettings) Model() string { + return ic.getStringProperty(modelProperty, DefaultModel) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/ent/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/ent/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ddff65f8696a54c9e478eff8c4899ac867695327 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/ent/class_settings_test.go @@ -0,0 
+1,161 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "testing" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/moduletools" +) + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + cfg moduletools.ClassConfig + wantApiEndpoint string + wantModel string + wantErr error + }{ + { + name: "happy flow", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + wantApiEndpoint: "http://localhost:11434", + wantModel: "nomic-embed-text", + wantErr: nil, + }, + { + name: "custom values", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "apiEndpoint": "https://localhost:11434", + "model": "future-text-embed", + }, + }, + wantApiEndpoint: "https://localhost:11434", + wantModel: "future-text-embed", + wantErr: nil, + }, + { + name: "empty endpoint", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "apiEndpoint": "", + "model": "test", + }, + }, + wantErr: errors.Errorf("apiEndpoint cannot be empty"), + }, + { + name: "empty model", + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "apiEndpoint": "http://localhost:8080", + "model": "", + }, + }, + wantErr: errors.Errorf("model cannot be empty"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ic := NewClassSettings(tt.cfg) + if tt.wantErr != nil { + assert.EqualError(t, ic.Validate(&models.Class{Class: "Test", Properties: []*models.Property{ + { + Name: "test", + DataType: []string{schema.DataTypeText.String()}, + }, + }}), 
tt.wantErr.Error()) + } else { + assert.Equal(t, tt.wantApiEndpoint, ic.ApiEndpoint()) + assert.Equal(t, tt.wantModel, ic.Model()) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizeClassName bool + vectorizePropertyName bool + skippedProperty string + excludedProperty string + apiEndpoint string + modelID string + properties interface{} +} + +func (f fakeClassConfig) Class() map[string]interface{} { + classSettings := map[string]interface{}{ + "vectorizeClassName": f.vectorizeClassName, + } + if f.apiEndpoint != "" { + classSettings["apiEndpoint"] = f.apiEndpoint + } + if f.modelID != "" { + classSettings["modelID"] = f.modelID + } + if f.properties != nil { + classSettings["properties"] = f.properties + } + for k, v := range f.classConfig { + classSettings[k] = v + } + return classSettings +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.Class() +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/module.go new file mode 100644 index 0000000000000000000000000000000000000000..0c31c0fc88e52d44b6ecac42e6b50556d95b4686 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/module.go @@ -0,0 +1,153 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modollama + +import ( + "context" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-ollama/clients" + "github.com/weaviate/weaviate/modules/text2vec-ollama/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" +) + +const Name = "text2vec-ollama" + +var batchSettings = batch.Settings{ + TokenMultiplier: 0, + MaxObjectsPerBatch: 10, + MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 2500 }, + MaxTimePerBatch: float64(10), + HasTokenLimit: false, + ReturnsRateLimit: false, +} + +func New() *OllamaModule { + return &OllamaModule{} +} + +type OllamaModule struct { + vectorizer text2vecbase.TextVectorizerBatch[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func (m *OllamaModule) Name() string { + return Name +} + +func (m *OllamaModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2Vec +} + +func (m *OllamaModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger 
= params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init additional properties provider") + } + + return nil +} + +func (m *OllamaModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *OllamaModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + client := clients.New(timeout, logger) + + m.vectorizer = text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, batchSettings, logger, m.Name()), + batch.ReturnBatchTokenizer(batchSettings.TokenMultiplier, m.Name(), ent.LowerCaseInput), + ) + m.metaProvider = client + + return nil +} + +func (m *OllamaModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m *OllamaModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg, ent.NewClassSettings(cfg)) +} + +func (m *OllamaModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + vecs, errs := m.vectorizer.ObjectBatch(ctx, objs, skipObject, cfg) + return vecs, nil, errs +} + +func (m *OllamaModule) MetaInfo() (map[string]interface{}, error) { + 
return m.metaProvider.MetaInfo() +} + +func (m *OllamaModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +func (m *OllamaModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +func (m *OllamaModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, nil, nil +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[]float32](New()) + _ = modulecapabilities.GraphQLArguments(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..b70c7274b30a2403952591f5a207f3d5172b1e6c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-ollama/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modollama + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *OllamaModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *OllamaModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *OllamaModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..66b3424d989ced1b66c11b705780a757ec395d48 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/fakes_for_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..7e1607b06c9811a788aeeb387592a79396f2dba7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *client) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "OpenAI Module", + "documentationHref": "https://platform.openai.com/docs/guides/embeddings/what-are-embeddings", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai.go new file mode 100644 index 0000000000000000000000000000000000000000..452e92c75787fc5dc439bc91f32c883c54ee03e0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai.go @@ -0,0 +1,371 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/logrusext" + "github.com/weaviate/weaviate/usecases/monitoring" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2vec-openai/ent" +) + +type embeddingsRequest struct { + Input []string `json:"input"` + Model string `json:"model,omitempty"` + Dimensions *int64 `json:"dimensions,omitempty"` +} + +type embedding struct { + Object string `json:"object"` + Data []embeddingData `json:"data,omitempty"` + Error *openAIApiError `json:"error,omitempty"` + Usage *modulecomponents.Usage `json:"usage,omitempty"` +} + +type embeddingData struct { + Object string `json:"object"` + Index int `json:"index"` + Embedding []float32 `json:"embedding"` + Error 
*openAIApiError `json:"error,omitempty"` +} + +type openAIApiError struct { + Message string `json:"message"` + Type string `json:"type"` + Param string `json:"param"` + Code openAICode `json:"code"` +} + +type openAICode string + +func (c *openAICode) String() string { + if c == nil { + return "" + } + return string(*c) +} + +func (c *openAICode) UnmarshalJSON(data []byte) (err error) { + if number, err := strconv.Atoi(string(data)); err == nil { + str := strconv.Itoa(number) + *c = openAICode(str) + return nil + } + var str string + err = json.Unmarshal(data, &str) + if err != nil { + return err + } + *c = openAICode(str) + return nil +} + +func buildUrl(baseURL, resourceName, deploymentID, apiVersion string, isAzure bool) (string, error) { + if isAzure { + host := baseURL + if host == "" || host == "https://api.openai.com" { + // Fall back to old assumption + host = "https://" + resourceName + ".openai.azure.com" + } + + path := "openai/deployments/" + deploymentID + "/embeddings" + queryParam := fmt.Sprintf("api-version=%s", apiVersion) + return fmt.Sprintf("%s/%s?%s", host, path, queryParam), nil + } + + host := baseURL + path := "/v1/embeddings" + return url.JoinPath(host, path) +} + +type client struct { + openAIApiKey string + openAIOrganization string + azureApiKey string + httpClient *http.Client + buildUrlFn func(baseURL, resourceName, deploymentID, apiVersion string, isAzure bool) (string, error) + logger logrus.FieldLogger + sampledLogger *logrusext.Sampler +} + +func New(openAIApiKey, openAIOrganization, azureApiKey string, timeout time.Duration, logger logrus.FieldLogger) *client { + return &client{ + openAIApiKey: openAIApiKey, + openAIOrganization: openAIOrganization, + azureApiKey: azureApiKey, + httpClient: &http.Client{ + Timeout: timeout, + }, + buildUrlFn: buildUrl, + logger: logger, + sampledLogger: logrusext.NewSampler(logger, 5, time.Minute), + } +} + +func (v *client) Vectorize(ctx context.Context, input []string, + cfg 
moduletools.ClassConfig,
+) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) {
+	vecCfg := v.getVectorizationConfig(cfg, "document")
+	res, rateLimits, totalTokens, err := v.vectorize(ctx, input, vecCfg.ModelString, vecCfg)
+	if err != nil {
+		// count the failed upstream call; labels here are static placeholders
+		monitoring.GetMetrics().ModuleCallError.WithLabelValues("openai", "-", "-").Inc()
+	}
+	return res, rateLimits, totalTokens, err
+}
+
+// VectorizeQuery embeds the inputs for searching (the "query" action).
+func (v *client) VectorizeQuery(ctx context.Context, input []string,
+	cfg moduletools.ClassConfig,
+) (*modulecomponents.VectorizationResult[[]float32], error) {
+	vecCfg := v.getVectorizationConfig(cfg, "query")
+	res, _, _, err := v.vectorize(ctx, input, vecCfg.ModelString, vecCfg)
+	if err != nil {
+		monitoring.GetMetrics().ModuleExternalError.WithLabelValues("text2vec", "openai", "-", "-").Inc()
+	}
+	return res, err
+}
+
+// vectorize performs the embeddings HTTP request and decodes the result.
+func (v *client) vectorize(ctx context.Context, input []string, model string, config ent.VectorizationConfig) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) {
+	metrics := monitoring.GetMetrics()
+	startTime := time.Now()
+	metrics.ModuleExternalRequests.WithLabelValues("text2vec", "openai").Inc()
+
+	body, err := json.Marshal(v.getEmbeddingsRequest(input, model, config.IsAzure, config.Dimensions))
+	if err != nil {
+		return nil, nil, 0, errors.Wrap(err, "marshal body")
+	}
+
+	endpoint, err := v.buildURL(ctx, config)
+	if err != nil {
+		return nil, nil, 0, errors.Wrap(err, "join OpenAI API host and path")
+	}
+
+	// observe total call duration regardless of outcome
+	defer func() {
+		monitoring.GetMetrics().ModuleExternalRequestDuration.WithLabelValues("openai", endpoint).Observe(time.Since(startTime).Seconds())
+	}()
+
+	req, err := http.NewRequestWithContext(ctx, "POST", endpoint,
+		bytes.NewReader(body))
+	if err != nil {
+		return nil, nil, 0, errors.Wrap(err, "create POST request")
+	}
+	apiKey, err := v.getApiKey(ctx, config.IsAzure)
+	if err != nil {
+		return nil, nil, 0, errors.Wrap(err, "API Key")
+	}
+	
req.Header.Add(v.getApiKeyHeaderAndValue(apiKey, config.IsAzure))
+	if openAIOrganization := v.getOpenAIOrganization(ctx); openAIOrganization != "" {
+		req.Header.Add("OpenAI-Organization", openAIOrganization)
+	}
+	req.Header.Add("Content-Type", "application/json")
+
+	metrics.ModuleExternalRequestSingleCount.WithLabelValues("text2vec", endpoint).Inc()
+
+	metrics.ModuleExternalRequestSize.WithLabelValues("text2vec", endpoint).Observe(float64(len(body)))
+
+	res, err := v.httpClient.Do(req)
+	if res != nil {
+		// record the HTTP status even when Do also returned an error
+		statusMetric := monitoring.GetMetrics().ModuleExternalResponseStatus
+		statusMetric.WithLabelValues("text2vec", endpoint, fmt.Sprintf("%v", res.StatusCode)).Inc()
+	}
+	if err != nil {
+		// NOTE(review): using the error text as a metric label can create
+		// unbounded label cardinality — confirm against monitoring conventions.
+		metrics.ModuleCallError.WithLabelValues("openai", endpoint, fmt.Sprintf("%v", err)).Inc()
+		return nil, nil, 0, errors.Wrap(err, "send POST request")
+	}
+	defer res.Body.Close()
+
+	requestID := res.Header.Get("x-request-id")
+	bodyBytes, err := io.ReadAll(res.Body)
+	if err != nil {
+		return nil, nil, 0, errors.Wrap(err, "read response body")
+	}
+
+	responseSize := metrics.ModuleExternalResponseSize
+	responseSize.WithLabelValues("text2vec", endpoint).Observe(float64(len(bodyBytes)))
+
+	var resBody embedding
+	if err := json.Unmarshal(bodyBytes, &resBody); err != nil {
+		return nil, nil, 0, errors.Wrap(err, fmt.Sprintf("unmarshal response body. 
Got: %v", string(bodyBytes)))
+	}
+
+	if res.StatusCode != 200 || resBody.Error != nil {
+		return nil, nil, 0, v.getError(res.StatusCode, requestID, resBody.Error, config.IsAzure)
+	}
+	rateLimit := ent.GetRateLimitsFromHeader(v.sampledLogger, res.Header, config.IsAzure)
+
+	// Fail fast on an empty result; previously this was only detected after
+	// the (empty) output slices had been built.
+	if len(resBody.Data) == 0 {
+		return nil, nil, 0, errors.New("no data returned from OpenAI API")
+	}
+
+	texts := make([]string, len(resBody.Data))
+	embeddings := make([][]float32, len(resBody.Data))
+	openAIerror := make([]error, len(resBody.Data))
+	for i := range resBody.Data {
+		texts[i] = resBody.Data[i].Object
+		embeddings[i] = resBody.Data[i].Embedding
+		if resBody.Data[i].Error != nil {
+			openAIerror[i] = v.getError(res.StatusCode, requestID, resBody.Data[i].Error, config.IsAzure)
+		}
+	}
+
+	// BUGFIX: usage is reported once per API response, so observe it exactly
+	// once. Previously this block sat inside the loop above and inflated the
+	// token histograms by a factor of len(resBody.Data).
+	if resBody.Usage != nil {
+		vrt := metrics.VectorizerRequestTokens
+		vrt.WithLabelValues("input", endpoint).Observe(float64(resBody.Usage.PromptTokens))
+		vrt.WithLabelValues("output", endpoint).Observe(float64(resBody.Usage.CompletionTokens))
+	}
+
+	return &modulecomponents.VectorizationResult[[]float32]{
+		Text:       texts,
+		Dimensions: len(resBody.Data[0].Embedding),
+		Vector:     embeddings,
+		Errors:     openAIerror,
+	}, rateLimit, modulecomponents.GetTotalTokens(resBody.Usage), nil
+}
+
+// buildURL resolves the effective endpoint, letting per-request headers
+// override the class-level configuration.
+func (v *client) buildURL(ctx context.Context, config ent.VectorizationConfig) (string, error) {
+	baseURL, resourceName, deploymentID, apiVersion, isAzure := config.BaseURL, config.ResourceName, config.DeploymentID, config.ApiVersion, config.IsAzure
+
+	if headerBaseURL := modulecomponents.GetValueFromContext(ctx, "X-Openai-Baseurl"); headerBaseURL != "" {
+		baseURL = headerBaseURL
+	}
+
+	if headerDeploymentID := modulecomponents.GetValueFromContext(ctx, "X-Azure-Deployment-Id"); headerDeploymentID != "" {
+		deploymentID = headerDeploymentID
+	}
+
+	if headerResourceName := modulecomponents.GetValueFromContext(ctx, "X-Azure-Resource-Name"); headerResourceName != "" {
+		resourceName = headerResourceName
+	}
+
+	return 
v.buildUrlFn(baseURL, resourceName, deploymentID, apiVersion, isAzure)
+}
+
+// getError assembles a descriptive error for a failed embeddings call and
+// increments the external-error counter.
+func (v *client) getError(statusCode int, requestID string, resBodyError *openAIApiError, isAzure bool) error {
+	endpoint := "OpenAI API"
+	if isAzure {
+		endpoint = "Azure OpenAI API"
+	}
+	msg := fmt.Sprintf("connection to: %s failed with status: %d", endpoint, statusCode)
+	if requestID != "" {
+		msg = fmt.Sprintf("%s request-id: %s", msg, requestID)
+	}
+	if resBodyError != nil {
+		msg = fmt.Sprintf("%s error: %v", msg, resBodyError.Message)
+	}
+	// NOTE(review): the full message is used as a metric label, which can
+	// create unbounded label cardinality — confirm this is intended.
+	monitoring.GetMetrics().ModuleExternalError.WithLabelValues("text2vec", endpoint, msg, fmt.Sprintf("%v", statusCode)).Inc()
+	return errors.New(msg)
+}
+
+// getEmbeddingsRequest builds the request payload; Azure deployments encode
+// the model in the URL, so the body omits it there.
+func (v *client) getEmbeddingsRequest(input []string, model string, isAzure bool, dimensions *int64) embeddingsRequest {
+	req := embeddingsRequest{Input: input, Dimensions: dimensions}
+	if !isAzure {
+		req.Model = model
+	}
+	return req
+}
+
+// getApiKeyHeaderAndValue returns the auth header name/value pair for the
+// target API flavor.
+func (v *client) getApiKeyHeaderAndValue(apiKey string, isAzure bool) (string, string) {
+	if !isAzure {
+		return "Authorization", "Bearer " + apiKey
+	}
+	return "api-key", apiKey
+}
+
+// getOpenAIOrganization prefers a per-request header over the configured
+// default organization.
+func (v *client) getOpenAIOrganization(ctx context.Context) string {
+	if value := modulecomponents.GetValueFromContext(ctx, "X-Openai-Organization"); value != "" {
+		return value
+	}
+	return v.openAIOrganization
+}
+
+// GetApiKeyHash returns a stable hash of the effective API key, or the zero
+// value when no key can be resolved.
+func (v *client) GetApiKeyHash(ctx context.Context, cfg moduletools.ClassConfig) [32]byte {
+	config := v.getVectorizationConfig(cfg, "document")
+
+	key, err := v.getApiKey(ctx, config.IsAzure)
+	if err != nil {
+		return [32]byte{}
+	}
+	return sha256.Sum256([]byte(key))
+}
+
+// GetVectorizerRateLimit seeds rate-limit bookkeeping from per-request
+// context values.
+func (v *client) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits {
+	config := v.getVectorizationConfig(cfg, "document")
+	name := "Openai"
+	if config.IsAzure {
+		name = "Azure"
+	}
+	rpm, tpm := 
modulecomponents.GetRateLimitFromContext(ctx, name, 0, 0)
+	return &modulecomponents.RateLimits{
+		RemainingTokens:   tpm,
+		LimitTokens:       tpm,
+		ResetTokens:       time.Now().Add(61 * time.Second),
+		RemainingRequests: rpm,
+		LimitRequests:     rpm,
+		ResetRequests:     time.Now().Add(61 * time.Second),
+	}
+}
+
+// getApiKey picks the header name, env-var name, and configured key for the
+// requested API flavor, then resolves the effective key.
+func (v *client) getApiKey(ctx context.Context, isAzure bool) (string, error) {
+	headerName, envVar, envValue := "X-Openai-Api-Key", "OPENAI_APIKEY", v.openAIApiKey
+	if isAzure {
+		headerName, envVar, envValue = "X-Azure-Api-Key", "AZURE_APIKEY", v.azureApiKey
+	}
+
+	return v.getApiKeyFromContext(ctx, headerName, envValue, envVar)
+}
+
+// getApiKeyFromContext prefers the request-scoped key, then the configured
+// value; the names are only used to build the error message.
+func (v *client) getApiKeyFromContext(ctx context.Context, apiKey, envVarValue, envVar string) (string, error) {
+	if apiKeyValue := modulecomponents.GetValueFromContext(ctx, apiKey); apiKeyValue != "" {
+		return apiKeyValue, nil
+	}
+	if envVarValue != "" {
+		return envVarValue, nil
+	}
+	return "", fmt.Errorf("no api key found neither in request header: %s nor in environment variable under %s", apiKey, envVar)
+}
+
+// getVectorizationConfig snapshots all vectorizer settings for one action
+// ("document" or "query").
+func (v *client) getVectorizationConfig(cfg moduletools.ClassConfig, action string) ent.VectorizationConfig {
+	settings := ent.NewClassSettings(cfg)
+	return ent.VectorizationConfig{
+		Type:                 settings.Type(),
+		Model:                settings.Model(),
+		ModelVersion:         settings.ModelVersion(),
+		ResourceName:         settings.ResourceName(),
+		DeploymentID:         settings.DeploymentID(),
+		BaseURL:              settings.BaseURL(),
+		IsAzure:              settings.IsAzure(),
+		IsThirdPartyProvider: settings.IsThirdPartyProvider(),
+		ApiVersion:           settings.ApiVersion(),
+		Dimensions:           settings.Dimensions(),
+		ModelString:          settings.ModelStringForAction(action),
+	}
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai_fuzz_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai_fuzz_test.go
new file mode 100644
index 
0000000000000000000000000000000000000000..f900410bf795cc0e400df661d95ca31abd075abf
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai_fuzz_test.go
@@ -0,0 +1,109 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | || __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+// CONTACT: hello@weaviate.io
+//
+
+package clients
+
+import (
+	"context"
+	"encoding/json"
+	"math/rand"
+	"net/http"
+	"net/http/httptest"
+	"strconv"
+	"testing"
+	"time"
+)
+
+// FuzzVectorizeTotal drives the full client surface (Vectorize,
+// VectorizeQuery, GetApiKeyHash, GetVectorizerRateLimit) against a fake
+// server that randomly returns errors, broken JSON, and varying rate-limit
+// headers; return values are deliberately ignored — the target is panics.
+func FuzzVectorizeTotal(f *testing.F) {
+	f.Add("some input", "ada", "org1", "api-key-1", "base.url", "deployID", "resourceX", true, "2024-04-01")
+	f.Fuzz(func(t *testing.T, text, model, org, key, baseURL, deployID, resource string, isAzure bool, apiVersion string) {
+		if text == "" || model == "" || key == "" {
+			t.Skip("incomplete input")
+			return
+		}
+
+		// random-length, random-valued embedding vector
+		vec := make([]float32, rand.Intn(20)+1)
+		for i := range vec {
+			vec[i] = rand.Float32()
+		}
+
+		entry := map[string]interface{}{
+			"object":    text,
+			"index":     0,
+			"embedding": vec,
+		}
+
+		payload := map[string]interface{}{
+			"object": "list",
+			"data":   []interface{}{entry},
+			"usage": map[string]interface{}{
+				"prompt_tokens":     rand.Intn(1000),
+				"completion_tokens": rand.Intn(1000),
+			},
+		}
+
+		// ~20% of responses carry a top-level error object
+		if rand.Float32() < 0.2 {
+			payload["error"] = map[string]interface{}{
+				"message": "main error",
+				"type":    "fail",
+				"param":   "some",
+				"code":    500,
+			}
+			// must still include valid data to avoid crash
+			payload["data"] = []interface{}{entry}
+		}
+
+		srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			w.Header().Set("Content-Type", "application/json")
+			w.Header().Set("x-ratelimit-limit-requests", strconv.Itoa(rand.Intn(1000)))
+			w.Header().Set("x-ratelimit-limit-tokens", strconv.Itoa(rand.Intn(1000)))
+			w.Header().Set("x-ratelimit-remaining-requests", strconv.Itoa(rand.Intn(1000)))
+			w.Header().Set("x-ratelimit-remaining-tokens", strconv.Itoa(rand.Intn(1000)))
+
+			// ~5% of responses are truncated JSON
+			if rand.Float32() < 0.05 {
+				w.Write([]byte("{ broken"))
+				return
+			}
+
+			_ = json.NewEncoder(w).Encode(payload)
+		}))
+		defer srv.Close()
+
+		ctx := context.Background()
+		ctx = context.WithValue(ctx, "X-Openai-Api-Key", []string{key})
+		ctx = context.WithValue(ctx, "X-Openai-Organization", []string{org})
+		ctx = context.WithValue(ctx, "X-Openai-Baseurl", []string{baseURL})
+		ctx = context.WithValue(ctx, "X-Azure-Deployment-Id", []string{deployID})
+		ctx = context.WithValue(ctx, "X-Azure-Resource-Name", []string{resource})
+		ctx = context.WithValue(ctx, "X-Openai-Ratelimit-RequestPM-Embedding", []string{strconv.Itoa(rand.Intn(1000))})
+		ctx = context.WithValue(ctx, "X-Openai-Ratelimit-TokenPM-Embedding", []string{strconv.Itoa(rand.Intn(1000))})
+
+		cfg := fakeClassConfig{classConfig: map[string]interface{}{
+			"Type":         "text",
+			"Model":        model,
+			"IsAzure":      isAzure,
+			"ApiVersion":   apiVersion,
+			"BaseURL":      baseURL,
+			"ResourceName": resource,
+			"DeploymentID": deployID,
+		}}
+
+		c := New(key, org, key, time.Second, nullLogger())
+		c.buildUrlFn = func(_, _, _, _ string, _ bool) (string, error) {
+			return srv.URL, nil
+		}
+
+		_, _, _, _ = c.Vectorize(ctx, []string{text}, cfg)
+		_, _ = c.VectorizeQuery(ctx, []string{text}, cfg)
+		_ = c.GetApiKeyHash(ctx, cfg)
+		_ = c.GetVectorizerRateLimit(ctx, cfg)
+	})
+}
diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..fe67fc0c16f14577176e94111688d5c786a6ebc1
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai_test.go
@@ -0,0 +1,560 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ 
\ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/modules/text2vec-openai/ent" +) + +func TestBuildUrlFn(t *testing.T) { + t.Run("buildUrlFn returns default OpenAI Client", func(t *testing.T) { + config := ent.VectorizationConfig{ + Type: "", + Model: "", + ModelVersion: "", + ResourceName: "", + DeploymentID: "", + ApiVersion: "2022-12-01", + BaseURL: "https://api.openai.com", + IsAzure: false, + } + url, err := buildUrl(config.BaseURL, config.ResourceName, config.DeploymentID, config.ApiVersion, config.IsAzure) + assert.Nil(t, err) + assert.Equal(t, "https://api.openai.com/v1/embeddings", url) + }) + t.Run("buildUrlFn returns Azure Client", func(t *testing.T) { + config := ent.VectorizationConfig{ + Type: "", + Model: "", + ModelVersion: "", + ResourceName: "resourceID", + DeploymentID: "deploymentID", + ApiVersion: "2022-12-01", + BaseURL: "", + IsAzure: true, + } + url, err := buildUrl(config.BaseURL, config.ResourceName, config.DeploymentID, config.ApiVersion, config.IsAzure) + assert.Nil(t, err) + assert.Equal(t, "https://resourceID.openai.azure.com/openai/deployments/deploymentID/embeddings?api-version=2022-12-01", url) + }) + + t.Run("buildUrlFn returns Azure Client with custom API Version", func(t *testing.T) { + config := ent.VectorizationConfig{ + Type: "", + Model: "", + ModelVersion: "", + ResourceName: "resourceID", + DeploymentID: "deploymentID", + ApiVersion: "2024-02-01", + BaseURL: "", + IsAzure: true, + } + url, 
err := buildUrl(config.BaseURL, config.ResourceName, config.DeploymentID, config.ApiVersion, config.IsAzure) + assert.Nil(t, err) + assert.Equal(t, "https://resourceID.openai.azure.com/openai/deployments/deploymentID/embeddings?api-version=2024-02-01", url) + }) + + t.Run("buildUrlFn returns Azure client with BaseUrl set", func(t *testing.T) { + config := ent.VectorizationConfig{ + Type: "", + Model: "", + ModelVersion: "", + ResourceName: "resourceID", + DeploymentID: "deploymentID", + ApiVersion: "2022-12-01", + BaseURL: "https://foobar.some.proxy", + IsAzure: true, + } + url, err := buildUrl(config.BaseURL, config.ResourceName, config.DeploymentID, config.ApiVersion, config.IsAzure) + assert.Nil(t, err) + assert.Equal(t, "https://foobar.some.proxy/openai/deployments/deploymentID/embeddings?api-version=2022-12-01", url) + }) + + t.Run("buildUrlFn loads from BaseURL", func(t *testing.T) { + config := ent.VectorizationConfig{ + Type: "", + Model: "", + ModelVersion: "", + ResourceName: "resourceID", + DeploymentID: "deploymentID", + ApiVersion: "2022-12-01", + BaseURL: "https://foobar.some.proxy", + IsAzure: false, + } + url, err := buildUrl(config.BaseURL, config.ResourceName, config.DeploymentID, config.ApiVersion, config.IsAzure) + assert.Nil(t, err) + assert.Equal(t, "https://foobar.some.proxy/v1/embeddings", url) + }) +} + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + + c := New("apiKey", "", "", 0, nullLogger()) + c.buildUrlFn = func(baseURL, resourceName, deploymentID, apiVersion string, isAzure bool) (string, error) { + return server.URL, nil + } + + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + Errors: []error{nil}, + } + res, rl, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, fakeClassConfig{classConfig: 
map[string]interface{}{"Type": "text", "Model": "ada"}}) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + + assert.Equal(t, false, rl.UpdateWithMissingValues) + assert.Equal(t, 100, rl.RemainingTokens) + assert.Equal(t, 100, rl.RemainingRequests) + assert.Equal(t, 100, rl.LimitTokens) + assert.Equal(t, 100, rl.LimitRequests) + }) + + t.Run("when rate limit values are missing", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t, noRlHeader: true}) + defer server.Close() + + c := New("apiKey", "", "", 0, nullLogger()) + c.buildUrlFn = func(baseURL, resourceName, deploymentID, apiVersion string, isAzure bool) (string, error) { + return server.URL, nil + } + + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + Errors: []error{nil}, + } + res, rl, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Type": "text", "Model": "ada"}}) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + + assert.Equal(t, true, rl.UpdateWithMissingValues) + assert.Equal(t, -1, rl.RemainingTokens) + assert.Equal(t, -1, rl.RemainingRequests) + assert.Equal(t, -1, rl.LimitTokens) + assert.Equal(t, -1, rl.LimitRequests) + }) + + t.Run("when rate limit values are returned but are bad values", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t, noRlHeader: false, RlValues: "0"}) + defer server.Close() + + c := New("apiKey", "", "", 0, nullLogger()) + c.buildUrlFn = func(baseURL, resourceName, deploymentID, apiVersion string, isAzure bool) (string, error) { + return server.URL, nil + } + + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + Errors: []error{nil}, + } + res, rl, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, 
fakeClassConfig{classConfig: map[string]interface{}{"Type": "text", "Model": "ada"}}) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + + assert.Equal(t, true, rl.UpdateWithMissingValues) + assert.Equal(t, 0, rl.RemainingTokens) + assert.Equal(t, 0, rl.RemainingRequests) + assert.Equal(t, 0, rl.LimitTokens) + assert.Equal(t, 0, rl.LimitRequests) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("apiKey", "", "", 0, nullLogger()) + c.buildUrlFn = func(baseURL, resourceName, deploymentID, apiVersion string, isAzure bool) (string, error) { + return server.URL, nil + } + + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + c := New("apiKey", "", "", 0, nullLogger()) + c.buildUrlFn = func(baseURL, resourceName, deploymentID, apiVersion string, isAzure bool) (string, error) { + return server.URL, nil + } + + _, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, + fakeClassConfig{}) + + require.NotNil(t, err) + assert.EqualError(t, err, "connection to: OpenAI API failed with status: 500 error: nope, not gonna happen") + }) + + t.Run("when the server returns an error with request id", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: errors.Errorf("nope, not gonna happen"), + headerRequestID: "some-request-id", + }) + defer server.Close() + c := New("apiKey", "", "", 0, nullLogger()) + c.buildUrlFn = func(baseURL, resourceName, deploymentID, apiVersion string, isAzure bool) (string, 
error) { + return server.URL, nil + } + + _, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, + fakeClassConfig{}) + + require.NotNil(t, err) + assert.EqualError(t, err, "connection to: OpenAI API failed with status: 500 request-id: some-request-id error: nope, not gonna happen") + }) + + t.Run("when OpenAI key is passed using X-Openai-Api-Key header", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", "", "", 0, nullLogger()) + c.buildUrlFn = func(baseURL, resourceName, deploymentID, apiVersion string, isAzure bool) (string, error) { + return server.URL, nil + } + + ctxWithValue := context.WithValue(context.Background(), + "X-Openai-Api-Key", []string{"some-key"}) + + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + Errors: []error{nil}, + } + res, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, + fakeClassConfig{classConfig: map[string]interface{}{"Type": "text", "Model": "ada"}}) + + require.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when OpenAI key is empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := New("", "", "", 0, nullLogger()) + c.buildUrlFn = func(baseURL, resourceName, deploymentID, apiVersion string, isAzure bool) (string, error) { + return server.URL, nil + } + + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{}) + + require.NotNil(t, err) + assert.EqualError(t, err, "API Key: no api key found "+ + "neither in request header: X-Openai-Api-Key "+ + "nor in environment variable under OPENAI_APIKEY") + }) + + t.Run("when X-Openai-Api-Key header is passed but empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer 
server.Close() + c := New("", "", "", 0, nullLogger()) + c.buildUrlFn = func(baseURL, resourceName, deploymentID, apiVersion string, isAzure bool) (string, error) { + return server.URL, nil + } + + ctxWithValue := context.WithValue(context.Background(), + "X-Openai-Api-Key", []string{""}) + + _, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, + fakeClassConfig{classConfig: map[string]interface{}{"Type": "text", "Model": "ada"}}) + + require.NotNil(t, err) + assert.EqualError(t, err, "API Key: no api key found "+ + "neither in request header: X-Openai-Api-Key "+ + "nor in environment variable under OPENAI_APIKEY") + }) + + t.Run("when X-OpenAI-BaseURL header is passed", func(t *testing.T) { + c := New("", "", "", 0, nullLogger()) + + config := ent.VectorizationConfig{ + Type: "text", + Model: "ada", + BaseURL: "http://default-url.com", + } + + ctxWithValue := context.WithValue(context.Background(), + "X-Openai-Baseurl", []string{"http://base-url-passed-in-header.com"}) + + buildURL, err := c.buildURL(ctxWithValue, config) + require.NoError(t, err) + assert.Equal(t, "http://base-url-passed-in-header.com/v1/embeddings", buildURL) + + buildURL, err = c.buildURL(context.TODO(), config) + require.NoError(t, err) + assert.Equal(t, "http://default-url.com/v1/embeddings", buildURL) + }) + + t.Run("when X-Azure-* headers are passed", func(t *testing.T) { + c := New("", "", "", 0, nullLogger()) + + config := ent.VectorizationConfig{ + IsAzure: true, + ApiVersion: "", + } + + ctxWithValue := context.WithValue(context.Background(), + "X-Azure-Deployment-Id", []string{"spoofDeployment"}) + ctxWithValue = context.WithValue(ctxWithValue, + "X-Azure-Resource-Name", []string{"spoofResource"}) + + buildURL, err := c.buildURL(ctxWithValue, config) + require.NoError(t, err) + assert.Equal(t, "https://spoofResource.openai.azure.com/openai/deployments/spoofDeployment/embeddings?api-version=", buildURL) + }) + + t.Run("pass rate limit headers requests", func(t 
*testing.T) { + c := New("", "", "", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), + "X-Openai-Ratelimit-RequestPM-Embedding", []string{"50"}) + + rl := c.GetVectorizerRateLimit(ctxWithValue, fakeClassConfig{}) + assert.Equal(t, 50, rl.LimitRequests) + assert.Equal(t, 50, rl.RemainingRequests) + }) + + t.Run("pass rate limit headers tokens", func(t *testing.T) { + c := New("", "", "", 0, nullLogger()) + + ctxWithValue := context.WithValue(context.Background(), "X-Openai-Ratelimit-TokenPM-Embedding", []string{"60"}) + + rl := c.GetVectorizerRateLimit(ctxWithValue, fakeClassConfig{}) + assert.Equal(t, 60, rl.LimitTokens) + assert.Equal(t, 60, rl.RemainingTokens) + }) +} + +type fakeHandler struct { + t *testing.T + serverError error + headerRequestID string + noRlHeader bool + RlValues string +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.serverError != nil { + embeddingError := map[string]interface{}{ + "message": f.serverError.Error(), + "type": "invalid_request_error", + } + embedding := map[string]interface{}{ + "error": embeddingError, + } + outBytes, err := json.Marshal(embedding) + require.Nil(f.t, err) + + if f.headerRequestID != "" { + w.Header().Add("x-request-id", f.headerRequestID) + } + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + textInputArray := b["input"].([]interface{}) + textInput := textInputArray[0].(string) + assert.Greater(f.t, len(textInput), 0) + + embeddingData := map[string]interface{}{ + "object": textInput, + "index": 0, + "embedding": []float32{0.1, 0.2, 0.3}, + } + embedding := map[string]interface{}{ + "object": "list", + "data": []interface{}{embeddingData}, + } + + outBytes, err := json.Marshal(embedding) + 
require.Nil(f.t, err) + + if !f.noRlHeader { + rlValues := f.RlValues + if f.RlValues == "" { + rlValues = "100" + } + w.Header().Add("x-ratelimit-limit-requests", rlValues) + w.Header().Add("x-ratelimit-limit-tokens", rlValues) + w.Header().Add("x-ratelimit-remaining-requests", rlValues) + w.Header().Add("x-ratelimit-remaining-tokens", rlValues) + } + + w.Write(outBytes) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +func TestGetApiKeyFromContext(t *testing.T) { + t.Run("value from context", func(t *testing.T) { + ctx := context.WithValue(context.Background(), "X-Openai-Api-Key", []string{"key-from-ctx"}) + c := New("", "", "", 0, nullLogger()) + key, err := c.getApiKey(ctx, false) + require.NoError(t, err) + assert.Equal(t, "key-from-ctx", key) + }) + + t.Run("value from env fallback", func(t *testing.T) { + ctx := context.Background() + c := New("env-key", "", "", 0, nullLogger()) + key, err := c.getApiKey(ctx, false) + require.NoError(t, err) + assert.Equal(t, "env-key", key) + }) + + t.Run("no value at all", func(t *testing.T) { + ctx := context.Background() + c := New("", "", "", 0, nullLogger()) + _, err := c.getApiKey(ctx, false) + require.Error(t, err) + assert.Contains(t, err.Error(), "no api key found") + }) +} + +func TestGetOpenAIOrganization(t *testing.T) { + t.Run("from context", func(t *testing.T) { + ctx := context.WithValue(context.Background(), "X-Openai-Organization", []string{"from-context"}) + c := New("", "default-org", "", 0, nullLogger()) + assert.Equal(t, "from-context", c.getOpenAIOrganization(ctx)) + }) + + t.Run("from default", func(t *testing.T) { + ctx := context.Background() + c := New("", "default-org", "", 0, nullLogger()) + assert.Equal(t, "default-org", c.getOpenAIOrganization(ctx)) + }) +} + +func TestGetEmbeddingsRequest(t *testing.T) { + t.Run("Azure true omits model", func(t *testing.T) { + c := New("", "", "", 0, nullLogger()) + req := c.getEmbeddingsRequest([]string{"foo"}, 
"model", true, nil) + assert.Equal(t, []string{"foo"}, req.Input) + assert.Equal(t, (*int64)(nil), req.Dimensions) + assert.Empty(t, req.Model) + }) + t.Run("Non-Azure includes model", func(t *testing.T) { + c := New("", "", "", 0, nullLogger()) + dim := int64(42) + req := c.getEmbeddingsRequest([]string{"foo"}, "model", false, &dim) + assert.Equal(t, []string{"foo"}, req.Input) + assert.Equal(t, "model", req.Model) + assert.Equal(t, &dim, req.Dimensions) + }) +} + +func TestGetApiKeyHeaderAndValue(t *testing.T) { + c := New("", "", "", 0, nullLogger()) + h, v := c.getApiKeyHeaderAndValue("some-key", true) + assert.Equal(t, "api-key", h) + assert.Equal(t, "some-key", v) + + h, v = c.getApiKeyHeaderAndValue("other-key", false) + assert.Equal(t, "Authorization", h) + assert.Equal(t, "Bearer other-key", v) +} + +func TestGetApiKeyHash(t *testing.T) { + c := New("super-secret", "", "", 0, nullLogger()) + hash := c.GetApiKeyHash(context.Background(), fakeClassConfig{}) + assert.NotEqual(t, [32]byte{}, hash) + assert.Equal(t, hash, c.GetApiKeyHash(context.Background(), fakeClassConfig{})) +} + +func TestGetErrorFormat(t *testing.T) { + c := New("", "", "", 0, nullLogger()) + err := c.getError(403, "abc-123", &openAIApiError{Message: "denied"}, false) + assert.Contains(t, err.Error(), "403") + assert.Contains(t, err.Error(), "abc-123") + assert.Contains(t, err.Error(), "denied") +} + +func TestOpenAIApiErrorDecode(t *testing.T) { + tests := []struct { + name string + payload string + expected string + }{ + {"missing code", `{"message": "fail", "type": "err", "param": "x"}`, ""}, + {"numeric code", `{"message": "fail", "type": "err", "param": "x", "code": 500}`, "500"}, + {"string number", `{"message": "fail", "type": "err", "param": "x", "code": "500"}`, "500"}, + {"string literal", `{"message": "fail", "type": "err", "param": "x", "code": "invalid_key"}`, "invalid_key"}, + {"empty string", `{"message": "fail", "type": "err", "param": "x", "code": ""}`, ""}, + {"null 
code", `{"message": "fail", "type": "err", "param": "x", "code": null}`, ""}, + {"code as boolean (invalid)", `{"message": "fail", "type": "err", "param": "x", "code": true}`, ""}, + {"code as array (invalid)", `{"message": "fail", "type": "err", "param": "x", "code": ["bad"]}`, ""}, + {"code as object (invalid)", `{"message": "fail", "type": "err", "param": "x", "code": {"key": "val"}}`, ""}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var got *openAIApiError + err := json.Unmarshal([]byte(tt.payload), &got) + if err != nil && tt.expected != "" { + t.Errorf("unexpected unmarshal error: %v", err) + return + } + if got != nil && got.Code.String() != tt.expected { + t.Errorf("got code %q, expected %q", got.Code.String(), tt.expected) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai_tokens.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai_tokens.go new file mode 100644 index 0000000000000000000000000000000000000000..b2252944d88152f17825a0bbee0c318ffe547fbe --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai_tokens.go @@ -0,0 +1,29 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "strings" + + "github.com/weaviate/tiktoken-go" +) + +func GetTokensCount(model string, input string, tke *tiktoken.Tiktoken) int { + tokensPerMessage := 3 + if strings.HasPrefix(model, "gpt-3.5-turbo") { + tokensPerMessage = 4 + } + + tokensCount := tokensPerMessage + tokensCount += len(tke.Encode(input, nil, nil)) + return tokensCount +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai_tokens_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai_tokens_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5fba1f5dff856138e77d83a64bab963a80b543be --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/clients/openai_tokens_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/tiktoken-go" +) + +func Test_getTokensCount(t *testing.T) { + shortTestText := "I am a short message. Teddy is the best and biggest dog ever." 
+ + tests := []struct { + name string + model string + messages string + want int + wantErr string + }{ + { + name: "text-davinci-002", + model: "text-davinci-002", + messages: shortTestText, + want: 18, + }, + { + name: "gpt-3.5-turbo", + model: "gpt-3.5-turbo", + messages: shortTestText, + want: 19, + }, + { + name: "gpt-4", + model: "gpt-4", + messages: shortTestText, + want: 18, + }, + { + name: "non-existent-model", + model: "non-existent-model", + messages: shortTestText, + wantErr: "no encoding for model non-existent-model", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tke, err := tiktoken.EncodingForModel(tt.model) + if err != nil { + assert.EqualError(t, err, tt.wantErr) + } else { + assert.Nil(t, err) + assert.Equal(t, tt.want, GetTokensCount(tt.model, tt.messages, tke)) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..60d656df42b89631bf9c8f242e73212038d6949e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/config.go @@ -0,0 +1,49 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modopenai + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2vec-openai/ent" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *OpenAIModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": ent.DefaultVectorizeClassName, + "baseURL": ent.DefaultBaseURL, + "model": ent.DefaultOpenAIModel, + } +} + +func (m *OpenAIModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !ent.DefaultPropertyIndexed, + "vectorizePropertyName": ent.DefaultVectorizePropertyName, + } +} + +func (m *OpenAIModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := ent.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..df1e51ce7f3fa40ab80961cc3a3d63e410e41e4d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/class_settings.go @@ -0,0 +1,278 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + DefaultOpenAIDocumentType = "text" + DefaultOpenAIModel = "text-embedding-3-small" + DefaultVectorizeClassName = true + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + DefaultBaseURL = "https://api.openai.com" + DefaultApiVersion = "2024-02-01" + LowerCaseInput = false +) + +const ( + TextEmbedding3Small = "text-embedding-3-small" + TextEmbedding3Large = "text-embedding-3-large" +) + +var ( + TextEmbedding3SmallDefaultDimensions int64 = 1536 + TextEmbedding3LargeDefaultDimensions int64 = 3072 +) + +var availableOpenAITypes = []string{"text", "code"} + +var availableV3Models = []string{ + // new v3 models + TextEmbedding3Small, + TextEmbedding3Large, +} + +var availableV3ModelsDimensions = map[string][]int64{ + TextEmbedding3Small: {512, TextEmbedding3SmallDefaultDimensions}, + TextEmbedding3Large: {256, 1024, TextEmbedding3LargeDefaultDimensions}, +} + +var availableOpenAIModels = []string{ + "ada", // supports 001 and 002 + "babbage", // only supports 001 + "curie", // only supports 001 + "davinci", // only supports 001 +} + +var availableApiVersions = []string{ + "2022-12-01", + "2023-03-15-preview", + "2023-05-15", + "2023-06-01-preview", + "2023-07-01-preview", + "2023-08-01-preview", + "2023-09-01-preview", + "2023-12-01-preview", + "2024-02-15-preview", + "2024-03-01-preview", + "2024-02-01", +} + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, false)} +} + +func (cs *classSettings) Model() string { + return 
cs.BaseClassSettings.GetPropertyAsString("model", DefaultOpenAIModel) +} + +func (cs *classSettings) Type() string { + return cs.BaseClassSettings.GetPropertyAsString("type", DefaultOpenAIDocumentType) +} + +func (cs *classSettings) ModelVersion() string { + defaultVersion := PickDefaultModelVersion(cs.Model(), cs.Type()) + return cs.BaseClassSettings.GetPropertyAsString("modelVersion", defaultVersion) +} + +func (cs *classSettings) ModelStringForAction(action string) string { + if strings.HasPrefix(cs.Model(), "text-embedding-3") || cs.IsThirdPartyProvider() { + // indicates that we handle v3 models + return cs.Model() + } + if cs.ModelVersion() == "002" { + return cs.getModel002String(cs.Model()) + } + return cs.getModel001String(cs.Type(), cs.Model(), action) +} + +func (v *classSettings) getModel001String(docType, model, action string) string { + modelBaseString := "%s-search-%s-%s-001" + if action == "document" { + if docType == "code" { + return fmt.Sprintf(modelBaseString, docType, model, "code") + } + return fmt.Sprintf(modelBaseString, docType, model, "doc") + + } else { + if docType == "code" { + return fmt.Sprintf(modelBaseString, docType, model, "text") + } + return fmt.Sprintf(modelBaseString, docType, model, "query") + } +} + +func (v *classSettings) getModel002String(model string) string { + modelBaseString := "text-embedding-%s-002" + return fmt.Sprintf(modelBaseString, model) +} + +func (cs *classSettings) ResourceName() string { + return cs.BaseClassSettings.GetPropertyAsString("resourceName", "") +} + +func (cs *classSettings) BaseURL() string { + return cs.BaseClassSettings.GetPropertyAsString("baseURL", DefaultBaseURL) +} + +func (cs *classSettings) DeploymentID() string { + return cs.BaseClassSettings.GetPropertyAsString("deploymentId", "") +} + +func (cs *classSettings) ApiVersion() string { + return cs.BaseClassSettings.GetPropertyAsString("apiVersion", DefaultApiVersion) +} + +func (cs *classSettings) IsThirdPartyProvider() bool { + return 
!(strings.Contains(cs.BaseURL(), "api.openai.com") || cs.IsAzure()) +} + +func (cs *classSettings) IsAzure() bool { + return cs.BaseClassSettings.GetPropertyAsBool("isAzure", false) || (cs.ResourceName() != "" && cs.DeploymentID() != "") +} + +func (cs *classSettings) Dimensions() *int64 { + defaultValue := PickDefaultDimensions(cs.Model()) + if cs.IsAzure() { + defaultValue = nil + } + return cs.BaseClassSettings.GetPropertyAsInt64("dimensions", defaultValue) +} + +func (cs *classSettings) Validate(class *models.Class) error { + if err := cs.BaseClassSettings.Validate(class); err != nil { + return err + } + + docType := cs.Type() + if !basesettings.ValidateSetting(docType, availableOpenAITypes) { + return errors.Errorf("wrong OpenAI type name, available model names are: %v", availableOpenAITypes) + } + + model := cs.Model() + // only validate models for openAI endpoints + if !cs.IsThirdPartyProvider() { + availableModels := append(availableOpenAIModels, availableV3Models...) + if !basesettings.ValidateSetting(model, availableModels) { + return errors.Errorf("wrong OpenAI model name, available model names are: %v", availableModels) + } + } + + dimensions := cs.Dimensions() + if !cs.IsThirdPartyProvider() && dimensions != nil { + if !basesettings.ValidateSetting(model, availableV3Models) { + return errors.Errorf("dimensions setting can only be used with V3 embedding models: %v", availableV3Models) + } + availableDimensions := availableV3ModelsDimensions[model] + if !basesettings.ValidateSetting(*dimensions, availableDimensions) { + return errors.Errorf("wrong dimensions setting for %s model, available dimensions are: %v", model, availableDimensions) + } + } + + version := cs.ModelVersion() + if err := cs.validateModelVersion(version, model, docType); err != nil { + return err + } + + if cs.IsAzure() { + err := cs.validateAzureConfig(cs.ResourceName(), cs.DeploymentID(), cs.ApiVersion()) + if err != nil { + return err + } + } + + return nil +} + +func (cs 
*classSettings) validateModelVersion(version, model, docType string) error { + for i := range availableV3Models { + if model == availableV3Models[i] { + return nil + } + } + + if version == "001" { + // no restrictions + return nil + } + + if version == "002" { + // only ada/davinci 002 + if model != "ada" && model != "davinci" { + return fmt.Errorf("unsupported version %s", version) + } + } + + if version == "003" && model != "davinci" { + // only davinci 003 + return fmt.Errorf("unsupported version %s", version) + } + + if version != "002" && version != "003" { + // all other fallback + return fmt.Errorf("model %s is only available in version 001", model) + } + + if docType != "text" { + return fmt.Errorf("ada-002 no longer distinguishes between text/code, use 'text' for all use cases") + } + + return nil +} + +func (cs *classSettings) validateAzureConfig(resourceName, deploymentId, apiVersion string) error { + if (resourceName == "" && deploymentId != "") || (resourceName != "" && deploymentId == "") { + return fmt.Errorf("both resourceName and deploymentId must be provided") + } + if !basesettings.ValidateSetting(apiVersion, availableApiVersions) { + return errors.Errorf("wrong Azure OpenAI apiVersion setting, available api versions are: %v", availableApiVersions) + } + return nil +} + +func PickDefaultModelVersion(model, docType string) string { + for i := range availableV3Models { + if model == availableV3Models[i] { + return "" + } + } + if model == "ada" && docType == "text" { + return "002" + } + // for all other combinations stick with "001" + return "001" +} + +func PickDefaultDimensions(model string) *int64 { + if model == TextEmbedding3Small { + return &TextEmbedding3SmallDefaultDimensions + } + if model == TextEmbedding3Large { + return &TextEmbedding3LargeDefaultDimensions + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/class_settings_test.go 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..de9e11fb7660c8e7fa7705bf6c9d1e80155ce6b9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/class_settings_test.go @@ -0,0 +1,535 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "fmt" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/modules" +) + +func Test_classSettings_Validate(t *testing.T) { + class := &models.Class{ + Class: "test", + Properties: []*models.Property{ + { + DataType: []string{schema.DataTypeText.String()}, + Name: "test", + }, + }, + } + tests := []struct { + name string + cfg moduletools.ClassConfig + wantErr error + }{ + { + name: "text-embedding-3-small", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "text-embedding-3-small", + }, + }, + }, + { + name: "text-embedding-3-small, 512 dimensions", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "text-embedding-3-small", + "dimensions": 512, + }, + }, + }, + { + name: "text-embedding-3-small, wrong dimensions", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "text-embedding-3-small", + "dimensions": 1, + }, + }, + wantErr: errors.New("wrong dimensions setting for text-embedding-3-small model, available dimensions are: [512 1536]"), + }, + { + name: "text-embedding-3-large", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + 
"model": "text-embedding-3-large", + }, + }, + }, + { + name: "text-embedding-3-large, 512 dimensions", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "text-embedding-3-large", + "dimensions": 1024, + }, + }, + }, + { + name: "text-embedding-3-large, wrong dimensions", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "text-embedding-3-large", + "dimensions": 512, + }, + }, + wantErr: errors.New("wrong dimensions setting for text-embedding-3-large model, available dimensions are: [256 1024 3072]"), + }, + { + name: "text-embedding-ada-002", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "ada", + "modelVersion": "002", + }, + }, + }, + { + name: "text-embedding-ada-002 - dimensions error", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "ada", + "dimensions": 512, + }, + }, + wantErr: errors.New("dimensions setting can only be used with V3 embedding models: [text-embedding-3-small text-embedding-3-large]"), + }, + { + name: "custom endpoint - no dimension validation", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "model-that-openai-does-not-have", + "baseURL": "https://something-else.com", + "dimensions": 512, + }, + }, + }, + { + name: "text-embedding-ada-002 - wrong model version", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "ada", + "modelVersion": "003", + }, + }, + wantErr: errors.New("unsupported version 003"), + }, + { + name: "wrong model name", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "unknown-model", + }, + }, + wantErr: errors.New("wrong OpenAI model name, available model names are: [ada babbage curie davinci text-embedding-3-small text-embedding-3-large]"), + }, + { + name: "third party provider", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "model-that-openai-does-not-have", + "baseURL": "https://something-else.com", + }, + 
}, + }, + { + name: "wrong properties", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "text-embedding-3-large", + "properties": "wrong-properties", + }, + }, + wantErr: errors.New("properties field needs to be of array type, got: string"), + }, + { + name: "wrong apiVersion", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "resourceName": "resource", + "deploymentId": "deploymentId", + "apiVersion": "wrong-api-version", + }, + }, + wantErr: errors.New("wrong Azure OpenAI apiVersion setting, available api versions are: " + + "[2022-12-01 2023-03-15-preview 2023-05-15 2023-06-01-preview 2023-07-01-preview 2023-08-01-preview " + + "2023-09-01-preview 2023-12-01-preview 2024-02-15-preview 2024-03-01-preview 2024-02-01]"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cs := NewClassSettings(tt.cfg) + err := cs.Validate(class) + if tt.wantErr != nil { + assert.EqualError(t, err, tt.wantErr.Error()) + } else { + assert.NoError(t, err) + } + }) + } +} + +func Test_classSettings(t *testing.T) { + t.Run("with target vector and properties", func(t *testing.T) { + targetVector := "targetVector" + propertyToIndex := "someProp" + class := &models.Class{ + Class: "MyClass", + VectorConfig: map[string]models.VectorConfig{ + targetVector: { + Vectorizer: map[string]interface{}{ + "my-module": map[string]interface{}{ + "vectorizeClassName": false, + "properties": []interface{}{propertyToIndex}, + }, + }, + VectorIndexType: "hnsw", + }, + }, + Properties: []*models.Property{ + { + Name: propertyToIndex, + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "skip": true, + "vectorizePropertyName": true, + }, + }, + }, + { + Name: "otherProp", + }, + }, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", targetVector, nil) + ic := NewClassSettings(cfg) + + assert.True(t, ic.PropertyIndexed(propertyToIndex)) + assert.True(t, 
ic.VectorizePropertyName(propertyToIndex)) + assert.False(t, ic.PropertyIndexed("otherProp")) + assert.False(t, ic.VectorizePropertyName("otherProp")) + assert.False(t, ic.VectorizeClassName()) + }) +} + +func TestValidateModelVersion(t *testing.T) { + type test struct { + model string + docType string + version string + possible bool + } + + tests := []test{ + // 001 models + {"ada", "text", "001", true}, + {"ada", "code", "001", true}, + {"babbage", "text", "001", true}, + {"babbage", "code", "001", true}, + {"curie", "text", "001", true}, + {"curie", "code", "001", true}, + {"davinci", "text", "001", true}, + {"davinci", "code", "001", true}, + + // 002 models + {"ada", "text", "002", true}, + {"davinci", "text", "002", true}, + {"ada", "code", "002", false}, + {"babbage", "text", "002", false}, + {"babbage", "code", "002", false}, + {"curie", "text", "002", false}, + {"curie", "code", "002", false}, + {"davinci", "code", "002", false}, + + // 003 + {"davinci", "text", "003", true}, + {"ada", "text", "003", false}, + {"babbage", "text", "003", false}, + + // 004 + {"davinci", "text", "004", false}, + {"ada", "text", "004", false}, + {"babbage", "text", "004", false}, + } + + for _, test := range tests { + name := fmt.Sprintf("model=%s docType=%s version=%s", test.model, test.docType, test.version) + t.Run(name, func(t *testing.T) { + err := (&classSettings{}).validateModelVersion(test.version, test.model, test.docType) + if test.possible { + assert.Nil(t, err, "this combination should be possible") + } else { + assert.NotNil(t, err, "this combination should not be possible") + } + }) + } +} + +func Test_getModelString(t *testing.T) { + t.Run("getModelStringDocument", func(t *testing.T) { + type args struct { + docType string + model string + version string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "Document type: text model: ada vectorizationType: document", + args: args{ + docType: "text", + model: "ada", + }, + want: 
"text-search-ada-doc-001", + }, + { + name: "Document type: text model: ada-002 vectorizationType: document", + args: args{ + docType: "text", + model: "ada", + version: "002", + }, + want: "text-embedding-ada-002", + }, + { + name: "Document type: text model: babbage vectorizationType: document", + args: args{ + docType: "text", + model: "babbage", + }, + want: "text-search-babbage-doc-001", + }, + { + name: "Document type: text model: curie vectorizationType: document", + args: args{ + docType: "text", + model: "curie", + }, + want: "text-search-curie-doc-001", + }, + { + name: "Document type: text model: davinci vectorizationType: document", + args: args{ + docType: "text", + model: "davinci", + }, + want: "text-search-davinci-doc-001", + }, + { + name: "Document type: code model: ada vectorizationType: code", + args: args{ + docType: "code", + model: "ada", + }, + want: "code-search-ada-code-001", + }, + { + name: "Document type: code model: babbage vectorizationType: code", + args: args{ + docType: "code", + model: "babbage", + }, + want: "code-search-babbage-code-001", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cs := NewClassSettings(&fakeClassConfig{ + classConfig: map[string]interface{}{ + "type": tt.args.docType, + "model": tt.args.model, + "modelVersion": tt.args.version, + }, + }) + if got := cs.ModelStringForAction("document"); got != tt.want { + t.Errorf("vectorizer.getModelString() = %v, want %v", got, tt.want) + } + }) + } + }) + + t.Run("getModelStringQuery", func(t *testing.T) { + type args struct { + docType string + model string + version string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "Document type: text model: ada vectorizationType: query", + args: args{ + docType: "text", + model: "ada", + }, + want: "text-search-ada-query-001", + }, + { + name: "Document type: text model: babbage vectorizationType: query", + args: args{ + docType: "text", + model: "babbage", + }, + 
want: "text-search-babbage-query-001", + }, + { + name: "Document type: text model: curie vectorizationType: query", + args: args{ + docType: "text", + model: "curie", + }, + want: "text-search-curie-query-001", + }, + { + name: "Document type: text model: davinci vectorizationType: query", + args: args{ + docType: "text", + model: "davinci", + }, + want: "text-search-davinci-query-001", + }, + { + name: "Document type: code model: ada vectorizationType: text", + args: args{ + docType: "code", + model: "ada", + }, + want: "code-search-ada-text-001", + }, + { + name: "Document type: code model: babbage vectorizationType: text", + args: args{ + docType: "code", + model: "babbage", + }, + want: "code-search-babbage-text-001", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cs := NewClassSettings(&fakeClassConfig{ + classConfig: map[string]interface{}{ + "type": tt.args.docType, + "model": tt.args.model, + "modelVersion": tt.args.version, + }, + }) + if got := cs.ModelStringForAction("query"); got != tt.want { + t.Errorf("vectorizer.getModelString() = %v, want %v", got, tt.want) + } + }) + } + }) +} + +func TestPickDefaultModelVersion(t *testing.T) { + t.Run("ada with text", func(t *testing.T) { + version := PickDefaultModelVersion("ada", "text") + assert.Equal(t, "002", version) + }) + + t.Run("ada with code", func(t *testing.T) { + version := PickDefaultModelVersion("ada", "code") + assert.Equal(t, "001", version) + }) + + t.Run("with curie", func(t *testing.T) { + version := PickDefaultModelVersion("curie", "text") + assert.Equal(t, "001", version) + }) +} + +func TestClassSettings(t *testing.T) { + type testCase struct { + expectedBaseURL string + expectedDimensions int64 + expectedNilDimensions bool + cfg moduletools.ClassConfig + } + tests := []testCase{ + { + cfg: fakeClassConfig{ + classConfig: make(map[string]interface{}), + }, + expectedBaseURL: DefaultBaseURL, + expectedDimensions: 1536, + }, + { + cfg: fakeClassConfig{ + 
classConfig: map[string]interface{}{ + "baseURL": "https://proxy.weaviate.dev", + }, + }, + expectedBaseURL: "https://proxy.weaviate.dev", + expectedDimensions: 1536, + }, + { + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "baseURL": "https://proxy.weaviate.dev", + "dimensions": 768, + }, + }, + expectedBaseURL: "https://proxy.weaviate.dev", + expectedDimensions: 768, + }, + { + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "baseURL": "https://proxy.weaviate.dev", + "resourceName": "resourceName", + "deploymentId": "deploymentId", + }, + }, + expectedBaseURL: "https://proxy.weaviate.dev", + expectedNilDimensions: true, + }, + { + cfg: fakeClassConfig{ + classConfig: map[string]interface{}{ + "baseURL": "https://proxy.weaviate.dev", + "resourceName": "resourceName", + "deploymentId": "deploymentId", + "dimensions": 1024, + }, + }, + expectedBaseURL: "https://proxy.weaviate.dev", + expectedNilDimensions: false, + expectedDimensions: 1024, + }, + } + + for _, tt := range tests { + ic := NewClassSettings(tt.cfg) + assert.Equal(t, tt.expectedBaseURL, ic.BaseURL()) + if tt.expectedNilDimensions { + assert.Nil(t, ic.Dimensions()) + } else { + assert.Equal(t, tt.expectedDimensions, *ic.Dimensions()) + } + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5a6b0275a637b585b5623f751a53f67d473dfee7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/fakes_for_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/vectorization_config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/vectorization_config.go new file mode 100644 index 0000000000000000000000000000000000000000..656e868beb8c7615bef1a223772df0d400f29322 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/vectorization_config.go @@ -0,0 +1,22 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationConfig struct { + Type, Model, ModelVersion, ModelString, ResourceName string + BaseURL string + DeploymentID string `json:"deploymentId"` + ApiVersion string + IsAzure bool + IsThirdPartyProvider bool + Dimensions *int64 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/vectorization_result.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/vectorization_result.go new file mode 100644 index 0000000000000000000000000000000000000000..7065dfe9697f6213ca0a5b69f444839d036540a4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/ent/vectorization_result.go @@ -0,0 +1,82 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "net/http" + "strconv" + "time" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/usecases/logrusext" + "github.com/weaviate/weaviate/usecases/modulecomponents" +) + +const dummyLimit = 10000000 + +func GetRateLimitsFromHeader(l *logrusext.Sampler, header http.Header, isAzure bool) *modulecomponents.RateLimits { + requestsReset, err := time.ParseDuration(header.Get("x-ratelimit-reset-requests")) + if err != nil { + requestsReset = 0 + } + tokensReset, err := time.ParseDuration(header.Get("x-ratelimit-reset-tokens")) + if err != nil { + // azure doesn't include the x-ratelimit-reset-tokens header, fallback to default + tokensReset = time.Duration(1) * time.Minute + } + limitRequests := getHeaderInt(header, "x-ratelimit-limit-requests") + limitTokens := getHeaderInt(header, "x-ratelimit-limit-tokens") + remainingRequests := getHeaderInt(header, "x-ratelimit-remaining-requests") + remainingTokens := getHeaderInt(header, 
"x-ratelimit-remaining-tokens") + + // azure returns 0 as limit, make sure this does not block anything by setting a high value + if isAzure { + limitRequests = dummyLimit + remainingRequests = dummyLimit + } + + updateWithMissingValues := false + // the absolute limits should never be 0, while it is possible to use up all tokens/requests which results in the + // remaining tokens/requests to be 0 + if limitRequests <= 0 || limitTokens <= 0 || remainingRequests < 0 || remainingTokens < 0 { + updateWithMissingValues = true + + // logging all headers as there should not be anything sensitive according to the documentation: + // https://platform.openai.com/docs/api-reference/debugging-requests + l.WithSampling(func(l logrus.FieldLogger) { + l.WithField("headers", header). + Debug("rate limit headers are missing or invalid, going to keep using the old values") + }) + } + + return &modulecomponents.RateLimits{ + LimitRequests: limitRequests, + LimitTokens: limitTokens, + RemainingRequests: remainingRequests, + RemainingTokens: remainingTokens, + ResetRequests: time.Now().Add(requestsReset), + ResetTokens: time.Now().Add(tokensReset), + UpdateWithMissingValues: updateWithMissingValues, + } +} + +func getHeaderInt(header http.Header, key string) int { + value := header.Get(key) + if value == "" { + return -1 + } + i, err := strconv.Atoi(value) + if err != nil { + return -1 + } + return i +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..f1126807c06a66da22ef06c4facc2ef36c743778 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/module.go @@ -0,0 +1,170 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 
Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modopenai + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-openai/clients" + "github.com/weaviate/weaviate/modules/text2vec-openai/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" + "github.com/weaviate/weaviate/usecases/monitoring" +) + +const ( + Name = "text2vec-openai" +) + +var batchSettings = batch.Settings{ + TokenMultiplier: 1, + MaxTimePerBatch: float64(10), + MaxObjectsPerBatch: 2000, // https://platform.openai.com/docs/api-reference/embeddings/create + // cant find any info about this on the website besides this forum thread:https://community.openai.com/t/max-total-embeddings-tokens-per-request/1254699 + // we had customers run into this error, too + MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 300000 }, + HasTokenLimit: true, + ReturnsRateLimit: true, +} + +func New() *OpenAIModule { + return &OpenAIModule{} +} + +type OpenAIModule struct { + vectorizer text2vecbase.TextVectorizerBatch[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func (m *OpenAIModule) Name() string { + return Name +} + +func (m *OpenAIModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2ManyVec +} + +func (m *OpenAIModule) Init(ctx context.Context, + params 
moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init additional properties provider") + } + + return nil +} + +func (m *OpenAIModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *OpenAIModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + openAIApiKey := os.Getenv("OPENAI_APIKEY") + openAIOrganization := os.Getenv("OPENAI_ORGANIZATION") + azureApiKey := os.Getenv("AZURE_APIKEY") + + client := clients.New(openAIApiKey, openAIOrganization, azureApiKey, timeout, logger) + + m.vectorizer = text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, batchSettings, logger, m.Name()), + batch.ReturnBatchTokenizer(batchSettings.TokenMultiplier, m.Name(), ent.LowerCaseInput), + ) + + m.metaProvider = client + + return nil +} + +func (m *OpenAIModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m *OpenAIModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + monitoring.GetMetrics().ModuleExternalRequestSingleCount.WithLabelValues(m.Name(), "vectorizeObject").Inc() + icheck := ent.NewClassSettings(cfg) + return m.vectorizer.Object(ctx, obj, cfg, 
icheck) +} + +func (m *OpenAIModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + monitoring.GetMetrics().ModuleExternalBatchLength.WithLabelValues("vectorizeBatch", m.Name()).Observe(float64(len(objs))) + monitoring.GetMetrics().ModuleExternalRequestBatchCount.WithLabelValues(m.Name(), "vectorizeBatch").Inc() + vecs, errs := m.vectorizer.ObjectBatch(ctx, objs, skipObject, cfg) + return vecs, nil, errs +} + +func (m *OpenAIModule) MetaInfo() (map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *OpenAIModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +func (m *OpenAIModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + monitoring.GetMetrics().ModuleExternalRequestSingleCount.WithLabelValues(m.Name(), "vectorizeTexts").Inc() + monitoring.GetMetrics().ModuleExternalRequestSize.WithLabelValues(m.Name(), "vectorizeTexts").Observe(float64(len(input))) + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +func (m *OpenAIModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, nil, nil +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[]float32](New()) + _ = modulecapabilities.GraphQLArguments(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..87aa1d6bf679f5642b2e9379c9f43f2782ec70ab --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modopenai + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *OpenAIModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *OpenAIModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *OpenAIModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/vectorizer/batch_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/vectorizer/batch_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8a0655bbbb4c903a511f064162aca1a971c4b131 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/vectorizer/batch_test.go @@ -0,0 +1,157 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +func TestBatch(t *testing.T) { + cfg := &fakeClassConfig{vectorizePropertyName: false, classConfig: map[string]interface{}{"vectorizeClassName": false}} + logger, _ := test.NewNullLogger() + cases := []struct { + name string + objects []*models.Object + skip []bool + wantErrors map[int]error + deadline time.Duration + }{ + {name: "skip all", objects: []*models.Object{{Class: "Car"}}, skip: []bool{true}}, + {name: "skip first", objects: []*models.Object{{Class: "Car"}, {Class: "Car", Properties: map[string]interface{}{"test": "test"}}}, skip: []bool{true, false}}, + {name: "one object errors", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "test"}}, {Class: "Car", Properties: map[string]interface{}{"test": "error something"}}}, skip: []bool{false, false}, wantErrors: map[int]error{1: fmt.Errorf("something")}}, + {name: "first object errors", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "error something"}}, {Class: "Car", Properties: map[string]interface{}{"test": "test"}}}, skip: []bool{false, false}, wantErrors: map[int]error{0: fmt.Errorf("something")}}, + {name: "vectorize all", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "test"}}, {Class: "Car", Properties: map[string]interface{}{"test": "something"}}}, skip: []bool{false, false}}, + {name: "multiple vectorizer batches", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 50"}}, // set limit so next 3 objects are one 
batch + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 100"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "first object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "fourth object second batch"}}, + }, skip: []bool{false, false, false, false, false, false, false, false, false}}, + {name: "multiple vectorizer batches with skips and errors", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 50"}}, // set limit so next 3 objects are one batch + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "error something"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 100"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "first object second batch"}}, // rate is 100 again + {Class: "Car", Properties: map[string]interface{}{"test": "second object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "fourth object second batch"}}, + }, skip: []bool{false, true, false, false, false, false, true, false, false}, wantErrors: map[int]error{3: fmt.Errorf("something")}}, + {name: "token too long", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 5"}}, 
// set limit + {Class: "Car", Properties: map[string]interface{}{"test": "long long long long, long, long, long, long"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "short"}}, + }, skip: []bool{false, false, false}, wantErrors: map[int]error{1: fmt.Errorf("text too long for vectorization from provider: got 43, total limit: 10, remaining: 5")}}, + {name: "token too long, last item in batch", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 5"}}, // set limit + {Class: "Car", Properties: map[string]interface{}{"test": "short"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "long long long long, long, long, long, long"}}, + }, skip: []bool{false, false, false}, wantErrors: map[int]error{2: fmt.Errorf("text too long for vectorization from provider: got 43, total limit: 10, remaining: 5")}}, + {name: "skip last item", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "fir test object"}}, // set limit + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + }, skip: []bool{false, false, true}}, + {name: "deadline", deadline: 400 * time.Millisecond, objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 15"}}, // set limit so next two items are in a batch + {Class: "Car", Properties: map[string]interface{}{"test": "wait 500"}}, // needs to be higher than deadline, so all remaining objects time out + {Class: "Car", Properties: map[string]interface{}{"test": "long long long"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "next batch, will be aborted due to context deadline"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "skipped"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "has error again"}}, + }, skip: []bool{false, false, false, false, true, 
false}, wantErrors: map[int]error{3: fmt.Errorf("context deadline exceeded"), 5: fmt.Errorf("context deadline exceeded")}}, + {name: "azure limit without total Limit", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "azure_tokens 20"}}, // set azure limit without total Limit + {Class: "Car", Properties: map[string]interface{}{"test": "long long long long"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "azure_tokens 0"}}, // simulate token limit hit + {Class: "Car", Properties: map[string]interface{}{"test": "something"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "skipped"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "all works"}}, + }, skip: []bool{false, false, false, false, true, false}}, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + client := &fakeBatchClient{} + + v := text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, + batch.Settings{MaxObjectsPerBatch: 100, MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 500000 }, MaxTimePerBatch: 10, HasTokenLimit: true, ReturnsRateLimit: true}, + logger, "test"), + batch.ReturnBatchTokenizer(1, "", false), + ) + deadline := time.Now().Add(10 * time.Second) + if tt.deadline != 0 { + deadline = time.Now().Add(tt.deadline) + } + + ctx, cancl := context.WithDeadline(context.Background(), deadline) + vecs, errs := v.ObjectBatch( + ctx, tt.objects, tt.skip, cfg, + ) + + require.Len(t, errs, len(tt.wantErrors)) + require.Len(t, vecs, len(tt.objects)) + + for i := range tt.objects { + if tt.wantErrors[i] != nil { + require.Equal(t, tt.wantErrors[i], errs[i]) + } else if tt.skip[i] { + require.Nil(t, vecs[i]) + } else { + require.NotNil(t, vecs[i]) + } + } + cancl() + }) + } +} + +func BenchmarkEncoderCache(b *testing.B) { + client := &fakeBatchClient{} + logger, _ := test.NewNullLogger() + + v := text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, 
+ batch.Settings{MaxObjectsPerBatch: 100, MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 500000 }, MaxTimePerBatch: 10, HasTokenLimit: true, ReturnsRateLimit: true}, + logger, "test"), + batch.ReturnBatchTokenizer(1, "", false), + ) + deadline := time.Now().Add(10 * time.Second) + cfg := &fakeClassConfig{vectorizePropertyName: false, classConfig: map[string]interface{}{"vectorizeClassName": false}} + b.ResetTimer() + for i := 0; i < b.N; i++ { + ctx, cancl := context.WithDeadline(context.Background(), deadline) + vecs, errs := v.ObjectBatch( + ctx, []*models.Object{{Class: "Car"}}, []bool{false}, cfg, + ) + cancl() + require.Len(b, errs, 0) + require.Len(b, vecs, 1) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..3a24f04cefd423a21300708537ee95a88d31f8e2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-openai/vectorizer/fakes_for_test.go @@ -0,0 +1,172 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +type fakeBatchClient struct { + defaultResetRate int + defaultRPM int + defaultTPM int + rateLimit *modulecomponents.RateLimits +} + +func (c *fakeBatchClient) VectorizeQuery(ctx context.Context, input []string, cfg moduletools.ClassConfig) (*modulecomponents.VectorizationResult[[]float32], error) { + panic("implement me") +} + +func (c *fakeBatchClient) Vectorize(ctx context.Context, + text []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + if c.defaultResetRate == 0 { + c.defaultResetRate = 60 + } + + var reqError error + + vectors := make([][]float32, len(text)) + errors := make([]error, len(text)) + if c.rateLimit == nil { + c.rateLimit = &modulecomponents.RateLimits{LastOverwrite: time.Now(), RemainingTokens: 100, RemainingRequests: 100, LimitTokens: 200, ResetTokens: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second), ResetRequests: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second)} + } else { + c.rateLimit.ResetTokens = time.Now().Add(time.Duration(c.defaultResetRate) * time.Second) + } + for i := range text { + if len(text[i]) >= len("error ") && text[i][:6] == "error " { + errors[i] = fmt.Errorf("%s", text[i][6:]) + continue + } + + tok := len("tokens ") + if len(text[i]) >= tok && text[i][:tok] == "tokens " { + rate, _ := strconv.Atoi(text[i][tok:]) + c.rateLimit.RemainingTokens = rate + c.rateLimit.LimitTokens = 2 * rate + } else if req := len("requests "); len(text[i]) >= req && text[i][:req] == "requests " { + reqs, _ := strconv.Atoi(strings.Split(text[i][req:], " ")[0]) + 
c.rateLimit.RemainingRequests = reqs + c.rateLimit.LimitRequests = 2 * reqs + } else if reqErr := len("ReqError "); len(text[i]) >= reqErr && text[i][:reqErr] == "ReqError " { + reqError = fmt.Errorf("%v", strings.Split(text[i][reqErr:], " ")[0]) + } else if len(text[i]) >= len("wait ") && text[i][:5] == "wait " { + wait, _ := strconv.Atoi(text[i][5:]) + time.Sleep(time.Duration(wait) * time.Millisecond) + } else { + // refresh the remaining token + secondsSinceLastRefresh := time.Since(c.rateLimit.LastOverwrite) + fraction := secondsSinceLastRefresh.Seconds() / time.Until(c.rateLimit.ResetTokens).Seconds() + if fraction > 1 { + c.rateLimit.RemainingTokens = c.rateLimit.LimitTokens + } else { + c.rateLimit.RemainingTokens += int(float64(c.rateLimit.LimitTokens) * fraction / float64(c.defaultResetRate)) + } + if len(text[i]) > c.rateLimit.LimitTokens || len(text[i]) > c.rateLimit.RemainingTokens { + errors[i] = fmt.Errorf("text too long for vectorization from provider: got %v, total limit: %v, remaining: %v", len(text[i]), c.rateLimit.LimitTokens, c.rateLimit.RemainingTokens) + } + + } + vectors[i] = []float32{0, 1, 2, 3} + } + c.rateLimit.LastOverwrite = time.Now() + return &modulecomponents.VectorizationResult[[]float32]{ + Vector: vectors, + Dimensions: 4, + Text: text, + Errors: errors, + }, c.rateLimit, 0, reqError +} + +func (c *fakeBatchClient) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + return &modulecomponents.RateLimits{RemainingTokens: c.defaultTPM, RemainingRequests: c.defaultRPM, LimitTokens: c.defaultTPM, LimitRequests: c.defaultRPM, ResetTokens: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second), ResetRequests: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second)} +} + +func (c *fakeBatchClient) GetApiKeyHash(ctx context.Context, cfg moduletools.ClassConfig) [32]byte { + return [32]byte{} +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + 
vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) PropertyIndexed(property string) bool { + return !((property == f.skippedProperty) || (property == f.excludedProperty)) +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) VectorizeClassName() bool { + return f.classConfig["vectorizeClassName"].(bool) +} + +func (f fakeClassConfig) VectorizePropertyName(propertyName string) bool { + return f.vectorizePropertyName +} + +func (f fakeClassConfig) Properties() []string { + return nil +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..bdf994ad70f65a9c92b026798ef6aa50b4016ecc --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/clients/meta.go @@ -0,0 +1,78 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| 
\_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "fmt" + "strings" + "sync" + + enterrors "github.com/weaviate/weaviate/entities/errors" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/transformers" + + "github.com/pkg/errors" +) + +func (v *vectorizer) MetaInfo() (map[string]any, error) { + type nameMetaErr struct { + name string + meta map[string]any + err error + } + + endpoints := map[string]string{} + if v.originPassage != v.originQuery { + endpoints["passage"] = v.urlBuilder.GetPassageURL("/meta", transformers.VectorizationConfig{}) + endpoints["query"] = v.urlBuilder.GetQueryURL("/meta", transformers.VectorizationConfig{}) + } else { + endpoints[""] = v.urlBuilder.GetPassageURL("/meta", transformers.VectorizationConfig{}) + } + + var wg sync.WaitGroup + ch := make(chan nameMetaErr, len(endpoints)) + for serviceName, endpoint := range endpoints { + serviceName, endpoint := serviceName, endpoint + wg.Add(1) + enterrors.GoWrapper(func() { + defer wg.Done() + meta, err := v.client.MetaInfo(endpoint) + ch <- nameMetaErr{serviceName, meta, err} + }, v.logger) + } + wg.Wait() + close(ch) + + metas := map[string]interface{}{} + var errs []string + for nme := range ch { + if nme.err != nil { + prefix := "" + if nme.name != "" { + prefix = "[" + nme.name + "] " + } + errs = append(errs, fmt.Sprintf("%s%v", prefix, nme.err.Error())) + } + if nme.meta != nil { + metas[nme.name] = nme.meta + } + } + + if len(errs) > 0 { + return nil, errors.New(strings.Join(errs, ", ")) + } + if len(metas) == 1 { + for _, meta := range metas { + return meta.(map[string]any), nil + } + } + return metas, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/clients/meta_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/clients/meta_test.go new file mode 100644 index 
// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io

package clients

import (
	"net/http"
	"net/http/httptest"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

// TestGetMeta covers the vectorizer's MetaInfo method for the three
// deployment modes: a single shared inference container, separate
// passage/query containers, and containers that report not-ready (503).
func TestGetMeta(t *testing.T) {
	t.Run("when common server is providing meta", func(t *testing.T) {
		server := httptest.NewServer(&testMetaHandler{t: t})
		defer server.Close()
		// same URL for passage and query: the client treats this as one
		// common inference service
		v := New(server.URL, server.URL, 0, nullLogger())
		meta, err := v.MetaInfo()

		assert.Nil(t, err)
		assert.NotNil(t, meta)

		model := extractChildMap(t, meta, "model")
		assert.NotNil(t, model["_name_or_path"])
		assert.NotNil(t, model["architectures"])
		assert.Contains(t, model["architectures"], "DistilBertModel")
		ID2Label := extractChildMap(t, model, "id2label")
		assert.NotNil(t, ID2Label["0"])
		assert.NotNil(t, ID2Label["1"])
	})

	t.Run("when passage and query servers are providing meta", func(t *testing.T) {
		serverPassage := httptest.NewServer(&testMetaHandler{t: t, modelType: "passage"})
		serverQuery := httptest.NewServer(&testMetaHandler{t: t, modelType: "query"})
		defer serverPassage.Close()
		defer serverQuery.Close()
		v := New(serverPassage.URL, serverQuery.URL, 0, nullLogger())
		meta, err := v.MetaInfo()

		assert.Nil(t, err)
		assert.NotNil(t, meta)

		// with distinct endpoints the meta payload is nested under
		// "passage" and "query" keys, one per backing service
		passage := extractChildMap(t, meta, "passage")
		passageModel := extractChildMap(t, passage, "model")
		assert.NotNil(t, passageModel["_name_or_path"])
		assert.NotNil(t, passageModel["architectures"])
		assert.Contains(t, passageModel["architectures"], "DPRContextEncoder")
		passageID2Label := extractChildMap(t, passageModel, "id2label")
		assert.NotNil(t, passageID2Label["0"])
		assert.NotNil(t, passageID2Label["1"])

		query := extractChildMap(t, meta, "query")
		queryModel := extractChildMap(t, query, "model")
		assert.NotNil(t, queryModel["_name_or_path"])
		assert.NotNil(t, queryModel["architectures"])
		assert.Contains(t, queryModel["architectures"], "DPRQuestionEncoder")
		queryID2Label := extractChildMap(t, queryModel, "id2label")
		assert.NotNil(t, queryID2Label["0"])
		assert.NotNil(t, queryID2Label["1"])
	})

	t.Run("when passage and query servers are unavailable", func(t *testing.T) {
		// a readyTime in the future makes the handlers respond 503
		rt := time.Now().Add(time.Hour)
		serverPassage := httptest.NewServer(&testMetaHandler{t: t, modelType: "passage", readyTime: rt})
		serverQuery := httptest.NewServer(&testMetaHandler{t: t, modelType: "query", readyTime: rt})
		defer serverPassage.Close()
		defer serverQuery.Close()
		v := New(serverPassage.URL, serverQuery.URL, 0, nullLogger())
		meta, err := v.MetaInfo()

		// both failures are expected to be reported, each prefixed with
		// its service name
		assert.NotNil(t, err)
		assert.Contains(t, err.Error(), "[passage] unexpected status code '503' of meta request")
		assert.Contains(t, err.Error(), "[query] unexpected status code '503' of meta request")
		assert.Nil(t, meta)
	})
}

// testMetaHandler serves /meta with a canned transformers config JSON,
// selected by modelType ("passage", "query", or "" for a common model).
type testMetaHandler struct {
	t *testing.T
	// the test handler will report as not ready before the time has passed
	readyTime time.Time
	modelType string
}

func (h *testMetaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	assert.Equal(h.t, "/meta", r.URL.String())
	assert.Equal(h.t, http.MethodGet, r.Method)

	// simulate a service that is still starting up
	if time.Since(h.readyTime) < 0 {
		w.WriteHeader(http.StatusServiceUnavailable)
		return
	}

	w.Write([]byte(h.metaInfo()))
}

// metaInfo returns the canned JSON body for the handler's modelType:
// DPR context/question encoder configs for "passage"/"query", and a
// DistilBERT config for the common (default) case.
func (h *testMetaHandler) metaInfo() string {
	switch h.modelType {
	case "passage":
		return `{
  "model": {
    "return_dict": true,
    "output_hidden_states": false,
    "output_attentions": false,
    "torchscript": false,
    "torch_dtype": "float32",
    "use_bfloat16": false,
    "pruned_heads": {},
    "tie_word_embeddings": true,
    "is_encoder_decoder": false,
    "is_decoder": false,
    "cross_attention_hidden_size": null,
    "add_cross_attention": false,
    "tie_encoder_decoder": false,
    "max_length": 20,
    "min_length": 0,
    "do_sample": false,
    "early_stopping": false,
    "num_beams": 1,
    "num_beam_groups": 1,
    "diversity_penalty": 0,
    "temperature": 1,
    "top_k": 50,
    "top_p": 1,
    "repetition_penalty": 1,
    "length_penalty": 1,
    "no_repeat_ngram_size": 0,
    "encoder_no_repeat_ngram_size": 0,
    "bad_words_ids": null,
    "num_return_sequences": 1,
    "chunk_size_feed_forward": 0,
    "output_scores": false,
    "return_dict_in_generate": false,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "remove_invalid_values": false,
    "architectures": [
      "DPRContextEncoder"
    ],
    "finetuning_task": null,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "tokenizer_class": null,
    "prefix": null,
    "bos_token_id": null,
    "pad_token_id": 0,
    "eos_token_id": null,
    "sep_token_id": null,
    "decoder_start_token_id": null,
    "task_specific_params": null,
    "problem_type": null,
    "_name_or_path": "./models/model",
    "transformers_version": "4.16.2",
    "gradient_checkpointing": false,
    "model_type": "dpr",
    "vocab_size": 30522,
    "hidden_size": 768,
    "num_hidden_layers": 12,
    "num_attention_heads": 12,
    "hidden_act": "gelu",
    "intermediate_size": 3072,
    "hidden_dropout_prob": 0.1,
    "attention_probs_dropout_prob": 0.1,
    "max_position_embeddings": 512,
    "type_vocab_size": 2,
    "initializer_range": 0.02,
    "layer_norm_eps": 1e-12,
    "projection_dim": 0,
    "position_embedding_type": "absolute"
  }
}`
	case "query":
		return `{
  "model": {
    "return_dict": true,
    "output_hidden_states": false,
    "output_attentions": false,
    "torchscript": false,
    "torch_dtype": "float32",
    "use_bfloat16": false,
    "pruned_heads": {},
    "tie_word_embeddings": true,
    "is_encoder_decoder": false,
    "is_decoder": false,
    "cross_attention_hidden_size": null,
    "add_cross_attention": false,
    "tie_encoder_decoder": false,
    "max_length": 20,
    "min_length": 0,
    "do_sample": false,
    "early_stopping": false,
    "num_beams": 1,
    "num_beam_groups": 1,
    "diversity_penalty": 0,
    "temperature": 1,
    "top_k": 50,
    "top_p": 1,
    "repetition_penalty": 1,
    "length_penalty": 1,
    "no_repeat_ngram_size": 0,
    "encoder_no_repeat_ngram_size": 0,
    "bad_words_ids": null,
    "num_return_sequences": 1,
    "chunk_size_feed_forward": 0,
    "output_scores": false,
    "return_dict_in_generate": false,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "remove_invalid_values": false,
    "architectures": [
      "DPRQuestionEncoder"
    ],
    "finetuning_task": null,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "tokenizer_class": null,
    "prefix": null,
    "bos_token_id": null,
    "pad_token_id": 0,
    "eos_token_id": null,
    "sep_token_id": null,
    "decoder_start_token_id": null,
    "task_specific_params": null,
    "problem_type": null,
    "_name_or_path": "./models/model",
    "transformers_version": "4.16.2",
    "gradient_checkpointing": false,
    "model_type": "dpr",
    "vocab_size": 30522,
    "hidden_size": 768,
    "num_hidden_layers": 12,
    "num_attention_heads": 12,
    "hidden_act": "gelu",
    "intermediate_size": 3072,
    "hidden_dropout_prob": 0.1,
    "attention_probs_dropout_prob": 0.1,
    "max_position_embeddings": 512,
    "type_vocab_size": 2,
    "initializer_range": 0.02,
    "layer_norm_eps": 1e-12,
    "projection_dim": 0,
    "position_embedding_type": "absolute"
  }
}`
	default:
		return `{
  "model": {
    "_name_or_path": "distilbert-base-uncased",
    "activation": "gelu",
    "add_cross_attention": false,
    "architectures": [
      "DistilBertModel"
    ],
    "attention_dropout": 0.1,
    "bad_words_ids": null,
    "bos_token_id": null,
    "chunk_size_feed_forward": 0,
    "decoder_start_token_id": null,
    "dim": 768,
    "diversity_penalty": 0,
    "do_sample": false,
    "dropout": 0.1,
    "early_stopping": false,
    "encoder_no_repeat_ngram_size": 0,
    "eos_token_id": null,
    "finetuning_task": null,
    "hidden_dim": 3072,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "initializer_range": 0.02,
    "is_decoder": false,
    "is_encoder_decoder": false,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "length_penalty": 1,
    "max_length": 20,
    "max_position_embeddings": 512,
    "min_length": 0,
    "model_type": "distilbert",
    "n_heads": 12,
    "n_layers": 6,
    "no_repeat_ngram_size": 0,
    "num_beam_groups": 1,
    "num_beams": 1,
    "num_return_sequences": 1,
    "output_attentions": false,
    "output_hidden_states": false,
    "output_scores": false,
    "pad_token_id": 0,
    "prefix": null,
    "pruned_heads": {},
    "qa_dropout": 0.1,
    "repetition_penalty": 1,
    "return_dict": true,
    "return_dict_in_generate": false,
    "sep_token_id": null,
    "seq_classif_dropout": 0.2,
    "sinusoidal_pos_embds": false,
    "task_specific_params": null,
    "temperature": 1,
    "tie_encoder_decoder": false,
    "tie_weights_": true,
    "tie_word_embeddings": true,
    "tokenizer_class": null,
    "top_k": 50,
    "top_p": 1,
    "torchscript": false,
    "transformers_version": "4.3.2",
    "use_bfloat16": false,
    "vocab_size": 30522,
    "xla_device": null
  }
}`
	}
}

// extractChildMap asserts that parent[name] exists and is itself a JSON
// object, and returns it for further inspection.
func extractChildMap(t *testing.T, parent map[string]interface{}, name string) map[string]interface{} {
	assert.NotNil(t, parent[name])
	child, ok := parent[name].(map[string]interface{})
	assert.True(t, ok)
	assert.NotNil(t, child)

	return child
}
// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io

package clients

import (
	"context"
	"strings"
	"sync"
	"time"

	enterrors "github.com/weaviate/weaviate/entities/errors"
	"github.com/weaviate/weaviate/usecases/modulecomponents/clients/transformers"

	"github.com/pkg/errors"
)

// WaitForStartup polls the inference service(s)' readiness endpoint until
// every service reports ready or initCtx expires. With distinct passage
// and query origins both are probed concurrently; with a single shared
// origin only one (unnamed) probe is made. On failure the errors of all
// services are joined into a single error, each prefixed with its
// service name.
func (v *vectorizer) WaitForStartup(initCtx context.Context,
	interval time.Duration,
) error {
	// map key is the service name used to prefix log/error messages;
	// the empty key means "single common service" (no prefix)
	endpoints := map[string]string{}
	if v.originPassage != v.originQuery {
		endpoints["passage"] = v.urlBuilder.GetPassageURL("/.well-known/ready", transformers.VectorizationConfig{})
		endpoints["query"] = v.urlBuilder.GetQueryURL("/.well-known/ready", transformers.VectorizationConfig{})
	} else {
		endpoints[""] = v.urlBuilder.GetPassageURL("/.well-known/ready", transformers.VectorizationConfig{})
	}

	// buffered so each prober can report one error without blocking
	ch := make(chan error, len(endpoints))
	var wg sync.WaitGroup
	for serviceName, endpoint := range endpoints {
		serviceName, endpoint := serviceName, endpoint // capture loop vars for the goroutine
		wg.Add(1)
		enterrors.GoWrapper(func() {
			defer wg.Done()
			if err := v.waitFor(initCtx, interval, endpoint, serviceName); err != nil {
				ch <- err
			}
		}, v.logger)
	}
	wg.Wait()
	close(ch)

	if len(ch) > 0 {
		var errs []string
		for err := range ch {
			errs = append(errs, err.Error())
		}
		return errors.New(strings.Join(errs, ", "))
	}
	return nil
}

// waitFor polls a single readiness endpoint every interval until it
// responds ready, or until initCtx is done, in which case the last
// readiness error (if any) is wrapped and returned. serviceName, when
// non-empty, prefixes log and error messages as "[name] ".
func (v *vectorizer) waitFor(initCtx context.Context, interval time.Duration, endpoint string, serviceName string) error {
	ticker := time.NewTicker(interval)
	defer ticker.Stop()
	expired := initCtx.Done()
	var lastErr error
	prefix := ""
	if serviceName != "" {
		prefix = "[" + serviceName + "] "
	}

	// NOTE(review): the first readiness check only happens after one full
	// interval has elapsed, since a new ticker fires no immediate tick.
	for {
		select {
		case <-ticker.C:
			lastErr = v.client.CheckReady(initCtx, endpoint)
			if lastErr == nil {
				return nil
			}
			v.logger.
				WithField("action", "transformer_remote_wait_for_startup").
				WithError(lastErr).Warnf("%stransformer remote inference service not ready", prefix)
		case <-expired:
			return errors.Wrapf(lastErr, "%sinit context expired before remote was ready", prefix)
		}
	}
}
// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io

package clients

import (
	"context"
	"net/http"
	"net/http/httptest"
	"regexp"
	"strings"
	"testing"
	"time"

	"github.com/sirupsen/logrus"
	"github.com/sirupsen/logrus/hooks/test"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// TestWaitForStartup covers readiness polling in both deployment modes
// (one shared service vs. separate passage/query services): immediate
// readiness, unreachable hosts, alive-but-not-ready services, and
// services that become ready while polling is in progress.
func TestWaitForStartup(t *testing.T) {
	t.Run("when common server is immediately ready", func(t *testing.T) {
		server := httptest.NewServer(&testReadyHandler{t: t})
		defer server.Close()
		v := New(server.URL, server.URL, 0, nullLogger())
		err := v.WaitForStartup(context.Background(), 150*time.Millisecond)

		assert.Nil(t, err)
	})

	t.Run("when passage and query servers are immediately ready", func(t *testing.T) {
		serverPassage := httptest.NewServer(&testReadyHandler{t: t})
		serverQuery := httptest.NewServer(&testReadyHandler{t: t})
		defer serverPassage.Close()
		defer serverQuery.Close()
		v := New(serverPassage.URL, serverQuery.URL, 0, nullLogger())
		err := v.WaitForStartup(context.Background(), 150*time.Millisecond)

		assert.Nil(t, err)
	})

	t.Run("when common server is down", func(t *testing.T) {
		url := "http://nothing-running-at-this-url"
		v := New(url, url, 0, nullLogger())
		ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
		defer cancel()
		err := v.WaitForStartup(ctx, 50*time.Millisecond)

		require.NotNil(t, err, nullLogger())
		assert.Contains(t, err.Error(), "init context expired before remote was ready: send check ready request")
		assertContainsEither(t, err.Error(), "dial tcp", "context deadline exceeded")
		// single shared service: errors carry no service-name prefix
		assert.NotContains(t, err.Error(), "[passage]")
		assert.NotContains(t, err.Error(), "[query]")
	})

	t.Run("when passage and query servers are down", func(t *testing.T) {
		urlPassage := "http://nothing-running-at-this-url"
		urlQuery := "http://nothing-running-at-this-url-either"
		v := New(urlPassage, urlQuery, 0, nullLogger())
		ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
		defer cancel()
		err := v.WaitForStartup(ctx, 50*time.Millisecond)

		require.NotNil(t, err, nullLogger())
		// both failures are reported, each with its service-name prefix
		assert.Contains(t, err.Error(), "[passage] init context expired before remote was ready: send check ready request")
		assert.Contains(t, err.Error(), "[query] init context expired before remote was ready: send check ready request")
		assertContainsEither(t, err.Error(), "dial tcp", "context deadline exceeded")
	})

	t.Run("when common server is alive, but not ready", func(t *testing.T) {
		server := httptest.NewServer(&testReadyHandler{
			t:         t,
			readyTime: time.Now().Add(time.Hour),
		})
		defer server.Close()
		v := New(server.URL, server.URL, 0, nullLogger())
		ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
		defer cancel()
		err := v.WaitForStartup(ctx, 50*time.Millisecond)

		require.NotNil(t, err)
		assert.Contains(t, err.Error(), "init context expired before remote was ready")
		assertContainsEither(t, err.Error(), "not ready: status 503", "context deadline exceeded")
		assert.NotContains(t, err.Error(), "[passage]")
		assert.NotContains(t, err.Error(), "[query]")
	})

	t.Run("when passage and query servers are alive, but not ready", func(t *testing.T) {
		rt := time.Now().Add(time.Hour)
		serverPassage := httptest.NewServer(&testReadyHandler{
			t:         t,
			readyTime: rt,
		})
		serverQuery := httptest.NewServer(&testReadyHandler{
			t:         t,
			readyTime: rt,
		})
		defer serverPassage.Close()
		defer serverQuery.Close()
		v := New(serverPassage.URL, serverQuery.URL, 0, nullLogger())
		ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
		defer cancel()
		err := v.WaitForStartup(ctx, 50*time.Millisecond)

		require.NotNil(t, err)
		assert.Contains(t, err.Error(), "[passage] init context expired before remote was ready")
		assert.Contains(t, err.Error(), "[query] init context expired before remote was ready")
		assertContainsEither(t, err.Error(), "not ready: status 503", "context deadline exceeded")
	})

	t.Run("when passage and query servers are alive, but query one is not ready", func(t *testing.T) {
		serverPassage := httptest.NewServer(&testReadyHandler{t: t})
		serverQuery := httptest.NewServer(&testReadyHandler{
			t:         t,
			readyTime: time.Now().Add(1 * time.Minute),
		})
		defer serverPassage.Close()
		defer serverQuery.Close()
		v := New(serverPassage.URL, serverQuery.URL, 0, nullLogger())
		ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
		defer cancel()
		err := v.WaitForStartup(ctx, 50*time.Millisecond)

		require.NotNil(t, err)
		// only the failing service shows up in the error
		assert.Contains(t, err.Error(), "[query] init context expired before remote was ready")
		assertContainsEither(t, err.Error(), "not ready: status 503", "context deadline exceeded")
		assert.NotContains(t, err.Error(), "[passage]")
	})

	t.Run("when common server is initially not ready, but then becomes ready", func(t *testing.T) {
		server := httptest.NewServer(&testReadyHandler{
			t:         t,
			readyTime: time.Now().Add(100 * time.Millisecond),
		})
		v := New(server.URL, server.URL, 0, nullLogger())
		defer server.Close()
		ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
		defer cancel()
		err := v.WaitForStartup(ctx, 50*time.Millisecond)

		require.Nil(t, err)
	})

	t.Run("when passage and query servers are initially not ready, but then become ready", func(t *testing.T) {
		serverPassage := httptest.NewServer(&testReadyHandler{
			t:         t,
			readyTime: time.Now().Add(100 * time.Millisecond),
		})
		serverQuery := httptest.NewServer(&testReadyHandler{
			t:         t,
			readyTime: time.Now().Add(150 * time.Millisecond),
		})
		defer serverPassage.Close()
		defer serverQuery.Close()
		v := New(serverPassage.URL, serverQuery.URL, 0, nullLogger())
		ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
		defer cancel()
		err := v.WaitForStartup(ctx, 50*time.Millisecond)

		require.Nil(t, err)
	})
}

// testReadyHandler serves /.well-known/ready: 503 before readyTime has
// passed, 204 afterwards.
type testReadyHandler struct {
	t *testing.T
	// the test handler will report as not ready before the time has passed
	readyTime time.Time
}

func (f *testReadyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	assert.Equal(f.t, "/.well-known/ready", r.URL.String())
	assert.Equal(f.t, http.MethodGet, r.Method)

	if time.Since(f.readyTime) < 0 {
		w.WriteHeader(http.StatusServiceUnavailable)
	} else {
		w.WriteHeader(http.StatusNoContent)
	}
}

// nullLogger returns a logger that discards all output, for use in tests.
func nullLogger() logrus.FieldLogger {
	l, _ := test.NewNullLogger()
	return l
}

// assertContainsEither asserts that str matches at least one of the
// given substrings (joined into a single alternation regexp).
func assertContainsEither(t *testing.T, str string, contains ...string) {
	reg := regexp.MustCompile(strings.Join(contains, "|"))
	assert.Regexp(t, reg, str)
}
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "time" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/transformers" +) + +type vectorizer struct { + originPassage string + originQuery string + client *transformers.Client + urlBuilder *transformers.URLBuilder + logger logrus.FieldLogger +} + +func New(originPassage, originQuery string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + urlBuilder := transformers.NewURLBuilder(originPassage, originQuery) + return &vectorizer{ + originPassage: originPassage, + originQuery: originQuery, + urlBuilder: urlBuilder, + client: transformers.New(urlBuilder, timeout, logger), + logger: logger, + } +} + +func (v *vectorizer) VectorizeObject(ctx context.Context, input string, + config transformers.VectorizationConfig, +) (*transformers.VectorizationResult, error) { + return v.client.VectorizeObject(ctx, input, config) +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, input string, + config transformers.VectorizationConfig, +) (*transformers.VectorizationResult, error) { + return v.client.VectorizeQuery(ctx, input, config) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/clients/transformers_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/clients/transformers_test.go new file mode 100644 index 0000000000000000000000000000000000000000..07fc198c2fd2711fee6dd8825a22483e126d8d2d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/clients/transformers_test.go @@ -0,0 +1,146 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io

package clients

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"testing"
	"time"

	"github.com/pkg/errors"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/weaviate/weaviate/usecases/modulecomponents/clients/transformers"
)

// TestClient covers VectorizeObject against a fake /vectors endpoint:
// the happy path, the optional dimensions parameter, context expiry,
// and server-side errors.
func TestClient(t *testing.T) {
	t.Run("when all is fine", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := New(server.URL, server.URL, 0, nullLogger())
		expected := &transformers.VectorizationResult{
			Text:       "This is my text",
			Vector:     []float32{0.1, 0.2, 0.3},
			Dimensions: 3,
		}
		res, err := c.VectorizeObject(context.Background(), "This is my text",
			transformers.VectorizationConfig{
				PoolingStrategy: "masked_mean",
			})

		assert.Nil(t, err)
		assert.Equal(t, expected, res)
	})

	t.Run("when all is fine with dimensions parameter", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := New(server.URL, server.URL, 0, nullLogger())
		dimensions := int64(2)
		// the fake handler truncates the vector to the requested size
		expected := &transformers.VectorizationResult{
			Text:       "This is my text",
			Vector:     []float32{0.1, 0.2},
			Dimensions: int(dimensions),
		}
		res, err := c.VectorizeObject(context.Background(), "This is my text",
			transformers.VectorizationConfig{
				PoolingStrategy: "masked_mean",
				Dimensions:      &dimensions,
			})

		assert.Nil(t, err)
		assert.Equal(t, expected, res)
	})

	t.Run("when the context is expired", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{t: t})
		defer server.Close()
		c := New(server.URL, server.URL, 0, nullLogger())
		// a deadline of "now" makes the request fail before it is sent
		ctx, cancel := context.WithDeadline(context.Background(), time.Now())
		defer cancel()

		_, err := c.VectorizeObject(ctx, "This is my text", transformers.VectorizationConfig{})

		require.NotNil(t, err)
		assert.Contains(t, err.Error(), "context deadline exceeded")
	})

	t.Run("when the server returns an error", func(t *testing.T) {
		server := httptest.NewServer(&fakeHandler{
			t:           t,
			serverError: errors.Errorf("nope, not gonna happen"),
		})
		defer server.Close()
		c := New(server.URL, server.URL, 0, nullLogger())
		_, err := c.VectorizeObject(context.Background(), "This is my text",
			transformers.VectorizationConfig{})

		require.NotNil(t, err)
		assert.Contains(t, err.Error(), "nope, not gonna happen")
	})
}

// fakeHandler serves /vectors, echoing the request text back with a
// canned vector; when serverError is set it responds 500 instead.
type fakeHandler struct {
	t           *testing.T
	serverError error
}

func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	assert.Equal(f.t, "/vectors", r.URL.String())
	assert.Equal(f.t, http.MethodPost, r.Method)

	if f.serverError != nil {
		w.WriteHeader(http.StatusInternalServerError)
		w.Write([]byte(fmt.Sprintf(`{"error":"%s"}`, f.serverError.Error())))
		return
	}

	bodyBytes, err := io.ReadAll(r.Body)
	require.Nil(f.t, err)
	defer r.Body.Close()

	var b map[string]any
	require.Nil(f.t, json.Unmarshal(bodyBytes, &b))

	textInput := b["text"].(string)
	assert.Greater(f.t, len(textInput), 0)

	pooling := b["config"].(map[string]any)["pooling_strategy"].(string)
	assert.Equal(f.t, "masked_mean", pooling)

	out := map[string]any{
		"text":   textInput,
		"dims":   3,
		"vector": []float32{0.1, 0.2, 0.3},
	}

	// JSON numbers decode as float64
	dimensions, ok := b["config"].(map[string]any)["dimensions"].(float64)
	if ok {
		assert.Equal(f.t, float64(2), dimensions)
		// in case of passed dimensions parameter adjust response
		out = map[string]any{
			"text":   textInput,
			"dims":   dimensions,
			"vector": []float32{0.1, 0.2},
		}
	}

	outBytes, err := json.Marshal(out)
	require.Nil(f.t, err)

	w.Write(outBytes)
}
b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/config.go @@ -0,0 +1,47 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modtransformers + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/modules/text2vec-transformers/vectorizer" +) + +func (m *TransformersModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": vectorizer.DefaultVectorizeClassName, + "poolingStrategy": vectorizer.DefaultPoolingStrategy, + } +} + +func (m *TransformersModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !vectorizer.DefaultPropertyIndexed, + "vectorizePropertyName": vectorizer.DefaultVectorizePropertyName, + } +} + +func (m *TransformersModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := vectorizer.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/config_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/config_test.go new file mode 100644 index 0000000000000000000000000000000000000000..de01dfdc05ee84a68812229549f27232d8f05e46 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/config_test.go @@ -0,0 +1,35 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V 
V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modtransformers + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/schema" +) + +func TestConfigDefaults(t *testing.T) { + t.Run("for properties", func(t *testing.T) { + def := New().ClassConfigDefaults() + + assert.Equal(t, true, def["vectorizeClassName"]) + assert.Equal(t, "masked_mean", def["poolingStrategy"]) + }) + + t.Run("for the class", func(t *testing.T) { + dt := schema.DataTypeText + def := New().PropertyConfigDefaults(&dt) + assert.Equal(t, false, def["vectorizePropertyName"]) + assert.Equal(t, false, def["skip"]) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/module.go new file mode 100644 index 0000000000000000000000000000000000000000..715912395dccb8438ad0b1df076ea20a27ea1011 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/module.go @@ -0,0 +1,190 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io

package modtransformers

import (
	"context"
	"os"
	"time"

	"github.com/pkg/errors"
	"github.com/sirupsen/logrus"

	entcfg "github.com/weaviate/weaviate/entities/config"
	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/modulecapabilities"
	"github.com/weaviate/weaviate/entities/moduletools"
	"github.com/weaviate/weaviate/modules/text2vec-transformers/clients"
	"github.com/weaviate/weaviate/modules/text2vec-transformers/vectorizer"
	"github.com/weaviate/weaviate/usecases/modulecomponents/additional"
	"github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase"
)

// Name is this module's registered identifier.
const Name = "text2vec-transformers"

// New returns an empty module; Init must be called before any use.
func New() *TransformersModule {
	return &TransformersModule{}
}

// TransformersModule provides text vectorization backed by remote
// transformers inference container(s), configured through the
// TRANSFORMERS_*_INFERENCE_API environment variables (see initVectorizer).
type TransformersModule struct {
	vectorizer                   text2vecbase.TextVectorizer[[]float32]
	metaProvider                 text2vecbase.MetaProvider
	graphqlProvider              modulecapabilities.GraphQLArguments
	searcher                     modulecapabilities.Searcher[[]float32]
	nearTextTransformer          modulecapabilities.TextTransform
	logger                       logrus.FieldLogger
	additionalPropertiesProvider modulecapabilities.AdditionalProperties
}

// Name implements modulecapabilities.Module.
func (m *TransformersModule) Name() string {
	return Name
}

// Type reports this module as a text-to-vector module.
func (m *TransformersModule) Type() modulecapabilities.ModuleType {
	return modulecapabilities.Text2Vec
}

// Init sets up the remote vectorizer client (optionally waiting for the
// inference service(s) to become ready) and the additional-properties
// provider.
func (m *TransformersModule) Init(ctx context.Context,
	params moduletools.ModuleInitParams,
) error {
	m.logger = params.GetLogger()

	if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil {
		return errors.Wrap(err, "init vectorizer")
	}

	if err := m.initAdditionalPropertiesProvider(); err != nil {
		return errors.Wrap(err, "init additional properties provider")
	}

	return nil
}

// InitExtension picks up a "nearText" text transformer from a sibling
// module (if any provides one) and then wires up the GraphQL provider.
func (m *TransformersModule) InitExtension(modules []modulecapabilities.Module) error {
	for _, module := range modules {
		if module.Name() == m.Name() {
			continue
		}
		if arg, ok := module.(modulecapabilities.TextTransformers); ok {
			if arg != nil && arg.TextTransformers() != nil {
				m.nearTextTransformer = arg.TextTransformers()["nearText"]
			}
		}
	}

	if err := m.initNearText(); err != nil {
		return errors.Wrap(err, "init graphql provider")
	}
	return nil
}

// initVectorizer resolves the inference endpoint(s) from environment
// variables: either TRANSFORMERS_INFERENCE_API (one shared service) or
// both TRANSFORMERS_PASSAGE_INFERENCE_API and
// TRANSFORMERS_QUERY_INFERENCE_API (split services) — but never a mix.
func (m *TransformersModule) initVectorizer(ctx context.Context, timeout time.Duration,
	logger logrus.FieldLogger,
) error {
	// TODO: gh-1486 proper config management
	uriPassage := os.Getenv("TRANSFORMERS_PASSAGE_INFERENCE_API")
	uriQuery := os.Getenv("TRANSFORMERS_QUERY_INFERENCE_API")
	uriCommon := os.Getenv("TRANSFORMERS_INFERENCE_API")

	if uriCommon == "" {
		// split-service mode: both endpoints must be present
		if uriPassage == "" && uriQuery == "" {
			return errors.Errorf("required variable TRANSFORMERS_INFERENCE_API or both variables TRANSFORMERS_PASSAGE_INFERENCE_API and TRANSFORMERS_QUERY_INFERENCE_API are not set")
		}
		if uriPassage != "" && uriQuery == "" {
			return errors.Errorf("required variable TRANSFORMERS_QUERY_INFERENCE_API is not set")
		}
		if uriPassage == "" && uriQuery != "" {
			return errors.Errorf("required variable TRANSFORMERS_PASSAGE_INFERENCE_API is not set")
		}
	} else {
		// shared-service mode: mixing with split vars is rejected
		if uriPassage != "" || uriQuery != "" {
			return errors.Errorf("either variable TRANSFORMERS_INFERENCE_API or both variables TRANSFORMERS_PASSAGE_INFERENCE_API and TRANSFORMERS_QUERY_INFERENCE_API should be set")
		}
		uriPassage = uriCommon
		uriQuery = uriCommon
	}

	// waiting for startup is on by default; it can be disabled via env
	waitForStartup := true
	if envWaitForStartup := os.Getenv("TRANSFORMERS_WAIT_FOR_STARTUP"); envWaitForStartup != "" {
		waitForStartup = entcfg.Enabled(envWaitForStartup)
	}

	client := clients.New(uriPassage, uriQuery, timeout, logger)
	if waitForStartup {
		if err := client.WaitForStartup(ctx, 1*time.Second); err != nil {
			return errors.Wrap(err, "init remote vectorizer")
		}
	}

	m.vectorizer = vectorizer.New(client)
	// the client doubles as the /meta information provider
	m.metaProvider = client

	return nil
}

// initAdditionalPropertiesProvider wires the shared text2vec
// additional-properties provider.
func (m *TransformersModule) initAdditionalPropertiesProvider() error {
	m.additionalPropertiesProvider = additional.NewText2VecProvider()
	return nil
}

// VectorizeObject vectorizes a single object using the class config.
func (m *TransformersModule) VectorizeObject(ctx context.Context,
	obj *models.Object, cfg moduletools.ClassConfig,
) ([]float32, models.AdditionalProperties, error) {
	return m.vectorizer.Object(ctx, obj, cfg)
}

// VectorizeBatch is _slower_ if many requests are done in parallel. So do all objects sequentially
func (m *TransformersModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) {
	vecs := make([][]float32, len(objs))
	addProps := make([]models.AdditionalProperties, len(objs))
	// error should be the exception so dont preallocate
	errs := make(map[int]error, 0)
	for i, obj := range objs {
		// skipped objects keep their zero-value entries in vecs/addProps
		if skipObject[i] {
			continue
		}
		vec, addProp, err := m.vectorizer.Object(ctx, obj, cfg)
		if err != nil {
			errs[i] = err
			continue
		}
		addProps[i] = addProp
		vecs[i] = vec
	}

	return vecs, addProps, errs
}

// MetaInfo proxies the inference service(s)' meta information.
func (m *TransformersModule) MetaInfo() (map[string]interface{}, error) {
	return m.metaProvider.MetaInfo()
}

// AdditionalProperties exposes the shared text2vec additional properties.
func (m *TransformersModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty {
	return m.additionalPropertiesProvider.AdditionalProperties()
}

// VectorizeInput vectorizes a single raw input string.
func (m *TransformersModule) VectorizeInput(ctx context.Context,
	input string, cfg moduletools.ClassConfig,
) ([]float32, error) {
	return m.vectorizer.Texts(ctx, []string{input}, cfg)
}

// VectorizableProperties reports that all properties are vectorizable.
func (m *TransformersModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) {
	return true, nil, nil
}

// verify we implement the modules.Module interface
var (
	_ = modulecapabilities.Module(New())
	_ = modulecapabilities.Vectorizer[[]float32](New())
	_ = modulecapabilities.MetaProvider(New())
)
b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..d80b156a3c69a1d21d8c56bc70a65096f9f04b73 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modtransformers + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *TransformersModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *TransformersModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *TransformersModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..45be1ef0c4d0258077e503a5b3cdf45992e018e8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/class_settings.go @@ -0,0 +1,76 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 
2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "errors" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + DefaultPropertyIndexed = true + DefaultVectorizeClassName = true + DefaultVectorizePropertyName = false + DefaultPoolingStrategy = "masked_mean" +) + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, false)} +} + +func (ic *classSettings) PoolingStrategy() string { + return ic.BaseClassSettings.GetPropertyAsString("poolingStrategy", DefaultPoolingStrategy) +} + +func (ic *classSettings) InferenceURL() string { + return ic.getSetting("inferenceUrl") +} + +func (ic *classSettings) PassageInferenceURL() string { + return ic.getSetting("passageInferenceUrl") +} + +func (ic *classSettings) QueryInferenceURL() string { + return ic.getSetting("queryInferenceUrl") +} + +func (ic *classSettings) Dimensions() *int64 { + return ic.BaseClassSettings.GetPropertyAsInt64("dimensions", nil) +} + +func (ic *classSettings) getSetting(property string) string { + return ic.BaseClassSettings.GetPropertyAsString(property, "") +} + +func (ic *classSettings) Validate(class *models.Class) error { + if err := ic.BaseClassSettings.Validate(class); err != nil { + return err + } + if ic.InferenceURL() != "" && (ic.PassageInferenceURL() != "" || ic.QueryInferenceURL() != "") { + return errors.New("either inferenceUrl or passageInferenceUrl together with queryInferenceUrl needs to be set, not both") + } + if ic.PassageInferenceURL() == "" && ic.QueryInferenceURL() != "" { + return errors.New("queryInferenceUrl is set but passageInferenceUrl is empty, both needs to 
be set") + } + if ic.PassageInferenceURL() != "" && ic.QueryInferenceURL() == "" { + return errors.New("passageInferenceUrl is set but queryInferenceUrl is empty, both needs to be set") + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..75ed88b86fa77b29dd06c1ad5a48984ad0a108ab --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/class_settings_test.go @@ -0,0 +1,324 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/modules" +) + +func TestClassSettings(t *testing.T) { + t.Run("with all defaults", func(t *testing.T) { + class := &models.Class{ + Class: "MyClass", + Properties: []*models.Property{{ + Name: "someProp", + }}, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", "", nil) + ic := NewClassSettings(cfg) + + assert.True(t, ic.PropertyIndexed("someProp")) + assert.False(t, ic.VectorizePropertyName("someProp")) + assert.True(t, ic.VectorizeClassName()) + assert.Equal(t, ic.PoolingStrategy(), "masked_mean") + }) + + t.Run("with a nil config", func(t *testing.T) { + // this is the case if we were running in a situation such as a + // cross-class vectorization of search time, as is the case with Explore + // {}, we then expect all 
default values + + ic := NewClassSettings(nil) + + assert.True(t, ic.PropertyIndexed("someProp")) + assert.False(t, ic.VectorizePropertyName("someProp")) + assert.True(t, ic.VectorizeClassName()) + assert.Equal(t, ic.PoolingStrategy(), "masked_mean") + }) + + t.Run("with all explicit config matching the defaults", func(t *testing.T) { + class := &models.Class{ + Class: "MyClass", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "vectorizeClassName": true, + "poolingStrategy": "masked_mean", + }, + }, + Properties: []*models.Property{{ + Name: "someProp", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "skip": false, + "vectorizePropertyName": false, + }, + }, + }}, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", "", nil) + ic := NewClassSettings(cfg) + + assert.True(t, ic.PropertyIndexed("someProp")) + assert.False(t, ic.VectorizePropertyName("someProp")) + assert.True(t, ic.VectorizeClassName()) + assert.Equal(t, ic.PoolingStrategy(), "masked_mean") + }) + + t.Run("with all explicit config using non-default values", func(t *testing.T) { + class := &models.Class{ + Class: "MyClass", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "vectorizeClassName": false, + "poolingStrategy": "cls", + }, + }, + Properties: []*models.Property{{ + Name: "someProp", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "skip": true, + "vectorizePropertyName": true, + }, + }, + }}, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", "", nil) + ic := NewClassSettings(cfg) + + assert.False(t, ic.PropertyIndexed("someProp")) + assert.True(t, ic.VectorizePropertyName("someProp")) + assert.False(t, ic.VectorizeClassName()) + assert.Equal(t, ic.PoolingStrategy(), "cls") + }) + + t.Run("with target vector and properties", func(t *testing.T) { + targetVector := "targetVector" + propertyToIndex := "someProp" + 
class := &models.Class{ + Class: "MyClass", + VectorConfig: map[string]models.VectorConfig{ + targetVector: { + Vectorizer: map[string]interface{}{ + "my-module": map[string]interface{}{ + "vectorizeClassName": false, + "properties": []interface{}{propertyToIndex}, + }, + }, + VectorIndexType: "hnsw", + }, + }, + Properties: []*models.Property{ + { + Name: propertyToIndex, + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "skip": true, + "vectorizePropertyName": true, + }, + }, + }, + { + Name: "otherProp", + }, + }, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", targetVector, nil) + ic := NewClassSettings(cfg) + + assert.True(t, ic.PropertyIndexed(propertyToIndex)) + assert.True(t, ic.VectorizePropertyName(propertyToIndex)) + assert.False(t, ic.PropertyIndexed("otherProp")) + assert.False(t, ic.VectorizePropertyName("otherProp")) + assert.False(t, ic.VectorizeClassName()) + }) + + t.Run("with inferenceUrl setting", func(t *testing.T) { + class := &models.Class{ + Class: "MyClass", + VectorConfig: map[string]models.VectorConfig{ + "withInferenceUrl": { + Vectorizer: map[string]interface{}{ + "my-module": map[string]interface{}{ + "vectorizeClassName": false, + "poolingStrategy": "cls", + "inferenceUrl": "http://inference.url", + }, + }, + }, + "withPassageAndQueryInferenceUrl": { + Vectorizer: map[string]interface{}{ + "my-module": map[string]interface{}{ + "vectorizeClassName": false, + "poolingStrategy": "cls", + "passageInferenceUrl": "http://passage.inference.url", + "queryInferenceUrl": "http://query.inference.url", + }, + }, + }, + }, + + Properties: []*models.Property{{ + Name: "someProp", + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "skip": true, + "vectorizePropertyName": true, + }, + }, + }}, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", "withInferenceUrl", nil) + ic := NewClassSettings(cfg) + + assert.False(t, 
ic.PropertyIndexed("someProp")) + assert.True(t, ic.VectorizePropertyName("someProp")) + assert.False(t, ic.VectorizeClassName()) + assert.Equal(t, ic.PoolingStrategy(), "cls") + assert.Equal(t, ic.InferenceURL(), "http://inference.url") + assert.Empty(t, ic.PassageInferenceURL()) + assert.Empty(t, ic.QueryInferenceURL()) + + cfg = modules.NewClassBasedModuleConfig(class, "my-module", "tenant", "withPassageAndQueryInferenceUrl", nil) + ic = NewClassSettings(cfg) + + assert.False(t, ic.PropertyIndexed("someProp")) + assert.True(t, ic.VectorizePropertyName("someProp")) + assert.False(t, ic.VectorizeClassName()) + assert.Equal(t, ic.PoolingStrategy(), "cls") + assert.Empty(t, ic.InferenceURL()) + assert.Equal(t, ic.PassageInferenceURL(), "http://passage.inference.url") + assert.Equal(t, ic.QueryInferenceURL(), "http://query.inference.url") + }) +} + +func Test_classSettings_Validate(t *testing.T) { + tests := []struct { + name string + vectorizer map[string]interface{} + wantErr error + }{ + { + name: "only inference url", + vectorizer: map[string]interface{}{ + "vectorizeClassName": false, + "poolingStrategy": "cls", + "inferenceUrl": "http://inference.url", + }, + }, + { + name: "only passage and query inference urls", + vectorizer: map[string]interface{}{ + "vectorizeClassName": false, + "poolingStrategy": "cls", + "passageInferenceUrl": "http://passage.inference.url", + "queryInferenceUrl": "http://query.inference.url", + }, + }, + { + name: "error - all inference urls", + vectorizer: map[string]interface{}{ + "vectorizeClassName": false, + "poolingStrategy": "cls", + "inferenceUrl": "http://inference.url", + "passageInferenceUrl": "http://passage.inference.url", + "queryInferenceUrl": "http://query.inference.url", + }, + wantErr: errors.New("either inferenceUrl or passageInferenceUrl together with queryInferenceUrl needs to be set, not both"), + }, + { + name: "error - all inference urls, without passage", + vectorizer: map[string]interface{}{ + 
"vectorizeClassName": false, + "poolingStrategy": "cls", + "inferenceUrl": "http://inference.url", + "queryInferenceUrl": "http://query.inference.url", + }, + wantErr: errors.New("either inferenceUrl or passageInferenceUrl together with queryInferenceUrl needs to be set, not both"), + }, + { + name: "error - all inference urls, without query", + vectorizer: map[string]interface{}{ + "vectorizeClassName": false, + "poolingStrategy": "cls", + "inferenceUrl": "http://inference.url", + "passageInferenceUrl": "http://passage.inference.url", + }, + wantErr: errors.New("either inferenceUrl or passageInferenceUrl together with queryInferenceUrl needs to be set, not both"), + }, + { + name: "error - passage inference url set but not query", + vectorizer: map[string]interface{}{ + "vectorizeClassName": false, + "poolingStrategy": "cls", + "passageInferenceUrl": "http://passage.inference.url", + }, + wantErr: errors.New("passageInferenceUrl is set but queryInferenceUrl is empty, both needs to be set"), + }, + { + name: "error - query inference url set but not passage", + vectorizer: map[string]interface{}{ + "vectorizeClassName": false, + "poolingStrategy": "cls", + "queryInferenceUrl": "http://passage.inference.url", + }, + wantErr: errors.New("queryInferenceUrl is set but passageInferenceUrl is empty, both needs to be set"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + class := &models.Class{ + Class: "MyClass", + VectorConfig: map[string]models.VectorConfig{ + "namedVector": { + Vectorizer: map[string]interface{}{ + "my-module": tt.vectorizer, + }, + }, + }, + Properties: []*models.Property{{ + Name: "someProp", + DataType: []string{schema.DataTypeText.String()}, + ModuleConfig: map[string]interface{}{ + "my-module": map[string]interface{}{ + "skip": false, + "vectorizePropertyName": false, + }, + }, + }}, + } + + cfg := modules.NewClassBasedModuleConfig(class, "my-module", "tenant", "namedVector", nil) + ic := NewClassSettings(cfg) + err 
:= ic.Validate(class) + if tt.wantErr != nil { + require.Error(t, err) + assert.EqualError(t, err, tt.wantErr.Error()) + } else { + assert.Nil(t, err) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9b4c799f62ffe733067902a5d677c442b5e934a3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/fakes_for_test.go @@ -0,0 +1,99 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/transformers" +) + +type fakeClient struct { + lastInput string + lastConfig transformers.VectorizationConfig +} + +func (c *fakeClient) VectorizeObject(ctx context.Context, + text string, cfg transformers.VectorizationConfig, +) (*transformers.VectorizationResult, error) { + c.lastInput = text + c.lastConfig = cfg + return &transformers.VectorizationResult{ + Vector: []float32{0, 1, 2, 3}, + Dimensions: 4, + Text: text, + }, nil +} + +func (c *fakeClient) VectorizeQuery(ctx context.Context, + text string, cfg transformers.VectorizationConfig, +) (*transformers.VectorizationResult, error) { + return c.VectorizeObject(ctx, text, cfg) +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizeClassName bool + vectorizePropertyName bool + skippedProperty string + excludedProperty string + poolingStrategy string +} + +func (f fakeClassConfig) Class() 
map[string]interface{} { + classSettings := map[string]interface{}{ + "vectorizeClassName": f.vectorizeClassName, + "poolingStrategy": f.poolingStrategy, + } + return classSettings +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/objects.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/objects.go new file mode 100644 index 0000000000000000000000000000000000000000..76ec7366b4f542c769e73ed5f00746f62ee52a26 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/objects.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/transformers" + objectsvectorizer "github.com/weaviate/weaviate/usecases/modulecomponents/vectorizer" +) + +type Vectorizer struct { + client Client + objectVectorizer *objectsvectorizer.ObjectVectorizer +} + +func New(client Client) *Vectorizer { + return &Vectorizer{ + client: client, + objectVectorizer: objectsvectorizer.New(), + } +} + +type Client interface { + VectorizeObject(ctx context.Context, input string, + cfg transformers.VectorizationConfig) (*transformers.VectorizationResult, error) + VectorizeQuery(ctx context.Context, input string, + cfg transformers.VectorizationConfig) (*transformers.VectorizationResult, error) +} + +// IndexCheck returns whether a property of a class should be indexed +type ClassSettings interface { + PropertyIndexed(property string) bool + VectorizeClassName() bool + VectorizePropertyName(propertyName string) bool + PoolingStrategy() string +} + +func (v *Vectorizer) Object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + vec, err := v.object(ctx, object, cfg) + return vec, nil, err +} + +func (v *Vectorizer) object(ctx context.Context, object *models.Object, cfg moduletools.ClassConfig, +) ([]float32, error) { + icheck := NewClassSettings(cfg) + text := v.objectVectorizer.Texts(ctx, object, icheck) + res, err := v.client.VectorizeObject(ctx, text, v.getVectorizationConfig(cfg)) + if err != nil { + return nil, err + } + + return res.Vector, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/texts.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/texts.go new file mode 100644 index 
0000000000000000000000000000000000000000..b823d1d75e78500e7dce86cc8166fd59106ee889 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/texts.go @@ -0,0 +1,47 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/transformers" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +func (v *Vectorizer) Texts(ctx context.Context, inputs []string, + cfg moduletools.ClassConfig, +) ([]float32, error) { + vectors := make([][]float32, len(inputs)) + for i := range inputs { + res, err := v.client.VectorizeQuery(ctx, inputs[i], v.getVectorizationConfig(cfg)) + if err != nil { + return nil, errors.Wrap(err, "remote client vectorize") + } + vectors[i] = res.Vector + } + + return libvectorizer.CombineVectors(vectors), nil +} + +func (v *Vectorizer) getVectorizationConfig(cfg moduletools.ClassConfig) transformers.VectorizationConfig { + settings := NewClassSettings(cfg) + return transformers.VectorizationConfig{ + PoolingStrategy: settings.PoolingStrategy(), + InferenceURL: settings.InferenceURL(), + PassageInferenceURL: settings.PassageInferenceURL(), + QueryInferenceURL: settings.QueryInferenceURL(), + Dimensions: settings.Dimensions(), + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/texts_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/texts_test.go new file mode 100644 index 0000000000000000000000000000000000000000..95cd27ec41cb7e31526f6422b22e74cc0af1102f --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-transformers/vectorizer/texts_test.go @@ -0,0 +1,94 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// as used in the nearText searcher +func TestVectorizingTexts(t *testing.T) { + type testCase struct { + name string + input []string + expectedPoolingStrategy string + poolingStrategy string + } + + tests := []testCase{ + { + name: "single word", + input: []string{"hello"}, + poolingStrategy: "cls", + expectedPoolingStrategy: "cls", + }, + { + name: "multiple words", + input: []string{"hello world, this is me!"}, + poolingStrategy: "cls", + expectedPoolingStrategy: "cls", + }, + + { + name: "multiple sentences (joined with a dot)", + input: []string{"this is sentence 1", "and here's number 2"}, + poolingStrategy: "cls", + expectedPoolingStrategy: "cls", + }, + + { + name: "multiple sentences already containing a dot", + input: []string{"this is sentence 1.", "and here's number 2"}, + poolingStrategy: "cls", + expectedPoolingStrategy: "cls", + }, + { + name: "multiple sentences already containing a question mark", + input: []string{"this is sentence 1?", "and here's number 2"}, + poolingStrategy: "cls", + expectedPoolingStrategy: "cls", + }, + { + name: "multiple sentences already containing an exclamation mark", + input: []string{"this is sentence 1!", "and here's number 2"}, + poolingStrategy: "cls", + expectedPoolingStrategy: "cls", + }, + { + name: "multiple sentences already containing comma", + input: []string{"this is sentence 1,", "and here's number 2"}, + poolingStrategy: "cls", + expectedPoolingStrategy: "cls", + }, + } + 
+ for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + client := &fakeClient{} + + v := New(client) + + settings := &fakeClassConfig{ + poolingStrategy: test.poolingStrategy, + } + vec, err := v.Texts(context.Background(), test.input, settings) + + require.Nil(t, err) + assert.Equal(t, []float32{0, 1, 2, 3}, vec) + assert.Equal(t, client.lastConfig.PoolingStrategy, test.expectedPoolingStrategy) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..e247c4176b2ce774dfb07712b97c41c734fc1d9f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "VoyageAI Module", + "documentationHref": "https://docs.voyageai.com/docs/embeddings", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/clients/voyageai.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/clients/voyageai.go new file mode 100644 index 0000000000000000000000000000000000000000..894a3b2184d2a3282ffde192ea3eab20fee1d890 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/clients/voyageai.go @@ -0,0 +1,101 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. 
All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "fmt" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/voyageai" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2vec-voyageai/ent" +) + +var rateLimitPerModel = map[string]voyageai.VoyageRLModel{ + "voyage-3.5": {TokenLimit: 8_000_000, RequestLimit: 2000}, + "voyage-3.5-lite": {TokenLimit: 16_000_000, RequestLimit: 2000}, + "voyage-3": {TokenLimit: 2_000_000, RequestLimit: 1000}, + "voyage-3-lite": {TokenLimit: 4_000_000, RequestLimit: 1000}, + "default": {TokenLimit: 1_000_000, RequestLimit: 1000}, +} + +func getLimitForModel(model string) voyageai.VoyageRLModel { + if rl, ok := rateLimitPerModel[model]; ok { + return rl + } + return rateLimitPerModel["default"] +} + +type voyageaiUrlBuilder struct { + origin string + pathMask string +} + +func newVoyageAIUrlBuilder() *voyageaiUrlBuilder { + return &voyageaiUrlBuilder{ + origin: ent.DefaultBaseURL, + pathMask: "/embeddings", + } +} + +func (c *voyageaiUrlBuilder) URL(baseURL string) string { + if baseURL != "" { + return fmt.Sprintf("%s%s", baseURL, c.pathMask) + } + return fmt.Sprintf("%s%s", c.origin, c.pathMask) +} + +type vectorizer struct { + client *voyageai.Client +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + client: voyageai.New(apiKey, timeout, newVoyageAIUrlBuilder(), logger), + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, input []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + settings := ent.NewClassSettings(cfg) + return v.client.Vectorize(ctx, input, voyageai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Truncate: 
settings.Truncate(), + }) +} + +func (v *vectorizer) VectorizeQuery(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + settings := ent.NewClassSettings(cfg) + return v.client.VectorizeQuery(ctx, input, voyageai.Settings{ + BaseURL: settings.BaseURL(), + Model: settings.Model(), + Truncate: settings.Truncate(), + }) +} + +func (v *vectorizer) GetApiKeyHash(ctx context.Context, cfg moduletools.ClassConfig) [32]byte { + return v.client.GetApiKeyHash(ctx, cfg) +} + +func (v *vectorizer) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + settings := ent.NewClassSettings(cfg) + modelRL := getLimitForModel(settings.Model()) + return v.client.GetVectorizerRateLimit(ctx, modelRL) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/clients/voyageai_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/clients/voyageai_test.go new file mode 100644 index 0000000000000000000000000000000000000000..bab76fad8afc86a7cb6fc46e693d5954331fe7df --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/clients/voyageai_test.go @@ -0,0 +1,220 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" + "github.com/weaviate/weaviate/usecases/modulecomponents/clients/voyageai" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{voyageai.New("apiKey", 0, &voyageaiUrlBuilder{origin: server.URL, pathMask: "/embeddings"}, nullLogger())} + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + res, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "voyage-2", "baseURL": server.URL}}) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{voyageai.New("apiKey", 0, &voyageaiUrlBuilder{origin: server.URL, pathMask: "/embeddings"}, nullLogger())} + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "voyage-2"}}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: 
errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + c := &vectorizer{voyageai.New("apiKey", 0, &voyageaiUrlBuilder{origin: server.URL, pathMask: "/embeddings"}, nullLogger())} + _, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "voyage-2", "baseURL": server.URL}}) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "connection to VoyageAI failed with status: 500 error: nope, not gonna happen") + }) + + t.Run("when VoyageAI key is passed using VoyageAIre-Api-Key header", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{voyageai.New("apiKey", 0, &voyageaiUrlBuilder{origin: server.URL, pathMask: "/embeddings"}, nullLogger())} + ctxWithValue := context.WithValue(context.Background(), + "X-Voyageai-Api-Key", []string{"some-key"}) + + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + res, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "voyage-2", "baseURL": server.URL}}) + + require.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when VoyageAI key is empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{voyageai.New("", 0, &voyageaiUrlBuilder{origin: server.URL, pathMask: "/embeddings"}, nullLogger())} + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + defer cancel() + + _, _, _, err := c.Vectorize(ctx, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "voyage-2"}}) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "VoyageAI API Key: no api key found "+ + "neither in request header: X-VoyageAI-Api-Key "+ + "nor in environment variable under 
VOYAGEAI_APIKEY") + }) + + t.Run("when X-VoyageAI-Api-Key header is passed but empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{voyageai.New("", 0, &voyageaiUrlBuilder{origin: server.URL, pathMask: "/embeddings"}, nullLogger())} + ctxWithValue := context.WithValue(context.Background(), + "X-VoyageAI-Api-Key", []string{""}) + + _, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "voyage-2"}}) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "VoyageAI API Key: no api key found "+ + "neither in request header: X-VoyageAI-Api-Key "+ + "nor in environment variable under VOYAGEAI_APIKEY") + }) +} + +type fakeHandler struct { + t *testing.T + serverError error +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.serverError != nil { + resp := map[string]interface{}{ + "detail": "nope, not gonna happen", + } + outBytes, err := json.Marshal(resp) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var req map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &req)) + + assert.NotNil(f.t, req) + assert.NotEmpty(f.t, req["input"]) + + resp := map[string]interface{}{ + "data": []map[string]interface{}{ + {"embedding": []float32{0.1, 0.2, 0.3}}, + }, + } + outBytes, err := json.Marshal(resp) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func nullLogger() logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f 
fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/config.go new file mode 100644 index 0000000000000000000000000000000000000000..e388f403f45de5201e24ba85870e2a564caa032a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/config.go @@ -0,0 +1,50 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modvoyageai + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2vec-voyageai/ent" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *VoyageAIModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "vectorizeClassName": ent.DefaultVectorizeClassName, + "baseURL": ent.DefaultBaseURL, + "model": ent.DefaultVoyageAIModel, + "truncate": ent.DefaultTruncate, + } +} + +func (m *VoyageAIModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !ent.DefaultPropertyIndexed, + "vectorizePropertyName": ent.DefaultVectorizePropertyName, + } +} + +func (m *VoyageAIModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := ent.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/ent/class_settings.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..a1eb031aa18c2a2fbd78ec0ba7cedc33619e526f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/ent/class_settings.go @@ -0,0 +1,58 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + // Default values for URL, model and truncate cannot be changed before we solve how old classes that have the defaults + // NOT set will handle the change + DefaultBaseURL = "https://api.voyageai.com/v1" + DefaultVoyageAIModel = "voyage-3" + DefaultTruncate = true + DefaultVectorizeClassName = true + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + LowerCaseInput = false +) + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) classSettings { + return classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, LowerCaseInput)} +} + +func (cs classSettings) Model() string { + return cs.BaseClassSettings.GetPropertyAsString("model", DefaultVoyageAIModel) +} + +func (cs classSettings) Truncate() bool { + return cs.BaseClassSettings.GetPropertyAsBool("truncate", DefaultTruncate) +} + +func (cs classSettings) BaseURL() string { + return cs.BaseClassSettings.GetPropertyAsString("baseURL", DefaultBaseURL) +} + +func (cs classSettings) Validate(class *models.Class) error { + if err := cs.BaseClassSettings.Validate(class); err != nil { + return err + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/module.go new file mode 100644 index 0000000000000000000000000000000000000000..66e93a25c84c30eb443057ca2ce8d32742ed7b01 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/module.go @@ -0,0 +1,170 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || 
__/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modvoyageai + +import ( + "context" + "os" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + + "github.com/weaviate/weaviate/modules/text2vec-voyageai/ent" + + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-voyageai/clients" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" +) + +const Name = "text2vec-voyageai" + +var batchSettings = batch.Settings{ + // the encoding is different than OpenAI, but the code is not available in Go and too complicated to port. + // using 30% more than the OpenAI model is a rough estimate but seems to work + TokenMultiplier: 1.3, + MaxTimePerBatch: float64(10), + MaxObjectsPerBatch: 128, // https://docs.voyageai.com/docs/embeddings#python-api + MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { + model := ent.NewClassSettings(cfg).Model() + if model == "voyage-2" { + return 320000 + } else if model == "voyage-large-2" || model == "voyage-code-2" { + return 120000 + } + return 120000 // unknown model, use the smallest limit + }, + HasTokenLimit: true, + ReturnsRateLimit: true, +} + +func New() *VoyageAIModule { + return &VoyageAIModule{} +} + +type VoyageAIModule struct { + vectorizer text2vecbase.TextVectorizerBatch[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + 
+func (m *VoyageAIModule) Name() string { + return Name +} + +func (m *VoyageAIModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2ManyVec +} + +func (m *VoyageAIModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init additional properties provider") + } + + return nil +} + +func (m *VoyageAIModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *VoyageAIModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("VOYAGEAI_APIKEY") + client := clients.New(apiKey, timeout, logger) + + m.vectorizer = text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, batchSettings, + logger, m.Name()), + batch.ReturnBatchTokenizer(batchSettings.TokenMultiplier, m.Name(), ent.LowerCaseInput), + ) + m.metaProvider = client + + return nil +} + +func (m *VoyageAIModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m *VoyageAIModule) VectorizeObject(ctx context.Context, obj *models.Object, + cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg, ent.NewClassSettings(cfg)) +} + +func (m 
*VoyageAIModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + vecs, errs := m.vectorizer.ObjectBatch(ctx, objs, skipObject, cfg) + return vecs, nil, errs +} + +func (m *VoyageAIModule) VectorizableProperties(cfg moduletools.ClassConfig, +) (bool, []string, error) { + return true, nil, nil +} + +func (m *VoyageAIModule) MetaInfo() (map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *VoyageAIModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +func (m *VoyageAIModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +// verify we implement the modules.Module interface +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[]float32](New()) + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.InputVectorizer[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..fd52803f981754164177a858bdbd07c62bfaa1f3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package modvoyageai + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *VoyageAIModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *VoyageAIModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *VoyageAIModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/vectorizer/batch_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/vectorizer/batch_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1bfb368ab22f7aacf00378214a945579a4ecaa08 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/vectorizer/batch_test.go @@ -0,0 +1,104 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +func TestBatch(t *testing.T) { + client := &fakeBatchClient{} + cfg := &fakeClassConfig{vectorizePropertyName: false, classConfig: map[string]interface{}{"vectorizeClassName": false}} + logger, _ := test.NewNullLogger() + cases := []struct { + name string + objects []*models.Object + skip []bool + wantErrors map[int]error + deadline time.Duration + }{ + {name: "skip all", objects: []*models.Object{{Class: "Car"}}, skip: []bool{true}}, + {name: "skip first", objects: []*models.Object{{Class: "Car"}, {Class: "Car", Properties: map[string]interface{}{"test": "test"}}}, skip: []bool{true, false}}, + {name: "one object errors", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "test"}}, {Class: "Car", Properties: map[string]interface{}{"test": "error something"}}}, skip: []bool{false, false}, wantErrors: map[int]error{1: fmt.Errorf("something")}}, + {name: "first object errors", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "error something"}}, {Class: "Car", Properties: map[string]interface{}{"test": "test"}}}, skip: []bool{false, false}, wantErrors: map[int]error{0: fmt.Errorf("something")}}, + {name: "vectorize all", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "test"}}, {Class: "Car", Properties: map[string]interface{}{"test": "something"}}}, skip: []bool{false, false}}, + {name: "multiple vectorizer batches", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 25"}}, // set 
limit so next 3 objects are one batch + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "first object second batch"}}, // rate is 100 again + {Class: "Car", Properties: map[string]interface{}{"test": "second object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "fourth object second batch"}}, + }, skip: []bool{false, false, false, false, false, false, false, false}}, + {name: "multiple vectorizer batches with skips and errors", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 25"}}, // set limit so next 3 objects are one batch + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "error something"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "first object second batch"}}, // rate is 100 again + {Class: "Car", Properties: map[string]interface{}{"test": "second object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "fourth object second batch"}}, + }, skip: []bool{false, true, false, false, false, true, false, false}, wantErrors: map[int]error{3: fmt.Errorf("something")}}, + {name: "skip last item", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "fir test object"}}, // set limit + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + 
{Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + }, skip: []bool{false, false, true}}, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + v := text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, + batch.Settings{MaxObjectsPerBatch: 100, MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 500000 }, MaxTimePerBatch: 10}, + logger, "test"), + batch.ReturnBatchTokenizer(1.3, "", false), + ) + deadline := time.Now().Add(10 * time.Second) + if tt.deadline != 0 { + deadline = time.Now().Add(tt.deadline) + } + + ctx, cancl := context.WithDeadline(context.Background(), deadline) + vecs, errs := v.ObjectBatch( + ctx, tt.objects, tt.skip, cfg, + ) + + require.Len(t, errs, len(tt.wantErrors)) + require.Len(t, vecs, len(tt.objects)) + + for i := range tt.objects { + if tt.wantErrors[i] != nil { + require.Equal(t, tt.wantErrors[i], errs[i]) + } else if tt.skip[i] { + require.Nil(t, vecs[i]) + } else { + require.NotNil(t, vecs[i]) + } + } + cancl() + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..55b26c01e2dd0e3affe418a2968e55d18a3775a7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-voyageai/vectorizer/fakes_for_test.go @@ -0,0 +1,162 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" +) + +type fakeBatchClient struct { + defaultResetRate int +} + +func (c *fakeBatchClient) Vectorize(ctx context.Context, + text []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + if c.defaultResetRate == 0 { + c.defaultResetRate = 60 + } + + vectors := make([][]float32, len(text)) + errors := make([]error, len(text)) + rateLimit := &modulecomponents.RateLimits{RemainingTokens: 100, RemainingRequests: 100, LimitTokens: 200, ResetTokens: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second), ResetRequests: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second)} + for i := range text { + if len(text[i]) >= len("error ") && text[i][:6] == "error " { + errors[i] = fmt.Errorf("%s", text[i][6:]) + continue + } + + req := len("requests ") + if len(text[i]) >= req && text[i][:req] == "requests " { + reqs, _ := strconv.Atoi(strings.Split(text[i][req:], " ")[0]) + rateLimit.RemainingRequests = reqs + rateLimit.LimitRequests = 2 * reqs + } + + if len(text[i]) >= len("wait ") && text[i][:5] == "wait " { + wait, _ := strconv.Atoi(text[i][5:]) + time.Sleep(time.Duration(wait) * time.Millisecond) + } + vectors[i] = []float32{0, 1, 2, 3} + } + + return &modulecomponents.VectorizationResult[[]float32]{ + Vector: vectors, + Dimensions: 4, + Text: text, + Errors: errors, + }, rateLimit, 0, nil +} + +func (c *fakeBatchClient) VectorizeQuery(ctx context.Context, + text []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + return &modulecomponents.VectorizationResult[[]float32]{ + Vector: 
[][]float32{{0.1, 1.1, 2.1, 3.1}}, + Dimensions: 4, + Text: text, + }, nil +} + +func (c *fakeBatchClient) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + return &modulecomponents.RateLimits{RemainingTokens: 100, RemainingRequests: 100, LimitTokens: 200, ResetTokens: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second), ResetRequests: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second)} +} + +func (c *fakeBatchClient) GetApiKeyHash(ctx context.Context, cfg moduletools.ClassConfig) [32]byte { + return [32]byte{} +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizeClassName bool + vectorizePropertyName bool + skippedProperty string + excludedProperty string + // module specific settings + voyageaiModel string + truncateType string + baseURL string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + classSettings := map[string]interface{}{ + "vectorizeClassName": f.vectorizeClassName, + "model": f.voyageaiModel, + "truncate": f.truncateType, + "baseURL": f.baseURL, + } + return classSettings +} + +func (f fakeClassConfig) PropertyIndexed(property string) bool { + return !((property == f.skippedProperty) || (property == f.excludedProperty)) +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) VectorizeClassName() bool { + 
return f.classConfig["vectorizeClassName"].(bool) +} + +func (f fakeClassConfig) VectorizePropertyName(propertyName string) bool { + return f.vectorizePropertyName +} + +func (f fakeClassConfig) Properties() []string { + return nil +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..66b3424d989ced1b66c11b705780a757ec395d48 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/fakes_for_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/meta.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/meta.go new file mode 100644 index 0000000000000000000000000000000000000000..9ed01cb13c802ba54309bbe5b4dd6bb9f88283a5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/meta.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package clients + +func (v *vectorizer) MetaInfo() (map[string]interface{}, error) { + return map[string]interface{}{ + "name": "Weaviate Embedding Module", + "documentationHref": "https://api.embedding.weaviate.io", + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/url.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/url.go new file mode 100644 index 0000000000000000000000000000000000000000..92fa5b4adc9fa03db807fb1525e9114d004554df --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/url.go @@ -0,0 +1,33 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import "fmt" + +type weaviateEmbedUrlBuilder struct { + origin string + pathMask string +} + +func newWeaviateEmbedUrlBuilder() *weaviateEmbedUrlBuilder { + return &weaviateEmbedUrlBuilder{ + origin: "https://api.embedding.weaviate.io", + pathMask: "/v1/embeddings/embed", + } +} + +func (c *weaviateEmbedUrlBuilder) url(baseURL string) string { + if baseURL != "" { + return fmt.Sprintf("%s%s", baseURL, c.pathMask) + } + return fmt.Sprintf("%s%s", c.origin, c.pathMask) +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/weaviate_embed.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/weaviate_embed.go new file mode 100644 index 0000000000000000000000000000000000000000..d4ab923101a30d09f33d5f855c8b0c97d6d5d6ee --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/weaviate_embed.go @@ -0,0 +1,236 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| 
| (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/modules/text2vec-weaviate/ent" +) + +const ( + DefaultRPM = 10000 + DefaultTPM = 10_000_000 +) + +type embeddingsRequest struct { + Texts []string `json:"texts"` + IsSearchQuery bool `json:"is_search_query,omitempty"` + Dimensions *int64 `json:"dimensions,omitempty"` +} + +type embeddingsResponse struct { + Embeddings [][]float32 `json:"embeddings,omitempty"` + Metadata metadata `json:"metadata,omitempty"` +} + +type embeddingsResponseError struct { + Detail string `json:"detail"` +} + +type metadata struct { + Model string `json:"model,omitempty"` + TimeTakenInference float32 `json:"time_taken_inference,omitempty"` + NumEmbeddingsInferred int `json:"num_embeddings_inferred,omitempty"` + Usage *modulecomponents.Usage `json:"usage,omitempty"` +} + +type vectorizer struct { + apiKey string + httpClient *http.Client + urlBuilder *weaviateEmbedUrlBuilder + logger logrus.FieldLogger +} + +func New(apiKey string, timeout time.Duration, logger logrus.FieldLogger) *vectorizer { + return &vectorizer{ + apiKey: apiKey, + httpClient: &http.Client{ + Timeout: timeout, + }, + urlBuilder: newWeaviateEmbedUrlBuilder(), + logger: logger, + } +} + +func (v *vectorizer) Vectorize(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + config := v.getVectorizationConfig(cfg) + return v.vectorize(ctx, input, config.Model, config.Truncate, config.BaseURL, false, config) +} + +func (v 
*vectorizer) VectorizeQuery(ctx context.Context, input []string, + cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + config := v.getVectorizationConfig(cfg) + res, _, _, err := v.vectorize(ctx, input, config.Model, config.Truncate, config.BaseURL, true, config) + return res, err +} + +func (v *vectorizer) getVectorizationConfig(cfg moduletools.ClassConfig) ent.VectorizationConfig { + icheck := ent.NewClassSettings(cfg) + return ent.VectorizationConfig{ + Model: icheck.Model(), + BaseURL: icheck.BaseURL(), + Truncate: icheck.Truncate(), + Dimensions: icheck.Dimensions(), + } +} + +func (v *vectorizer) vectorize(ctx context.Context, input []string, + model, truncate, baseURL string, isSearchQuery bool, config ent.VectorizationConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + body, err := json.Marshal(v.getEmbeddingsRequest(input, isSearchQuery, config.Dimensions)) + if err != nil { + return nil, nil, 0, errors.Wrap(err, "marshal body") + } + + url := v.getWeaviateEmbedURL(ctx, baseURL) + req, err := http.NewRequestWithContext(ctx, "POST", url, + bytes.NewReader(body)) + if err != nil { + return nil, nil, 0, errors.Wrap(err, "create POST request") + } + token, err := v.getToken(ctx) + if err != nil { + return nil, nil, 0, errors.Wrap(err, "authentication token") + } + clusterURL, err := v.getClusterURL(ctx) + if err != nil { + return nil, nil, 0, errors.Wrap(err, "cluster URL") + } + + req.Header.Set("Authorization", token) + req.Header.Set("Content-Type", "application/json") + req.Header.Add("Request-Source", "unspecified:weaviate") + req.Header.Add("X-Weaviate-Embedding-Model", model) + req.Header.Add("X-Weaviate-Cluster-Url", clusterURL) + + res, err := v.httpClient.Do(req) + if err != nil { + return nil, nil, 0, errors.Wrap(err, "send POST request") + } + defer res.Body.Close() + bodyBytes, err := io.ReadAll(res.Body) + if err != nil { + return nil, nil, 0, 
errors.Wrap(err, "read response body") + } + + if res.StatusCode > 200 { + errorMessage := getErrorMessage(res.StatusCode, string(bodyBytes), "Weaviate embed API error: %d %s") + return nil, nil, 0, errors.New(errorMessage) + } + + var resBody embeddingsResponse + if err := json.Unmarshal(bodyBytes, &resBody); err != nil { + return nil, nil, 0, errors.Wrap(err, fmt.Sprintf("unmarshal response body. Got: %v", string(bodyBytes))) + } + + if len(resBody.Embeddings) == 0 { + return nil, nil, 0, errors.Errorf("empty embeddings response") + } + + return &modulecomponents.VectorizationResult[[]float32]{ + Text: input, + Dimensions: len(resBody.Embeddings[0]), + Vector: resBody.Embeddings, + }, nil, modulecomponents.GetTotalTokens(resBody.Metadata.Usage), nil +} + +func (v *vectorizer) getWeaviateEmbedURL(ctx context.Context, baseURL string) string { + passedBaseURL := baseURL + if headerBaseURL := modulecomponents.GetValueFromContext(ctx, "X-Weaviate-Baseurl"); headerBaseURL != "" { + passedBaseURL = headerBaseURL + } + return v.urlBuilder.url(passedBaseURL) +} + +func (v *vectorizer) getEmbeddingsRequest(texts []string, isSearchQuery bool, dimensions *int64) embeddingsRequest { + return embeddingsRequest{Texts: texts, IsSearchQuery: isSearchQuery, Dimensions: dimensions} +} + +func (v *vectorizer) GetApiKeyHash(ctx context.Context, config moduletools.ClassConfig) [32]byte { + key, err := v.getToken(ctx) + if err != nil { + return [32]byte{} + } + return sha256.Sum256([]byte(key)) +} + +func (v *vectorizer) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + rpm, tpm := modulecomponents.GetRateLimitFromContext(ctx, "Weaviate", DefaultRPM, DefaultTPM) + + execAfterRequestFunction := func(limits *modulecomponents.RateLimits, tokensUsed int, deductRequest bool) { + // refresh is after 60 seconds but leave a bit of room for errors. 
Otherwise, we only deduct the request that just happened + if limits.LastOverwrite.Add(61 * time.Second).After(time.Now()) { + if deductRequest { + limits.RemainingRequests-- + } + return + } + + limits.RemainingRequests = rpm + limits.ResetRequests = time.Now().Add(time.Duration(61) * time.Second) + limits.LimitRequests = rpm + limits.LastOverwrite = time.Now() + + limits.RemainingTokens = tpm + limits.LimitTokens = tpm + limits.ResetTokens = time.Now().Add(time.Duration(1) * time.Second) + } + + initialRL := &modulecomponents.RateLimits{AfterRequestFunction: execAfterRequestFunction, LastOverwrite: time.Now().Add(-61 * time.Minute)} + initialRL.ResetAfterRequestFunction(0) // set initial values + + return initialRL +} + +func getErrorMessage(statusCode int, resBodyError string, errorTemplate string) string { + var errResp embeddingsResponseError + if err := json.Unmarshal([]byte(resBodyError), &errResp); err != nil { + return fmt.Sprintf(errorTemplate, statusCode, resBodyError) + } + return fmt.Sprintf(errorTemplate, statusCode, errResp.Detail) +} + +func (v *vectorizer) getToken(ctx context.Context) (string, error) { + if token := modulecomponents.GetValueFromContext(ctx, "Authorization"); token != "" { + return token, nil + } + if v.apiKey != "" { + return fmt.Sprintf("Bearer %s", v.apiKey), nil + } + return "", errors.New("neither authentication token found in request header: Authorization " + + "nor api key in environment variable under WEAVIATE_APIKEY") +} + +func (v *vectorizer) getClusterURL(ctx context.Context) (string, error) { + if clusterURL := modulecomponents.GetValueFromContext(ctx, "X-Weaviate-Cluster-Url"); clusterURL != "" { + return clusterURL, nil + } + return "", errors.New("no cluster URL found " + + "in request header: X-Weaviate-Cluster-Url") +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/weaviate_embed_test.go 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/weaviate_embed_test.go new file mode 100644 index 0000000000000000000000000000000000000000..58eca61581b69f2607360eb3a7a454ea37ec01ea --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/clients/weaviate_embed_test.go @@ -0,0 +1,309 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package clients + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestClient(t *testing.T) { + t.Run("when all is fine", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "apiKey", + httpClient: &http.Client{}, + urlBuilder: &weaviateEmbedUrlBuilder{ + origin: server.URL, + pathMask: "/v1/embeddings/embed", + }, + logger: nullLogger(), + } + expected := &modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + ctxWithClusterURL := context.WithValue(context.Background(), "X-Weaviate-Cluster-Url", []string{server.URL}) + res, _, _, err := c.Vectorize(ctxWithClusterURL, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}}) + + assert.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when the context is expired", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c 
:= &vectorizer{ + apiKey: "apiKey", + httpClient: &http.Client{}, + urlBuilder: &weaviateEmbedUrlBuilder{ + origin: server.URL, + pathMask: "/v1/embeddings/embed", + }, + logger: nullLogger(), + } + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + ctxWithClusterURL := context.WithValue(ctx, "X-Weaviate-Cluster-Url", []string{server.URL}) + defer cancel() + + _, _, _, err := c.Vectorize(ctxWithClusterURL, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"baseURL": server.URL}}) + + require.NotNil(t, err) + assert.Contains(t, err.Error(), "context deadline exceeded") + }) + + t.Run("when the server returns an error", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{ + t: t, + serverError: errors.Errorf("nope, not gonna happen"), + }) + defer server.Close() + c := &vectorizer{ + apiKey: "apiKey", + httpClient: &http.Client{}, + urlBuilder: &weaviateEmbedUrlBuilder{ + origin: server.URL, + pathMask: "/v1/embeddings/embed", + }, + logger: nullLogger(), + } + ctxWithClusterURL := context.WithValue(context.Background(), "X-Weaviate-Cluster-Url", []string{server.URL}) + _, _, _, err := c.Vectorize(ctxWithClusterURL, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"baseURL": server.URL}}) + + require.NotNil(t, err) + assert.Equal(t, err.Error(), "Weaviate embed API error: 500 ") + }) + + t.Run("when Weaviate API key is passed using Authorization header", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "", + httpClient: &http.Client{}, + urlBuilder: &weaviateEmbedUrlBuilder{ + origin: server.URL, + pathMask: "/v1/embeddings/embed", + }, + logger: nullLogger(), + } + ctxWithValue := context.WithValue(context.Background(), "Authorization", []string{"some-key"}) + ctxWithBothValues := context.WithValue(ctxWithValue, "X-Weaviate-Cluster-Url", []string{server.URL}) + + expected := 
&modulecomponents.VectorizationResult[[]float32]{ + Text: []string{"This is my text"}, + Vector: [][]float32{{0.1, 0.2, 0.3}}, + Dimensions: 3, + } + res, _, _, err := c.Vectorize(ctxWithBothValues, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"Model": "large", "baseURL": server.URL}}) + + require.Nil(t, err) + assert.Equal(t, expected, res) + }) + + t.Run("when Weaviate API key is empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "", + httpClient: &http.Client{}, + urlBuilder: &weaviateEmbedUrlBuilder{ + origin: server.URL, + pathMask: "/v1/embeddings/embed", + }, + logger: nullLogger(), + } + ctx, cancel := context.WithDeadline(context.Background(), time.Now()) + ctxWithClusterURL := context.WithValue(ctx, "X-Weaviate-Cluster-URL", []string{server.URL}) + defer cancel() + + _, _, _, err := c.Vectorize(ctxWithClusterURL, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{"baseURL": server.URL}}) + + require.NotNil(t, err) + assert.Equal(t, "authentication token: neither authentication token found in request header: Authorization "+ + "nor api key in environment variable under WEAVIATE_APIKEY", err.Error()) + }) + + t.Run("when X-Weaviate-Api-Key header is passed but empty", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "", + httpClient: &http.Client{}, + urlBuilder: &weaviateEmbedUrlBuilder{ + origin: server.URL, + pathMask: "/v1/embeddings/embed", + }, + logger: nullLogger(), + } + ctxWithValue := context.WithValue(context.Background(), + "Authorization", []string{""}) + + _, _, _, err := c.Vectorize(ctxWithValue, []string{"This is my text"}, fakeClassConfig{classConfig: map[string]interface{}{}}) + + require.NotNil(t, err) + assert.Equal(t, "authentication token: neither authentication token found in request header: Authorization "+ + 
"nor api key in environment variable under WEAVIATE_APIKEY", err.Error()) + }) + + t.Run("when X-Weaviate-Baseurl header is passed", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "", + httpClient: &http.Client{}, + urlBuilder: &weaviateEmbedUrlBuilder{ + origin: server.URL, + pathMask: "/v1/embeddings/embed", + }, + logger: nullLogger(), + } + + baseURL := "http://default-url.com" + ctxWithValue := context.WithValue(context.Background(), + "X-Weaviate-Baseurl", []string{"http://base-url-passed-in-header.com"}) + + buildURL := c.getWeaviateEmbedURL(ctxWithValue, baseURL) + assert.Equal(t, "http://base-url-passed-in-header.com/v1/embeddings/embed", buildURL) + + buildURL = c.getWeaviateEmbedURL(context.TODO(), baseURL) + assert.Equal(t, "http://default-url.com/v1/embeddings/embed", buildURL) + }) + + t.Run("pass rate limit headers requests", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "", + httpClient: &http.Client{}, + urlBuilder: &weaviateEmbedUrlBuilder{ + origin: server.URL, + pathMask: "/v1/embeddings/embed", + }, + logger: nullLogger(), + } + + ctxWithValue := context.WithValue(context.Background(), + "X-Weaviate-Ratelimit-RequestPM-Embedding", []string{"50"}) + + rl := c.GetVectorizerRateLimit(ctxWithValue, fakeClassConfig{classConfig: map[string]interface{}{}}) + assert.Equal(t, 50, rl.LimitRequests) + assert.Equal(t, 50, rl.RemainingRequests) + }) + + t.Run("when X-Weaviate-Cluster-URL header is missing", func(t *testing.T) { + server := httptest.NewServer(&fakeHandler{t: t}) + defer server.Close() + c := &vectorizer{ + apiKey: "apiKey", + httpClient: &http.Client{}, + urlBuilder: &weaviateEmbedUrlBuilder{ + origin: server.URL, + pathMask: "/v1/embeddings/embed", + }, + logger: nullLogger(), + } + + _, _, _, err := c.Vectorize(context.Background(), []string{"This is my text"}, 
fakeClassConfig{classConfig: map[string]interface{}{"baseURL": server.URL}}) + + require.NotNil(t, err) + assert.Equal(t, "cluster URL: no cluster URL found in request header: X-Weaviate-Cluster-Url", err.Error()) + }) + + t.Run("TestVectorizeRequestBodyWithCustomDimensions", func(t *testing.T) { + c := &vectorizer{ + apiKey: "apiKey", + httpClient: &http.Client{}, + urlBuilder: &weaviateEmbedUrlBuilder{ + origin: "http://example.com", + pathMask: "/v1/embeddings/embed", + }, + logger: nullLogger(), + } + + dims := int64(256) + cfg := &fakeClassConfig{ + classConfig: map[string]interface{}{ + "dimensions": dims, + }, + } + + config := c.getVectorizationConfig(cfg) + reqBody := c.getEmbeddingsRequest([]string{"test text"}, false, config.Dimensions) + + require.NotNil(t, reqBody.Dimensions) + require.Equal(t, int64(256), *reqBody.Dimensions) + require.Equal(t, []string{"test text"}, reqBody.Texts) + }) +} + +type fakeHandler struct { + t *testing.T + serverError error +} + +func (f *fakeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(f.t, http.MethodPost, r.Method) + + if f.serverError != nil { + embeddingError := map[string]interface{}{ + "message": f.serverError.Error(), + "type": "invalid_request_error", + } + embeddingResponse := map[string]interface{}{ + "message": embeddingError["message"], + } + outBytes, err := json.Marshal(embeddingResponse) + require.Nil(f.t, err) + + w.WriteHeader(http.StatusInternalServerError) + w.Write(outBytes) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + require.Nil(f.t, err) + defer r.Body.Close() + + var b map[string]interface{} + require.Nil(f.t, json.Unmarshal(bodyBytes, &b)) + + textInput := b["texts"].([]interface{}) + assert.Greater(f.t, len(textInput), 0) + + embeddingResponse := map[string]interface{}{ + "embeddings": [][]float32{{0.1, 0.2, 0.3}}, + } + outBytes, err := json.Marshal(embeddingResponse) + require.Nil(f.t, err) + + w.Write(outBytes) +} + +func nullLogger() 
logrus.FieldLogger { + l, _ := test.NewNullLogger() + return l +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/config.go new file mode 100644 index 0000000000000000000000000000000000000000..1a2a37af1094c2fd19ff27aea0861e5fcad4889f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/config.go @@ -0,0 +1,50 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modweaviateembed + +import ( + "context" + + "github.com/weaviate/weaviate/modules/text2vec-weaviate/ent" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +func (m *WeaviateEmbedModule) ClassConfigDefaults() map[string]interface{} { + return map[string]interface{}{ + "model": ent.DefaultWeaviateModel, + "truncate": ent.DefaultTruncate, + "baseURL": ent.DefaultBaseURL, + "vectorizeClassName": ent.DefaultVectorizeClassName, + } +} + +func (m *WeaviateEmbedModule) PropertyConfigDefaults( + dt *schema.DataType, +) map[string]interface{} { + return map[string]interface{}{ + "skip": !ent.DefaultPropertyIndexed, + "vectorizePropertyName": ent.DefaultVectorizePropertyName, + } +} + +func (m *WeaviateEmbedModule) ValidateClass(ctx context.Context, + class *models.Class, cfg moduletools.ClassConfig, +) error { + settings := ent.NewClassSettings(cfg) + return settings.Validate(class) +} + +var _ = modulecapabilities.ClassConfigurator(New()) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/ent/class_settings.go 
b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/ent/class_settings.go new file mode 100644 index 0000000000000000000000000000000000000000..df3643256e1fbd8b176460ea9eef7482699a4515 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/ent/class_settings.go @@ -0,0 +1,61 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + basesettings "github.com/weaviate/weaviate/usecases/modulecomponents/settings" +) + +const ( + DefaultBaseURL = "https://api.embedding.weaviate.io" + DefaultWeaviateModel = "Snowflake/snowflake-arctic-embed-l-v2.0" + DefaultTruncate = "right" + DefaultVectorizeClassName = true + DefaultPropertyIndexed = true + DefaultVectorizePropertyName = false + LowerCaseInput = false +) + +type classSettings struct { + basesettings.BaseClassSettings + cfg moduletools.ClassConfig +} + +func NewClassSettings(cfg moduletools.ClassConfig) *classSettings { + return &classSettings{cfg: cfg, BaseClassSettings: *basesettings.NewBaseClassSettings(cfg, LowerCaseInput)} +} + +func (cs *classSettings) Model() string { + return cs.BaseClassSettings.GetPropertyAsString("model", DefaultWeaviateModel) +} + +func (cs *classSettings) Truncate() string { + return cs.BaseClassSettings.GetPropertyAsString("truncate", DefaultTruncate) +} + +func (cs *classSettings) BaseURL() string { + return cs.BaseClassSettings.GetPropertyAsString("baseURL", DefaultBaseURL) +} + +func (cs *classSettings) Dimensions() *int64 { + return cs.BaseClassSettings.GetPropertyAsInt64("dimensions", nil) +} + +func (cs *classSettings) Validate(class *models.Class) error { + if err := 
cs.BaseClassSettings.Validate(class); err != nil { + return err + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/ent/class_settings_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/ent/class_settings_test.go new file mode 100644 index 0000000000000000000000000000000000000000..3be3298c4494d91aa1e1ec3a27d9305a8d3d88a3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/ent/class_settings_test.go @@ -0,0 +1,123 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package ent + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_classSettings_Validate(t *testing.T) { + class := &models.Class{ + Class: "test", + Properties: []*models.Property{ + { + DataType: []string{schema.DataTypeText.String()}, + Name: "test", + }, + }, + } + tests := []struct { + name string + cfg moduletools.ClassConfig + wantErr error + }{ + { + name: "All defaults", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{}, + }, + }, + { + name: "Explicit model", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "model": "Snowflake/snowflake-arctic-embed-m-v1.5", + }, + }, + }, + { + name: "Explicit dimensions", + cfg: &fakeClassConfig{ + classConfig: map[string]interface{}{ + "dimensions": 256, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cs := NewClassSettings(tt.cfg) + err := cs.Validate(class) + if tt.wantErr != nil { + assert.EqualError(t, err, tt.wantErr.Error()) + 
} else { + assert.NoError(t, err) + } + }) + } +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizePropertyName bool + skippedProperty string + excludedProperty string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/ent/vectorization_config.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/ent/vectorization_config.go new file mode 100644 index 0000000000000000000000000000000000000000..8d1ddebfa097f1813cdc4e80c10922abd2d1bfc4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/ent/vectorization_config.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package ent + +type VectorizationConfig struct { + Model string + Truncate string + BaseURL string + Dimensions *int64 +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/module.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/module.go new file mode 100644 index 0000000000000000000000000000000000000000..df1e1f9978ce303f01613a582b7ab1e3dd93c16f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/module.go @@ -0,0 +1,156 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modweaviateembed + +import ( + "context" + "os" + "time" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/modules/text2vec-weaviate/clients" + "github.com/weaviate/weaviate/modules/text2vec-weaviate/ent" + "github.com/weaviate/weaviate/usecases/modulecomponents/additional" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" +) + +const Name = "text2vec-weaviate" + +var batchSettings = batch.Settings{ + TokenMultiplier: 0, + MaxTimePerBatch: float64(10), + MaxObjectsPerBatch: 200, + MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 500000 }, + HasTokenLimit: false, + ReturnsRateLimit: false, +} + +type WeaviateEmbedModule struct { + vectorizer text2vecbase.TextVectorizerBatch[[]float32] + metaProvider text2vecbase.MetaProvider + graphqlProvider modulecapabilities.GraphQLArguments + searcher modulecapabilities.Searcher[[]float32] + 
nearTextTransformer modulecapabilities.TextTransform + logger logrus.FieldLogger + additionalPropertiesProvider modulecapabilities.AdditionalProperties +} + +func New() *WeaviateEmbedModule { + return &WeaviateEmbedModule{} +} + +func (m *WeaviateEmbedModule) Name() string { + return Name +} + +func (m *WeaviateEmbedModule) Type() modulecapabilities.ModuleType { + return modulecapabilities.Text2ManyVec +} + +func (m *WeaviateEmbedModule) Init(ctx context.Context, + params moduletools.ModuleInitParams, +) error { + m.logger = params.GetLogger() + + if err := m.initVectorizer(ctx, params.GetConfig().ModuleHttpClientTimeout, m.logger); err != nil { + return errors.Wrap(err, "init vectorizer") + } + + if err := m.initAdditionalPropertiesProvider(); err != nil { + return errors.Wrap(err, "init additional properties provider") + } + + return nil +} + +func (m *WeaviateEmbedModule) InitExtension(modules []modulecapabilities.Module) error { + for _, module := range modules { + if module.Name() == m.Name() { + continue + } + if arg, ok := module.(modulecapabilities.TextTransformers); ok { + if arg != nil && arg.TextTransformers() != nil { + m.nearTextTransformer = arg.TextTransformers()["nearText"] + } + } + } + + if err := m.initNearText(); err != nil { + return errors.Wrap(err, "init graphql provider") + } + return nil +} + +func (m *WeaviateEmbedModule) initVectorizer(ctx context.Context, timeout time.Duration, + logger logrus.FieldLogger, +) error { + apiKey := os.Getenv("WEAVIATE_APIKEY") + client := clients.New(apiKey, timeout, logger) + + m.vectorizer = text2vecbase.New(client, + batch.NewBatchVectorizer(client, 50*time.Second, batchSettings, logger, m.Name()), + batch.ReturnBatchTokenizer(batchSettings.TokenMultiplier, m.Name(), ent.LowerCaseInput), + ) + m.metaProvider = client + + return nil +} + +func (m *WeaviateEmbedModule) initAdditionalPropertiesProvider() error { + m.additionalPropertiesProvider = additional.NewText2VecProvider() + return nil +} + +func (m 
*WeaviateEmbedModule) VectorizeObject(ctx context.Context, + obj *models.Object, cfg moduletools.ClassConfig, +) ([]float32, models.AdditionalProperties, error) { + return m.vectorizer.Object(ctx, obj, cfg, ent.NewClassSettings(cfg)) +} + +func (m *WeaviateEmbedModule) VectorizeBatch(ctx context.Context, objs []*models.Object, skipObject []bool, cfg moduletools.ClassConfig) ([][]float32, []models.AdditionalProperties, map[int]error) { + vecs, errs := m.vectorizer.ObjectBatch(ctx, objs, skipObject, cfg) + + return vecs, nil, errs +} + +func (m *WeaviateEmbedModule) MetaInfo() (map[string]interface{}, error) { + return m.metaProvider.MetaInfo() +} + +func (m *WeaviateEmbedModule) VectorizableProperties(cfg moduletools.ClassConfig) (bool, []string, error) { + return true, nil, nil +} + +func (m *WeaviateEmbedModule) VectorizeInput(ctx context.Context, + input string, cfg moduletools.ClassConfig, +) ([]float32, error) { + return m.vectorizer.Texts(ctx, []string{input}, cfg) +} + +func (m *WeaviateEmbedModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return m.additionalPropertiesProvider.AdditionalProperties() +} + +var ( + _ = modulecapabilities.Module(New()) + _ = modulecapabilities.Vectorizer[[]float32](New()) + _ = modulecapabilities.MetaProvider(New()) + _ = modulecapabilities.Searcher[[]float32](New()) + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.InputVectorizer[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/nearText.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/nearText.go new file mode 100644 index 0000000000000000000000000000000000000000..58e397b15b496fd23adc21d5dda7d1f15f10943d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/nearText.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// 
\_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package modweaviateembed + +import ( + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" +) + +func (m *WeaviateEmbedModule) initNearText() error { + m.searcher = nearText.NewSearcher(m.vectorizer) + m.graphqlProvider = nearText.New(m.nearTextTransformer) + return nil +} + +func (m *WeaviateEmbedModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return m.graphqlProvider.Arguments() +} + +func (m *WeaviateEmbedModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + return m.searcher.VectorSearches() +} + +var ( + _ = modulecapabilities.GraphQLArguments(New()) + _ = modulecapabilities.Searcher[[]float32](New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/vectorizer/batch_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/vectorizer/batch_test.go new file mode 100644 index 0000000000000000000000000000000000000000..cebb62a63ffa754afc5e9e10abf8da4ef772084a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/vectorizer/batch_test.go @@ -0,0 +1,105 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/modulecomponents/batch" + "github.com/weaviate/weaviate/usecases/modulecomponents/text2vecbase" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +func TestBatch(t *testing.T) { + client := &fakeBatchClient{} + cfg := &fakeClassConfig{vectorizePropertyName: false, classConfig: map[string]interface{}{"vectorizeClassName": false}} + logger, _ := test.NewNullLogger() + cases := []struct { + name string + objects []*models.Object + skip []bool + wantErrors map[int]error + deadline time.Duration + }{ + {name: "skip all", objects: []*models.Object{{Class: "Car"}}, skip: []bool{true}}, + {name: "skip first", objects: []*models.Object{{Class: "Car"}, {Class: "Car", Properties: map[string]interface{}{"test": "test"}}}, skip: []bool{true, false}}, + {name: "one object errors", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "test"}}, {Class: "Car", Properties: map[string]interface{}{"test": "error something"}}}, skip: []bool{false, false}, wantErrors: map[int]error{1: fmt.Errorf("something")}}, + {name: "first object errors", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "error something"}}, {Class: "Car", Properties: map[string]interface{}{"test": "test"}}}, skip: []bool{false, false}, wantErrors: map[int]error{0: fmt.Errorf("something")}}, + {name: "vectorize all", objects: []*models.Object{{Class: "Car", Properties: map[string]interface{}{"test": "test"}}, {Class: "Car", Properties: map[string]interface{}{"test": "something"}}}, skip: []bool{false, false}}, + {name: "multiple vectorizer batches", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 25"}}, // set 
limit so next 3 objects are one batch + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "first object second batch"}}, // rate is 100 again + {Class: "Car", Properties: map[string]interface{}{"test": "second object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "fourth object second batch"}}, + }, skip: []bool{false, false, false, false, false, false, false, false}}, + {name: "multiple vectorizer batches with skips and errors", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "tokens 25"}}, // set limit so next 3 objects are one batch + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "error something"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "first object second batch"}}, // rate is 100 again + {Class: "Car", Properties: map[string]interface{}{"test": "second object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "third object second batch"}}, + {Class: "Car", Properties: map[string]interface{}{"test": "fourth object second batch"}}, + }, skip: []bool{false, true, false, false, false, true, false, false}, wantErrors: map[int]error{3: fmt.Errorf("something")}}, + {name: "skip last item", objects: []*models.Object{ + {Class: "Car", Properties: map[string]interface{}{"test": "fir test object"}}, // set limit + {Class: "Car", Properties: map[string]interface{}{"test": "first object first batch"}}, + 
{Class: "Car", Properties: map[string]interface{}{"test": "second object first batch"}}, + }, skip: []bool{false, false, true}}, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + v := text2vecbase.New(client, + batch.NewBatchVectorizer( + client, 50*time.Second, + batch.Settings{MaxObjectsPerBatch: 100, MaxTokensPerBatch: func(cfg moduletools.ClassConfig) int { return 500000 }, MaxTimePerBatch: 10}, + logger, "test"), + batch.ReturnBatchTokenizer(0, "", false), + ) + deadline := time.Now().Add(10 * time.Second) + if tt.deadline != 0 { + deadline = time.Now().Add(tt.deadline) + } + + ctx, cancl := context.WithDeadline(context.Background(), deadline) + vecs, errs := v.ObjectBatch( + ctx, tt.objects, tt.skip, cfg, + ) + + require.Len(t, errs, len(tt.wantErrors)) + require.Len(t, vecs, len(tt.objects)) + + for i := range tt.objects { + if tt.wantErrors[i] != nil { + require.Equal(t, tt.wantErrors[i], errs[i]) + } else if tt.skip[i] { + require.Nil(t, vecs[i]) + } else { + require.NotNil(t, vecs[i]) + } + } + cancl() + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/vectorizer/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/vectorizer/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8a400a5c560ec2eb14a3dfba1f7defddbfae1815 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/text2vec-weaviate/vectorizer/fakes_for_test.go @@ -0,0 +1,147 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modulecomponents" + + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" +) + +type fakeBatchClient struct { + defaultResetRate int +} + +func (c *fakeBatchClient) Vectorize(ctx context.Context, + text []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], *modulecomponents.RateLimits, int, error) { + if c.defaultResetRate == 0 { + c.defaultResetRate = 60 + } + + vectors := make([][]float32, len(text)) + errors := make([]error, len(text)) + rateLimit := &modulecomponents.RateLimits{RemainingTokens: 100, RemainingRequests: 100, LimitTokens: 200, ResetTokens: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second), ResetRequests: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second)} + for i := range text { + if len(text[i]) >= len("error ") && text[i][:6] == "error " { + errors[i] = fmt.Errorf("%v", text[i][6:]) + continue + } + + req := len("requests ") + if len(text[i]) >= req && text[i][:req] == "requests " { + reqs, _ := strconv.Atoi(strings.Split(text[i][req:], " ")[0]) + rateLimit.RemainingRequests = reqs + rateLimit.LimitRequests = 2 * reqs + } + + if len(text[i]) >= len("wait ") && text[i][:5] == "wait " { + wait, _ := strconv.Atoi(text[i][5:]) + time.Sleep(time.Duration(wait) * time.Millisecond) + } + vectors[i] = []float32{0, 1, 2, 3} + } + + return &modulecomponents.VectorizationResult[[]float32]{ + Vector: vectors, + Dimensions: 4, + Text: text, + Errors: errors, + }, rateLimit, 0, nil +} + +func (c *fakeBatchClient) VectorizeQuery(ctx context.Context, + text []string, cfg moduletools.ClassConfig, +) (*modulecomponents.VectorizationResult[[]float32], error) { + return &modulecomponents.VectorizationResult[[]float32]{ + Vector: 
[][]float32{{0.1, 1.1, 2.1, 3.1}}, + Dimensions: 4, + Text: text, + }, nil +} + +func (c *fakeBatchClient) GetVectorizerRateLimit(ctx context.Context, cfg moduletools.ClassConfig) *modulecomponents.RateLimits { + return &modulecomponents.RateLimits{RemainingTokens: 100, RemainingRequests: 100, LimitTokens: 200, ResetTokens: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second), ResetRequests: time.Now().Add(time.Duration(c.defaultResetRate) * time.Second)} +} + +func (c *fakeBatchClient) GetApiKeyHash(ctx context.Context, cfg moduletools.ClassConfig) [32]byte { + return [32]byte{} +} + +type fakeClassConfig struct { + classConfig map[string]interface{} + vectorizeClassName bool + vectorizePropertyName bool + skippedProperty string + excludedProperty string + // module specific settings + weaviateEmbedModel string + truncateType string + baseURL string +} + +func (f fakeClassConfig) Class() map[string]interface{} { + classSettings := map[string]interface{}{ + "vectorizeClassName": f.vectorizeClassName, + "model": f.weaviateEmbedModel, + "truncate": f.truncateType, + "baseURL": f.baseURL, + } + return classSettings +} + +func (f fakeClassConfig) ClassByModuleName(moduleName string) map[string]interface{} { + return f.classConfig +} + +func (f fakeClassConfig) Property(propName string) map[string]interface{} { + if propName == f.skippedProperty { + return map[string]interface{}{ + "skip": true, + } + } + if propName == f.excludedProperty { + return map[string]interface{}{ + "vectorizePropertyName": false, + } + } + if f.vectorizePropertyName { + return map[string]interface{}{ + "vectorizePropertyName": true, + } + } + return nil +} + +func (f fakeClassConfig) Tenant() string { + return "" +} + +func (f fakeClassConfig) TargetVector() string { + return "" +} + +func (f fakeClassConfig) PropertiesDataTypes() map[string]schema.DataType { + return nil +} + +func (f fakeClassConfig) Config() *config.Config { + return nil +} diff --git 
a/platform/dbops/binaries/weaviate-src/modules/usage-gcs/module.go b/platform/dbops/binaries/weaviate-src/modules/usage-gcs/module.go new file mode 100644 index 0000000000000000000000000000000000000000..71afbe22e35e09bed1c1ab9d26eeb4e936665d70 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/usage-gcs/module.go @@ -0,0 +1,160 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package usagegcs + +import ( + "context" + "fmt" + "os" + + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/config/runtime" + common "github.com/weaviate/weaviate/usecases/modulecomponents/usage" +) + +const ( + Name = "usage-gcs" +) + +// module is the GCS usage module using the common base +type module struct { + *common.BaseModule + gcsStorage *GCSStorage +} + +func New() *module { + return &module{} +} + +func (m *module) SetUsageService(usageService any) { + m.BaseModule.SetUsageService(usageService) +} + +func (m *module) Name() string { + return Name +} + +func (m *module) Type() modulecapabilities.ModuleType { + return modulecapabilities.Usage +} + +func (m *module) Init(ctx context.Context, params moduletools.ModuleInitParams) error { + // Parse usage configuration from environment + config := params.GetConfig() + if err := common.ParseCommonUsageConfig(config); err != nil { + return err + } + if err := parseGCSConfig(config); err != nil { + return err + } + + // Validate required configuration + if config.Usage.GCSBucket.Get() == "" && !config.RuntimeOverrides.Enabled { + return fmt.Errorf("GCS bucket name not configured - set USAGE_GCS_BUCKET environment variable or enable 
runtime overrides with RUNTIME_OVERRIDES_ENABLED=true") + } + + // Initialize logger + logger := params.GetLogger().WithField("component", Name) + + // Create metrics first + metrics := common.NewMetrics(params.GetMetricsRegisterer(), Name) + + // Create GCS storage backend with metrics + gcsStorage, err := NewGCSStorage(ctx, logger, metrics) + if err != nil { + return fmt.Errorf("failed to create GCS storage: %w", err) + } + + m.gcsStorage = gcsStorage + + // Update storage configuration (this may have empty bucket initially) + storageConfig := m.buildGCSConfig(config) + if _, err := m.gcsStorage.UpdateConfig(storageConfig); err != nil { + return fmt.Errorf("failed to configure GCS storage: %w", err) + } + + // Create base module with GCS storage + m.BaseModule = common.NewBaseModule(Name, m.gcsStorage) + + // Initialize base module with metrics + if err := m.InitializeCommon(ctx, config, logger, metrics); err != nil { + return err + } + + // Build log fields, omitting empty values for cleaner output + logFields := map[string]interface{}{ + "node_id": config.Cluster.Hostname, + "collection_interval": config.Usage.ScrapeInterval.Get().String(), + } + + if bucket := config.Usage.GCSBucket.Get(); bucket != "" { + logFields["gcs_bucket"] = bucket + } else if config.RuntimeOverrides.Enabled { + logFields["gcs_bucket"] = "[pending runtime overrides]" + } + + if prefix := config.Usage.GCSPrefix.Get(); prefix != "" { + logFields["gcs_prefix"] = prefix + } else if config.RuntimeOverrides.Enabled { + logFields["gcs_prefix"] = "[pending runtime overrides]" + } + + logger.WithFields(logFields).Info("initializing usage-gcs module with configuration") + + return nil +} + +func (m *module) buildGCSConfig(config *config.Config) common.StorageConfig { + storageConfig := common.StorageConfig{ + NodeID: config.Cluster.Hostname, + } + + if config.Usage.GCSBucket != nil { + storageConfig.Bucket = config.Usage.GCSBucket.Get() + } + if config.Usage.GCSPrefix != nil { + 
storageConfig.Prefix = config.Usage.GCSPrefix.Get() + } + if config.Usage.PolicyVersion != nil { + storageConfig.Version = config.Usage.PolicyVersion.Get() + } + + return storageConfig +} + +func parseGCSConfig(config *config.Config) error { + gcsBucket := "" + if v := os.Getenv("USAGE_GCS_BUCKET"); v != "" { + gcsBucket = v + } else if config.Usage.GCSBucket != nil { + gcsBucket = config.Usage.GCSBucket.Get() + } + config.Usage.GCSBucket = runtime.NewDynamicValue(gcsBucket) + + gcsPrefix := "" + if v := os.Getenv("USAGE_GCS_PREFIX"); v != "" { + gcsPrefix = v + } else if config.Usage.GCSPrefix != nil { + gcsPrefix = config.Usage.GCSPrefix.Get() + } + config.Usage.GCSPrefix = runtime.NewDynamicValue(gcsPrefix) + + return nil +} + +// verify we implement the required interfaces +var ( + _ = modulecapabilities.ModuleWithClose(New()) + _ = modulecapabilities.ModuleWithUsageService(New()) +) diff --git a/platform/dbops/binaries/weaviate-src/modules/usage-gcs/module_test.go b/platform/dbops/binaries/weaviate-src/modules/usage-gcs/module_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a8a5043d2ac5ecd936009d35f634c8b27d21a378 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/usage-gcs/module_test.go @@ -0,0 +1,631 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package usagegcs + +import ( + "context" + "os" + "testing" + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + + clusterusage "github.com/weaviate/weaviate/cluster/usage" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/usecases/cluster" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/config/runtime" + common "github.com/weaviate/weaviate/usecases/modulecomponents/usage" + usagetypes "github.com/weaviate/weaviate/usecases/modulecomponents/usage/types" +) + +func TestModule_Name(t *testing.T) { + mod := New() + assert.Equal(t, Name, mod.Name()) +} + +func TestModule_Type(t *testing.T) { + mod := New() + assert.Equal(t, modulecapabilities.Usage, mod.Type()) +} + +func TestModule_Init_Success(t *testing.T) { + // Set up environment variables for success case + t.Setenv("USAGE_GCS_BUCKET", "test-bucket") + t.Setenv("USAGE_GCS_PREFIX", "test-prefix") + t.Setenv("CLUSTER_IN_LOCALHOST", "true") + + mod := New() + logger := logrus.New() + + testConfig := config.Config{ + Cluster: cluster.Config{ + Hostname: "test-node", + }, + Usage: usagetypes.UsageConfig{ + ScrapeInterval: runtime.NewDynamicValue(2 * time.Hour), + PolicyVersion: runtime.NewDynamicValue("2025-06-01"), + }, + } + + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetConfig().Return(&testConfig) + params.EXPECT().GetLogger().Return(logger) + params.EXPECT().GetMetricsRegisterer().Return(prometheus.NewPedanticRegistry()) + + err := mod.Init(context.Background(), params) + require.NoError(t, err) + assert.NotNil(t, mod.BaseModule) + assert.NotNil(t, mod.gcsStorage) +} + +func TestModule_Init_MissingEnvVars(t *testing.T) { + mod := New() + + testConfig := 
config.Config{ + Cluster: cluster.Config{ + Hostname: "test-node", + }, + Usage: usagetypes.UsageConfig{ + ScrapeInterval: runtime.NewDynamicValue(2 * time.Hour), + PolicyVersion: runtime.NewDynamicValue("2025-06-01"), + }, + } + + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetConfig().Return(&testConfig) + + err := mod.Init(context.Background(), params) + require.Error(t, err) + assert.Contains(t, err.Error(), "GCS bucket name not configured") +} + +func TestModule_Init_MissingBucket(t *testing.T) { + // Test case where environment variable is not set at all + mod := New() + + testConfig := config.Config{ + Cluster: cluster.Config{ + Hostname: "test-node", + }, + Usage: usagetypes.UsageConfig{ + ScrapeInterval: runtime.NewDynamicValue(2 * time.Hour), + PolicyVersion: runtime.NewDynamicValue("2025-06-01"), + }, + } + + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetConfig().Return(&testConfig) + + err := mod.Init(context.Background(), params) + assert.Error(t, err) + assert.Contains(t, err.Error(), "GCS bucket name not configured") +} + +func TestModule_Init_MissingHostname(t *testing.T) { + // Set up environment for GCS bucket so it passes bucket validation + t.Setenv("USAGE_GCS_BUCKET", "test-bucket") + t.Setenv("CLUSTER_IN_LOCALHOST", "true") + + mod := New() + logger := logrus.New() + + testConfig := config.Config{ + Cluster: cluster.Config{ + Hostname: "", // Missing hostname + }, + Usage: usagetypes.UsageConfig{ + ScrapeInterval: runtime.NewDynamicValue(2 * time.Hour), + PolicyVersion: runtime.NewDynamicValue("2025-06-01"), + }, + } + + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetConfig().Return(&testConfig) + params.EXPECT().GetLogger().Return(logger) + params.EXPECT().GetMetricsRegisterer().Return(prometheus.NewPedanticRegistry()) + + err := mod.Init(context.Background(), params) + assert.Error(t, err) + assert.Contains(t, err.Error(), "cluster hostname is not set") +} + +func 
TestModule_Init_InvalidScrapeInterval(t *testing.T) { + // Set up environment with invalid scrape interval + t.Setenv("USAGE_GCS_BUCKET", "test-bucket") + t.Setenv("USAGE_SCRAPE_INTERVAL", "invalid-duration") + t.Setenv("CLUSTER_IN_LOCALHOST", "true") + + config := &config.Config{ + Cluster: cluster.Config{ + Hostname: "test-node", + }, + Usage: usagetypes.UsageConfig{ + PolicyVersion: runtime.NewDynamicValue("2025-06-01"), + }, + } + + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetConfig().Return(config) + + m := New() + err := m.Init(context.Background(), params) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid USAGE_SCRAPE_INTERVAL") +} + +func TestModule_Init_ConfigurationParsing(t *testing.T) { + // Set up environment variables + t.Setenv("USAGE_GCS_BUCKET", "env-bucket") + t.Setenv("USAGE_GCS_PREFIX", "env-prefix") + t.Setenv("USAGE_SCRAPE_INTERVAL", "10m") + t.Setenv("USAGE_POLICY_VERSION", "2025-06-01") + t.Setenv("CLUSTER_IN_LOCALHOST", "true") + + config := &config.Config{ + Cluster: cluster.Config{ + Hostname: "test-node", + }, + Usage: usagetypes.UsageConfig{ + ScrapeInterval: runtime.NewDynamicValue(5 * time.Minute), // env vars take priority + PolicyVersion: runtime.NewDynamicValue("2025-01-01"), // env vars take priority + }, + } + + logger := logrus.New() + registry := prometheus.NewRegistry() + + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetLogger().Return(logger) + params.EXPECT().GetConfig().Return(config) + params.EXPECT().GetMetricsRegisterer().Return(registry) + + m := New() + err := m.Init(context.Background(), params) + require.NoError(t, err) + + // Verify that environment variables take priority over existing config values + // GCS bucket and prefix will use env vars since config has no existing values for them + assert.Equal(t, "env-bucket", config.Usage.GCSBucket.Get()) + assert.Equal(t, "env-prefix", config.Usage.GCSPrefix.Get()) + // Environment variables take priority 
over existing config values + assert.Equal(t, 10*time.Minute, config.Usage.ScrapeInterval.Get()) + assert.Equal(t, "2025-06-01", config.Usage.PolicyVersion.Get()) +} + +func TestModule_SetUsageService(t *testing.T) { + // Set up environment for GCS bucket + t.Setenv("USAGE_GCS_BUCKET", "test-bucket") + t.Setenv("USAGE_GCS_PREFIX", "test-prefix") + t.Setenv("CLUSTER_IN_LOCALHOST", "true") + + mod := New() + logger := logrus.New() + + // Initialize module first + testConfig := config.Config{ + Cluster: cluster.Config{ + Hostname: "test-node", + }, + Usage: usagetypes.UsageConfig{ + ScrapeInterval: runtime.NewDynamicValue(2 * time.Hour), + PolicyVersion: runtime.NewDynamicValue("2025-06-01"), + }, + } + + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetConfig().Return(&testConfig) + params.EXPECT().GetLogger().Return(logger) + params.EXPECT().GetMetricsRegisterer().Return(prometheus.NewPedanticRegistry()) + + err := mod.Init(context.Background(), params) + require.NoError(t, err) + + // Test with valid service after initialization + usageService := clusterusage.NewMockService(t) + usageService.EXPECT().SetJitterInterval(mock.Anything).Return() + + assert.NotPanics(t, func() { + mod.SetUsageService(usageService) + }) + + // Test with invalid service (should not panic) + assert.NotPanics(t, func() { + mod.SetUsageService("invalid") + }) +} + +func TestGCSStorage_VerifyPermissions(t *testing.T) { + t.Setenv("CLUSTER_IN_LOCALHOST", "true") + + logger := logrus.New() + logger.SetOutput(os.Stdout) + + storage, err := NewGCSStorage(context.Background(), logger, nil) + require.NoError(t, err) + + storage.BucketName = "test-bucket" + storage.Prefix = "test-prefix" + + // Test case: Should skip verification for localhost + err = storage.VerifyPermissions(context.Background()) + assert.NoError(t, err) +} + +func TestGCSStorage_UpdateConfig(t *testing.T) { + t.Setenv("CLUSTER_IN_LOCALHOST", "true") + + logger := logrus.New() + logger.SetOutput(os.Stdout) + + 
storage, err := NewGCSStorage(context.Background(), logger, nil) + require.NoError(t, err) + + storage.BucketName = "old-bucket" + storage.Prefix = "old-prefix" + storage.NodeID = "test-node" // Set during initialization + + // Test configuration update + newConfig := common.StorageConfig{ + Bucket: "new-bucket", + Prefix: "new-prefix", + NodeID: "test-node", + Version: "new-version", + } + + changed, err := storage.UpdateConfig(newConfig) + assert.NoError(t, err) + assert.True(t, changed) + assert.Equal(t, "new-bucket", storage.BucketName) + assert.Equal(t, "new-prefix", storage.Prefix) + assert.Equal(t, "test-node", storage.NodeID) + + // Test no change when config is same + changed, err = storage.UpdateConfig(newConfig) + assert.NoError(t, err) + assert.False(t, changed) +} + +func TestModule_BuildGCSConfig(t *testing.T) { + m := New() + + config := &config.Config{ + Cluster: cluster.Config{ + Hostname: "test-node", + }, + Usage: usagetypes.UsageConfig{ + GCSBucket: runtime.NewDynamicValue("test-bucket"), + GCSPrefix: runtime.NewDynamicValue("test-prefix"), + PolicyVersion: runtime.NewDynamicValue("2025-06-01"), + }, + } + + storageConfig := m.buildGCSConfig(config) + + assert.Equal(t, "test-node", storageConfig.NodeID) + assert.Equal(t, "test-bucket", storageConfig.Bucket) + assert.Equal(t, "test-prefix", storageConfig.Prefix) + assert.Equal(t, "2025-06-01", storageConfig.Version) +} + +func TestModule_BuildGCSConfig_EmptyValues(t *testing.T) { + m := New() + + config := &config.Config{ + Cluster: cluster.Config{ + Hostname: "test-node", + }, + Usage: usagetypes.UsageConfig{ + // All values are nil/empty + }, + } + + storageConfig := m.buildGCSConfig(config) + + assert.Equal(t, "test-node", storageConfig.NodeID) + assert.Equal(t, "", storageConfig.Bucket) + assert.Equal(t, "", storageConfig.Prefix) + assert.Equal(t, "", storageConfig.Version) +} + +func TestParseGCSConfig(t *testing.T) { + tests := []struct { + name string + envVars map[string]string + 
existingGCSBucket string + existingGCSPrefix string + expectedGCSBucket string + expectedGCSPrefix string + }{ + { + name: "all GCS environment variables set", + envVars: map[string]string{ + "USAGE_GCS_BUCKET": "env-bucket", + "USAGE_GCS_PREFIX": "env-prefix", + }, + existingGCSBucket: "existing-bucket", + existingGCSPrefix: "existing-prefix", + expectedGCSBucket: "env-bucket", // env vars take priority + expectedGCSPrefix: "env-prefix", // env vars take priority + }, + { + name: "no environment variables, empty config", + envVars: map[string]string{}, + existingGCSBucket: "", + existingGCSPrefix: "", + expectedGCSBucket: "", // no existing config, no env var → empty string + expectedGCSPrefix: "", // no existing config, no env var → empty string + }, + { + name: "no environment variables but config has existing values", + envVars: map[string]string{}, + existingGCSBucket: "existing-bucket", + existingGCSPrefix: "existing-prefix", + expectedGCSBucket: "existing-bucket", // existing config takes priority + expectedGCSPrefix: "existing-prefix", // existing config takes priority + }, + { + name: "partial environment variables", + envVars: map[string]string{ + "USAGE_GCS_BUCKET": "env-bucket", + // USAGE_GCS_PREFIX not set + }, + existingGCSBucket: "existing-bucket", + existingGCSPrefix: "existing-prefix", + expectedGCSBucket: "env-bucket", // env var takes priority + expectedGCSPrefix: "existing-prefix", // no env var, use existing config + }, + { + name: "environment variables with no existing config", + envVars: map[string]string{ + "USAGE_GCS_BUCKET": "env-bucket", + "USAGE_GCS_PREFIX": "env-prefix", + }, + existingGCSBucket: "", + existingGCSPrefix: "", + expectedGCSBucket: "env-bucket", // no existing config, use env var + expectedGCSPrefix: "env-prefix", // no existing config, use env var + }, + { + name: "partial existing config", + envVars: map[string]string{ + "USAGE_GCS_BUCKET": "env-bucket", + "USAGE_GCS_PREFIX": "env-prefix", + }, + existingGCSBucket: 
"existing-bucket", + existingGCSPrefix: "", // no existing prefix + expectedGCSBucket: "env-bucket", // env var takes priority + expectedGCSPrefix: "env-prefix", // env var takes priority + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Set environment variables + for k, v := range tt.envVars { + t.Setenv(k, v) + } + + config := &config.Config{ + Usage: usagetypes.UsageConfig{ + ScrapeInterval: runtime.NewDynamicValue(5 * time.Minute), + PolicyVersion: runtime.NewDynamicValue("2025-06-01"), + }, + } + + // Set existing values if specified + if tt.existingGCSBucket != "" { + config.Usage.GCSBucket = runtime.NewDynamicValue(tt.existingGCSBucket) + } + if tt.existingGCSPrefix != "" { + config.Usage.GCSPrefix = runtime.NewDynamicValue(tt.existingGCSPrefix) + } + + err := parseGCSConfig(config) + assert.NoError(t, err) + + // Verify expected results - parseGCSConfig always creates DynamicValue objects + require.NotNil(t, config.Usage.GCSBucket) + assert.Equal(t, tt.expectedGCSBucket, config.Usage.GCSBucket.Get()) + + require.NotNil(t, config.Usage.GCSPrefix) + assert.Equal(t, tt.expectedGCSPrefix, config.Usage.GCSPrefix.Get()) + + // These should always be preserved + assert.Equal(t, 5*time.Minute, config.Usage.ScrapeInterval.Get()) + assert.Equal(t, "2025-06-01", config.Usage.PolicyVersion.Get()) + }) + } +} + +func TestParseCommonUsageConfig(t *testing.T) { + tests := []struct { + name string + envVars map[string]string + existingInterval time.Duration + existingVersion string + expectedInterval time.Duration + expectedVersion string + wantErr bool + }{ + { + name: "all common environment variables set", + envVars: map[string]string{ + "USAGE_SCRAPE_INTERVAL": "2h", + "USAGE_POLICY_VERSION": "2025-06-01", + }, + existingInterval: 5 * time.Minute, + existingVersion: "2025-01-01", + expectedInterval: 2 * time.Hour, // env vars take priority + expectedVersion: "2025-06-01", // env vars take priority + }, + { + name: "no environment 
variables, preserve existing values", + envVars: map[string]string{}, + existingInterval: 5 * time.Minute, + existingVersion: "2025-01-01", + expectedInterval: 5 * time.Minute, // preserve existing + expectedVersion: "2025-01-01", // preserve existing + }, + { + name: "no environment variables, no existing values", + envVars: map[string]string{}, + existingInterval: 0, + existingVersion: "", + expectedInterval: common.DefaultCollectionInterval, // use default + expectedVersion: common.DefaultPolicyVersion, // use default + }, + { + name: "environment variables with no existing values", + envVars: map[string]string{ + "USAGE_SCRAPE_INTERVAL": "2h", + "USAGE_POLICY_VERSION": "2025-06-01", + }, + existingInterval: 0, + existingVersion: "", + expectedInterval: 2 * time.Hour, // no existing config, use env var + expectedVersion: "2025-06-01", // no existing config, use env var + }, + { + name: "invalid scrape interval", + envVars: map[string]string{ + "USAGE_SCRAPE_INTERVAL": "invalid-duration", + }, + wantErr: true, + }, + { + name: "partial environment variables with partial existing config", + envVars: map[string]string{ + "USAGE_SCRAPE_INTERVAL": "3h", + "USAGE_POLICY_VERSION": "2025-06-01", + }, + existingInterval: 5 * time.Minute, + existingVersion: "", // no existing version + expectedInterval: 3 * time.Hour, // env var takes priority + expectedVersion: "2025-06-01", // no existing config, use env var + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Set environment variables + for k, v := range tt.envVars { + t.Setenv(k, v) + } + + config := &config.Config{ + Usage: usagetypes.UsageConfig{}, + } + + // Set existing values if specified + if tt.existingInterval > 0 { + config.Usage.ScrapeInterval = runtime.NewDynamicValue(tt.existingInterval) + } + if tt.existingVersion != "" { + config.Usage.PolicyVersion = runtime.NewDynamicValue(tt.existingVersion) + } + + err := common.ParseCommonUsageConfig(config) + if tt.wantErr { + 
assert.Error(t, err) + return + } + + assert.NoError(t, err) + require.NotNil(t, config.Usage.ScrapeInterval) + assert.Equal(t, tt.expectedInterval, config.Usage.ScrapeInterval.Get()) + require.NotNil(t, config.Usage.PolicyVersion) + assert.Equal(t, tt.expectedVersion, config.Usage.PolicyVersion.Get()) + }) + } +} + +func TestModule_Close(t *testing.T) { + // Set up environment for GCS bucket + t.Setenv("USAGE_GCS_BUCKET", "test-bucket") + t.Setenv("USAGE_GCS_PREFIX", "test-prefix") + t.Setenv("CLUSTER_IN_LOCALHOST", "true") + + mod := New() + logger := logrus.New() + logger.SetOutput(os.Stdout) + + // Initialize module first + testConfig := config.Config{ + Cluster: cluster.Config{ + Hostname: "test-node", + }, + Usage: usagetypes.UsageConfig{ + ScrapeInterval: runtime.NewDynamicValue(2 * time.Hour), + PolicyVersion: runtime.NewDynamicValue("2025-06-01"), + }, + } + + params := moduletools.NewMockModuleInitParams(t) + params.EXPECT().GetConfig().Return(&testConfig) + params.EXPECT().GetLogger().Return(logger) + params.EXPECT().GetMetricsRegisterer().Return(prometheus.NewPedanticRegistry()) + + err := mod.Init(context.Background(), params) + require.NoError(t, err) + + // Test close + err = mod.Close() + assert.NoError(t, err) +} + +func TestModule_MetricsPrefixGeneration(t *testing.T) { + // Test that metrics are created with correct prefix for GCS module + registry := prometheus.NewRegistry() + + // Create metrics with the GCS module name + metrics := common.NewMetrics(registry, "usage-gcs") + + // Verify metrics are created + assert.NotNil(t, metrics) + assert.NotNil(t, metrics.OperationTotal) + assert.NotNil(t, metrics.OperationLatency) + assert.NotNil(t, metrics.ResourceCount) + assert.NotNil(t, metrics.UploadedFileSize) + + // Trigger some metric values to make them appear in the registry + metrics.OperationTotal.WithLabelValues("test", "success").Inc() + metrics.ResourceCount.WithLabelValues("collections").Set(1) + metrics.UploadedFileSize.Set(100) + + // 
Gather metrics to verify names + metricFamilies, err := registry.Gather() + require.NoError(t, err) + + // Debug: print all found metrics + foundMetrics := make(map[string]bool) + for _, mf := range metricFamilies { + foundMetrics[mf.GetName()] = true + t.Logf("Found metric: %s", mf.GetName()) + } + + // Check that metrics have correct prefixes + expectedPrefixes := []string{ + "weaviate_usage_gcs_operations_total", + "weaviate_usage_gcs_resource_count", + "weaviate_usage_gcs_uploaded_file_size_bytes", + } + + for _, expectedName := range expectedPrefixes { + assert.True(t, foundMetrics[expectedName], "Expected metric %s not found", expectedName) + } +} diff --git a/platform/dbops/binaries/weaviate-src/modules/usage-gcs/storage.go b/platform/dbops/binaries/weaviate-src/modules/usage-gcs/storage.go new file mode 100644 index 0000000000000000000000000000000000000000..24055254bc266d12146761df39008626b0a90b90 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/modules/usage-gcs/storage.go @@ -0,0 +1,198 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+//
+// CONTACT: hello@weaviate.io
+//
+
+package usagegcs
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"cloud.google.com/go/storage"
+	"github.com/googleapis/gax-go/v2"
+	"github.com/pkg/errors"
+	"github.com/sirupsen/logrus"
+	"golang.org/x/oauth2/google"
+	"google.golang.org/api/option"
+	storageapi "google.golang.org/api/storage/v1"
+
+	"github.com/weaviate/weaviate/cluster/usage/types"
+	common "github.com/weaviate/weaviate/usecases/modulecomponents/usage"
+)
+
+// GCSStorage implements the StorageBackend interface for GCS
+type GCSStorage struct {
+	*common.BaseStorage
+	storageClient *storage.Client
+}
+
+// NewGCSStorage creates a new GCS storage backend
+func NewGCSStorage(ctx context.Context, logger logrus.FieldLogger, metrics *common.Metrics) (*GCSStorage, error) {
+	options := []option.ClientOption{}
+
+	// Use base storage localhost check for authentication
+	baseStorage := common.NewBaseStorage(logger, metrics)
+
+	if baseStorage.IsLocalhostEnvironment() {
+		options = append(options, option.WithoutAuthentication())
+	} else {
+		scopes := []string{
+			"https://www.googleapis.com/auth/devstorage.read_write",
+		}
+		creds, err := google.FindDefaultCredentials(ctx, scopes...)
+		if err != nil {
+			return nil, errors.Wrap(err, "find default credentials")
+		}
+		options = append(options, option.WithCredentials(creds))
+	}
+
+	client, err := storage.NewClient(ctx, options...)
+	if err != nil {
+		return nil, fmt.Errorf("failed to create GCP storage client: %w", err)
+	}
+
+	// Configure retry policy
+	client.SetRetry(storage.WithBackoff(gax.Backoff{
+		Initial:    2 * time.Second,
+		Max:        60 * time.Second,
+		Multiplier: 3,
+	}),
+		storage.WithPolicy(storage.RetryAlways),
+	)
+
+	return &GCSStorage{
+		BaseStorage:   baseStorage,
+		storageClient: client,
+	}, nil
+}
+
+// VerifyPermissions checks if the backend can access the storage location
+func (g *GCSStorage) VerifyPermissions(ctx context.Context) error {
+	if g.storageClient == nil {
+		return fmt.Errorf("storage client is not initialized")
+	}
+
+	// During initialization, bucket may not be configured yet due to runtime overrides
+	// being loaded after module initialization.
+	if g.BucketName == "" {
+		g.Logger.Debug("GCS bucket not configured yet - skipping permission verification")
+		return nil
+	}
+
+	g.LogVerificationStart()
+
+	if g.IsLocalhostEnvironment() {
+		return nil
+	}
+
+	// Create context with timeout to report early in case of invalid permissions
+	timeoutCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
+	defer cancel()
+
+	// GCS-specific permission check using IAM
+	storageService, err := storageapi.NewService(timeoutCtx) // NOTE(review): uses default credentials, not the options selected in NewGCSStorage - confirm intended
+	if err != nil {
+		return fmt.Errorf("failed to create storage API client: %w", err)
+	}
+
+	permissions := []string{"storage.objects.create"}
+	_, err = storageService.Buckets.TestIamPermissions(g.BucketName, permissions).Context(timeoutCtx).Do()
+	if err != nil {
+		return fmt.Errorf("IAM permission check failed for bucket %s: %w", g.BucketName, err)
+	}
+
+	g.LogVerificationSuccess(logrus.Fields{
+		"permissions": permissions,
+	})
+	return nil
+}
+
+// UploadUsageData uploads the usage data to the storage backend
+func (g *GCSStorage) UploadUsageData(ctx context.Context, usage *types.Report) error {
+	if g.storageClient == nil {
+		return fmt.Errorf("storage client is not initialized")
+	}
+
+	data, err := g.MarshalUsageData(usage)
+	if err != nil {
+		return err
+	}
+
+	obj := g.storageClient.Bucket(g.BucketName).Object(g.ConstructObjectKey(usage.CollectingTime))
+	writer := obj.NewWriter(ctx)
+	writer.ContentType = "application/json"
+	writer.Metadata = map[string]string{
+		"version": usage.Version,
+	}
+
+	if _, err := writer.Write(data); err != nil {
+		writer.Close() // best-effort cleanup; the write error takes precedence over any close error
+		return fmt.Errorf("failed to write to GCS: %w", err)
+	}
+
+	if err := writer.Close(); err != nil {
+		return fmt.Errorf("failed to close GCS writer: %w", err)
+	}
+
+	g.RecordUploadMetrics(len(data))
+	return nil
+}
+
+// Close cleans up resources
+func (g *GCSStorage) Close() error {
+	if g.storageClient != nil {
+		return g.storageClient.Close()
+	}
+	return nil
+}
+
+// UpdateConfig updates the backend configuration from the provided config
+func (g *GCSStorage) UpdateConfig(config common.StorageConfig) (bool, error) {
+	// Store old bucket name to detect changes
+	oldBucketName := g.BucketName
+
+	// Update the configuration
+	configChanged := g.UpdateCommonConfig(config)
+	if !configChanged {
+		return configChanged, nil
+	}
+
+	// If bucket name changed, verify permissions
+	if oldBucketName != g.BucketName {
+		g.Logger.WithFields(logrus.Fields{
+			"old_bucket": oldBucketName,
+			"new_bucket": g.BucketName,
+		}).Info("GCS bucket name changed")
+	}
+
+	if !config.VerifyPermissions {
+		g.Logger.Info("permission verification skipped after bucket change (disabled by configuration)")
+		return configChanged, nil
+	}
+
+	g.Logger.Info("verifying permissions")
+	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+	defer cancel()
+
+	if err := g.VerifyPermissions(ctx); err != nil {
+		g.Logger.WithError(err).Error("GCS permission verification failed after bucket change")
+		return configChanged, err
+	}
+	g.Logger.Info("GCS permissions verified successfully")
+
+	return configChanged, nil
+}
+
+// Compile-time interface check. A typed-nil assertion avoids constructing a
+// real GCS client (and silently discarding its error) at package init.
+var (
+	_ common.StorageBackend = (*GCSStorage)(nil)
+)
 diff --git a/platform/dbops/binaries/weaviate-src/modules/usage-s3/module.go b/platform/dbops/binaries/weaviate-src/modules/usage-s3/module.go
new file mode 100644
index 0000000000000000000000000000000000000000..31517ee5ba13e4ee0a34f28186315fecf87bf208
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/usage-s3/module.go
@@ -0,0 +1,160 @@
+// _ _
+// __ _____ __ ___ ___ __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+// \ V V / __/ (_| |\ V /| | (_| | || __/
+// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+// CONTACT: hello@weaviate.io
+//
+
+package usages3
+
+import (
+	"context"
+	"fmt"
+	"os"
+
+	"github.com/weaviate/weaviate/entities/modulecapabilities"
+	"github.com/weaviate/weaviate/entities/moduletools"
+	"github.com/weaviate/weaviate/usecases/config"
+	"github.com/weaviate/weaviate/usecases/config/runtime"
+	common "github.com/weaviate/weaviate/usecases/modulecomponents/usage"
+)
+
+const (
+	Name = "usage-s3"
+)
+
+// module is the S3 usage module using the common base
+type module struct {
+	*common.BaseModule
+	s3Storage *S3Storage
+}
+
+func New() *module {
+	return &module{}
+}
+
+func (m *module) SetUsageService(usageService any) {
+	m.BaseModule.SetUsageService(usageService) // NOTE(review): panics if called before Init (BaseModule is nil until then) - confirm call order
+}
+
+func (m *module) Name() string {
+	return Name
+}
+
+func (m *module) Type() modulecapabilities.ModuleType {
+	return modulecapabilities.Usage
+}
+
+func (m *module) Init(ctx context.Context, params moduletools.ModuleInitParams) error {
+	// Parse usage configuration from environment
+	config := params.GetConfig() // NOTE(review): local shadows the imported config package for the rest of Init
+	if err := common.ParseCommonUsageConfig(config); err != nil {
+		return err
+	}
+	if err := parseS3Config(config); err != nil {
+		return err
+	}
+
+	// Validate required configuration
+	if config.Usage.S3Bucket.Get() == "" && !config.RuntimeOverrides.Enabled {
+		return fmt.Errorf("S3 bucket name not
configured - set USAGE_S3_BUCKET environment variable or enable runtime overrides with RUNTIME_OVERRIDES_ENABLED=true")
+	}
+
+	// Initialize logger
+	logger := params.GetLogger().WithField("component", Name)
+
+	// Create metrics first
+	metrics := common.NewMetrics(params.GetMetricsRegisterer(), Name)
+
+	// Create S3 storage backend with metrics
+	s3Storage, err := NewS3Storage(ctx, logger, metrics)
+	if err != nil {
+		return fmt.Errorf("failed to create S3 storage: %w", err)
+	}
+
+	m.s3Storage = s3Storage
+
+	// Update storage configuration (this may have empty bucket initially)
+	storageConfig := m.buildS3Config(config)
+	if _, err := m.s3Storage.UpdateConfig(storageConfig); err != nil {
+		return fmt.Errorf("failed to configure S3 storage: %w", err)
+	}
+
+	// Create base module with S3 storage
+	m.BaseModule = common.NewBaseModule(Name, m.s3Storage)
+
+	// Initialize base module with metrics
+	if err := m.InitializeCommon(ctx, config, logger, metrics); err != nil {
+		return err
+	}
+
+	// Build log fields, omitting empty values for cleaner output
+	logFields := map[string]interface{}{
+		"node_id":             config.Cluster.Hostname,
+		"collection_interval": config.Usage.ScrapeInterval.Get().String(),
+	}
+
+	if bucket := config.Usage.S3Bucket.Get(); bucket != "" {
+		logFields["s3_bucket"] = bucket
+	} else if config.RuntimeOverrides.Enabled {
+		logFields["s3_bucket"] = "[pending runtime overrides]"
+	}
+
+	if prefix := config.Usage.S3Prefix.Get(); prefix != "" {
+		logFields["s3_prefix"] = prefix
+	} else if config.RuntimeOverrides.Enabled {
+		logFields["s3_prefix"] = "[pending runtime overrides]"
+	}
+
+	logger.WithFields(logFields).Info("initializing usage-s3 module with configuration")
+
+	return nil
+}
+
+// buildS3Config maps the runtime usage config onto the storage backend config.
+func (m *module) buildS3Config(cfg *config.Config) common.StorageConfig {
+	storageConfig := common.StorageConfig{
+		NodeID: cfg.Cluster.Hostname,
+	}
+
+	if cfg.Usage.S3Bucket != nil {
+		storageConfig.Bucket = cfg.Usage.S3Bucket.Get()
+	}
+	if cfg.Usage.S3Prefix != nil {
+		storageConfig.Prefix = cfg.Usage.S3Prefix.Get()
+	}
+	if cfg.Usage.PolicyVersion != nil {
+		storageConfig.Version = cfg.Usage.PolicyVersion.Get()
+	}
+
+	return storageConfig
+}
+
+// parseS3Config resolves bucket/prefix: env vars win, then existing config.
+func parseS3Config(cfg *config.Config) error {
+	s3Bucket := ""
+	if v := os.Getenv("USAGE_S3_BUCKET"); v != "" {
+		s3Bucket = v
+	} else if cfg.Usage.S3Bucket != nil {
+		s3Bucket = cfg.Usage.S3Bucket.Get()
+	}
+	cfg.Usage.S3Bucket = runtime.NewDynamicValue(s3Bucket)
+
+	s3Prefix := ""
+	if v := os.Getenv("USAGE_S3_PREFIX"); v != "" {
+		s3Prefix = v
+	} else if cfg.Usage.S3Prefix != nil {
+		s3Prefix = cfg.Usage.S3Prefix.Get()
+	}
+	cfg.Usage.S3Prefix = runtime.NewDynamicValue(s3Prefix)
+
+	return nil
+}
+
+// verify we implement the required interfaces
+var (
+	_ = modulecapabilities.ModuleWithClose(New())
+	_ = modulecapabilities.ModuleWithUsageService(New())
+)
 diff --git a/platform/dbops/binaries/weaviate-src/modules/usage-s3/module_test.go b/platform/dbops/binaries/weaviate-src/modules/usage-s3/module_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..9ac3e861fcd56c1a8867092320ee89aa89cd062c
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/usage-s3/module_test.go
@@ -0,0 +1,606 @@
+// _ _
+// __ _____ __ ___ ___ __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+// \ V V / __/ (_| |\ V /| | (_| | || __/
+// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+// CONTACT: hello@weaviate.io
+//
+
+package usages3
+
+import (
+	"context"
+	"os"
+	"testing"
+	"time"
+
+	"github.com/prometheus/client_golang/prometheus"
+	"github.com/sirupsen/logrus"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/require"
+
+	clusterusage "github.com/weaviate/weaviate/cluster/usage"
+	"github.com/weaviate/weaviate/entities/moduletools"
+	"github.com/weaviate/weaviate/usecases/cluster"
+	"github.com/weaviate/weaviate/usecases/config"
+	"github.com/weaviate/weaviate/usecases/config/runtime"
+	common "github.com/weaviate/weaviate/usecases/modulecomponents/usage"
+	usagetypes "github.com/weaviate/weaviate/usecases/modulecomponents/usage/types"
+)
+
+func TestModule_Name(t *testing.T) {
+	m := New()
+	assert.Equal(t, "usage-s3", m.Name())
+}
+
+func TestModule_Init_Success(t *testing.T) {
+	// Set up environment for S3 bucket
+	os.Setenv("USAGE_S3_BUCKET", "test-bucket") // NOTE(review): other tests in this file use t.Setenv for automatic cleanup - consider aligning
+	defer os.Unsetenv("USAGE_S3_BUCKET")
+
+	// Set up localhost environment to avoid real AWS authentication
+	os.Setenv("CLUSTER_IN_LOCALHOST", "true")
+	defer os.Unsetenv("CLUSTER_IN_LOCALHOST")
+
+	config := &config.Config{
+		Cluster: cluster.Config{
+			Hostname: "test-node",
+		},
+		Usage: usagetypes.UsageConfig{
+			ScrapeInterval: runtime.NewDynamicValue(5 * time.Minute),
+			PolicyVersion:  runtime.NewDynamicValue("2025-06-01"),
+		},
+	}
+
+	logger := logrus.New()
+	registry := prometheus.NewRegistry()
+
+	params := moduletools.NewMockModuleInitParams(t)
+	params.EXPECT().GetLogger().Return(logger)
+	params.EXPECT().GetConfig().Return(config)
+	params.EXPECT().GetMetricsRegisterer().Return(registry)
+
+	m := New()
+	err := m.Init(context.Background(), params)
+	require.NoError(t, err)
+
+	assert.NotNil(t, m.BaseModule)
+	assert.NotNil(t, m.s3Storage)
+}
+
+func TestModule_Init_MissingBucket(t *testing.T) {
+	// Test case where environment variable is not set at all
+	config := &config.Config{
+		Cluster: cluster.Config{
+			Hostname: "test-node",
+		},
+		Usage: usagetypes.UsageConfig{
+			ScrapeInterval: runtime.NewDynamicValue(5 * time.Minute),
+			PolicyVersion:  runtime.NewDynamicValue("2025-06-01"),
+		},
+	}
+
+	params := moduletools.NewMockModuleInitParams(t)
+	params.EXPECT().GetConfig().Return(config)
+
+	m := New()
+	err := m.Init(context.Background(), params)
+	require.Error(t, err)
+	assert.Contains(t, err.Error(), "S3 bucket name not configured")
+}
+
+func TestModule_Init_MissingEnvVars(t *testing.T) {
+	config := &config.Config{
+		Cluster: cluster.Config{
+			Hostname: "test-node",
+		},
+		Usage: usagetypes.UsageConfig{
+			ScrapeInterval: runtime.NewDynamicValue(5 * time.Minute),
+			PolicyVersion:  runtime.NewDynamicValue("2025-06-01"),
+		},
+	}
+
+	params := moduletools.NewMockModuleInitParams(t)
+	// Only expect GetConfig() to be called since the method returns early on validation failure
+	params.EXPECT().GetConfig().Return(config)
+
+	m := New()
+	err := m.Init(context.Background(), params)
+	require.Error(t, err)
+	assert.Contains(t, err.Error(), "S3 bucket name not configured")
+}
+
+func TestModule_Init_MissingHostname(t *testing.T) {
+	t.Setenv("USAGE_S3_BUCKET", "test-bucket")
+	t.Setenv("CLUSTER_IN_LOCALHOST", "true")
+
+	params := moduletools.NewMockModuleInitParams(t)
+	params.EXPECT().GetConfig().Return(&config.Config{
+		Cluster: cluster.Config{
+			Hostname: "", // Empty hostname
+		},
+		Usage: usagetypes.UsageConfig{
+			ScrapeInterval: runtime.NewDynamicValue(5 * time.Minute),
+			PolicyVersion:  runtime.NewDynamicValue("2025-06-01"),
+		},
+	})
+	params.EXPECT().GetLogger().Return(logrus.New())
+	params.EXPECT().GetMetricsRegisterer().Return(prometheus.NewRegistry())
+
+	m := New()
+	err := m.Init(context.Background(), params)
+	require.Error(t, err)
+	assert.Contains(t, err.Error(), "cluster hostname is not set") // error presumably produced by InitializeCommon - TODO confirm
+}
+
+func TestModule_Init_ConfigurationParsing(t *testing.T) {
+	// Set up environment variables
+	t.Setenv("USAGE_S3_BUCKET", "env-bucket")
+	t.Setenv("USAGE_S3_PREFIX", "env-prefix")
+	t.Setenv("USAGE_SCRAPE_INTERVAL", "10m")
+	t.Setenv("USAGE_POLICY_VERSION", "2025-06-01")
+	t.Setenv("CLUSTER_IN_LOCALHOST", "true")
+
+	config := &config.Config{
+		Cluster: cluster.Config{
+			Hostname: "test-node",
+		},
+		Usage: usagetypes.UsageConfig{
+			ScrapeInterval: runtime.NewDynamicValue(5 * time.Minute), // env vars take priority
+			PolicyVersion:  runtime.NewDynamicValue("2025-01-01"),    // env vars take priority
+		},
+	}
+
+	logger := logrus.New()
+	registry := prometheus.NewRegistry()
+
+	params := moduletools.NewMockModuleInitParams(t)
+	params.EXPECT().GetLogger().Return(logger)
+	params.EXPECT().GetConfig().Return(config)
+	params.EXPECT().GetMetricsRegisterer().Return(registry)
+
+	m := New()
+	err := m.Init(context.Background(), params)
+	require.NoError(t, err)
+
+	// Verify that environment variables take priority over existing config values
+	// S3 bucket and prefix will use env vars since config has no existing values for them
+	assert.Equal(t, "env-bucket", config.Usage.S3Bucket.Get())
+	assert.Equal(t, "env-prefix", config.Usage.S3Prefix.Get())
+	// Environment variables take priority over existing config values
+	assert.Equal(t, 10*time.Minute, config.Usage.ScrapeInterval.Get())
+	assert.Equal(t, "2025-06-01", config.Usage.PolicyVersion.Get())
+}
+
+func TestModule_Init_InvalidScrapeInterval(t *testing.T) {
+	// Set up environment with invalid scrape interval
+	t.Setenv("USAGE_S3_BUCKET", "test-bucket")
+	t.Setenv("USAGE_SCRAPE_INTERVAL", "invalid-duration")
+	t.Setenv("CLUSTER_IN_LOCALHOST", "true")
+
+	config := &config.Config{
+		Cluster: cluster.Config{
+			Hostname: "test-node",
+		},
+		Usage: usagetypes.UsageConfig{
+			PolicyVersion: runtime.NewDynamicValue("2025-06-01"),
+		},
+	}
+
+	params := moduletools.NewMockModuleInitParams(t)
+	params.EXPECT().GetConfig().Return(config)
+
+	m := New()
+	err := m.Init(context.Background(), params)
+	require.Error(t, err)
+	assert.Contains(t, err.Error(), "invalid USAGE_SCRAPE_INTERVAL")
+}
+
+func TestModule_BuildS3Config(t *testing.T) {
+	m := New()
+
+	config := &config.Config{
+		Cluster: cluster.Config{
+			Hostname: "test-node",
+		},
+		Usage: usagetypes.UsageConfig{
+			S3Bucket:      runtime.NewDynamicValue("test-bucket"),
+			S3Prefix:      runtime.NewDynamicValue("test-prefix"),
+			PolicyVersion: runtime.NewDynamicValue("2025-06-01"),
+		},
+	}
+
+	storageConfig := m.buildS3Config(config)
+
+	assert.Equal(t, "test-node", storageConfig.NodeID)
+	assert.Equal(t, "test-bucket", storageConfig.Bucket)
+	assert.Equal(t, "test-prefix", storageConfig.Prefix)
+	assert.Equal(t, "2025-06-01", storageConfig.Version)
+}
+
+func TestModule_BuildS3Config_EmptyValues(t *testing.T) {
+	m := New()
+
+	config := &config.Config{
+		Cluster: cluster.Config{
+			Hostname: "test-node",
+		},
+		Usage: usagetypes.UsageConfig{
+			// All values are nil/empty
+		},
+	}
+
+	storageConfig := m.buildS3Config(config)
+
+	assert.Equal(t, "test-node", storageConfig.NodeID)
+	assert.Equal(t, "", storageConfig.Bucket)
+	assert.Equal(t, "", storageConfig.Prefix)
+	assert.Equal(t, "", storageConfig.Version)
+}
+
+func TestModule_SetUsageService(t *testing.T) {
+	os.Setenv("USAGE_S3_BUCKET", "test-bucket") // NOTE(review): t.Setenv would remove the need for the deferred cleanup below
+	os.Setenv("CLUSTER_IN_LOCALHOST", "true")
+	defer func() {
+		os.Unsetenv("USAGE_S3_BUCKET")
+		os.Unsetenv("CLUSTER_IN_LOCALHOST")
+	}()
+
+	m := New()
+
+	config := &config.Config{
+		Cluster: cluster.Config{
+			Hostname: "test-node",
+		},
+		Usage: usagetypes.UsageConfig{
+			ScrapeInterval: runtime.NewDynamicValue(5 * time.Minute),
+			PolicyVersion:  runtime.NewDynamicValue("2025-06-01"),
+		},
+	}
+
+	logger := logrus.New()
+	registry := prometheus.NewRegistry()
+
+	params := moduletools.NewMockModuleInitParams(t)
+	params.EXPECT().GetLogger().Return(logger)
+	params.EXPECT().GetConfig().Return(config)
+	params.EXPECT().GetMetricsRegisterer().Return(registry)
+
+	// Initialize the module first
+	err := m.Init(context.Background(), params)
+	require.NoError(t, err)
+
+	// Test with valid service after initialization
+	usageService := clusterusage.NewMockService(t)
+	usageService.EXPECT().SetJitterInterval(mock.Anything).Return()
+
+	assert.NotPanics(t, func() {
+		m.SetUsageService(usageService)
+	})
+	assert.NotNil(t, m.BaseModule)
+}
+
+func TestParseS3Config(t *testing.T) {
+	tests := []struct {
+		name             string
+		envVars          map[string]string
+		existingS3Bucket string
+		existingS3Prefix string
+		expectedS3Bucket string
+		expectedS3Prefix string
+	}{
+		{
+			name: "all S3 environment variables set",
+			envVars: map[string]string{
+				"USAGE_S3_BUCKET": "env-bucket",
+				"USAGE_S3_PREFIX": "env-prefix",
+			},
+			existingS3Bucket: "existing-bucket",
+			existingS3Prefix: "existing-prefix",
+			expectedS3Bucket: "env-bucket", // env vars take priority
+			expectedS3Prefix: "env-prefix", // env vars take priority
+		},
+		{
+			name:             "no environment variables, empty config",
+			envVars:          map[string]string{},
+			existingS3Bucket: "",
+			existingS3Prefix: "",
+			expectedS3Bucket: "", // no existing config, no env var → empty string
+			expectedS3Prefix: "", // no existing config, no env var → empty string
+		},
+		{
+			name:             "no environment variables but config has existing values",
+			envVars:          map[string]string{},
+			existingS3Bucket: "existing-bucket",
+			existingS3Prefix: "existing-prefix",
+			expectedS3Bucket: "existing-bucket", // existing config takes priority
+			expectedS3Prefix: "existing-prefix", // existing config takes priority
+		},
+		{
+			name: "partial environment variables",
+			envVars: map[string]string{
+				"USAGE_S3_BUCKET": "env-bucket",
+				// USAGE_S3_PREFIX not set
+			},
+			existingS3Bucket: "existing-bucket",
+			existingS3Prefix: "existing-prefix",
+			expectedS3Bucket: "env-bucket",      // env var takes priority
+			expectedS3Prefix: "existing-prefix", // no env var, use existing config
+		},
+		{
+			name: "environment variables with no existing config",
+			envVars: map[string]string{
+				"USAGE_S3_BUCKET": "env-bucket",
+				"USAGE_S3_PREFIX": "env-prefix",
+			},
+			existingS3Bucket: "",
+			existingS3Prefix: "",
+			expectedS3Bucket: "env-bucket", // no existing config, use env var
+			expectedS3Prefix: "env-prefix", // no existing config, use env var
+		},
+		{
+			name: "partial existing config",
+			envVars: map[string]string{
+				"USAGE_S3_BUCKET": "env-bucket",
+				"USAGE_S3_PREFIX": "env-prefix",
+			},
+			existingS3Bucket: "existing-bucket",
+			existingS3Prefix: "",           // no existing prefix
+			expectedS3Bucket: "env-bucket", // env var takes priority
+			expectedS3Prefix: "env-prefix", // env var takes priority
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			// Set environment variables
+			for k, v := range tt.envVars {
+				t.Setenv(k, v)
+			}
+
+			config := &config.Config{
+				Usage: usagetypes.UsageConfig{
+					ScrapeInterval: runtime.NewDynamicValue(5 * time.Minute),
+					PolicyVersion:  runtime.NewDynamicValue("2025-06-01"),
+				},
+			}
+
+			// Set existing values if specified
+			if tt.existingS3Bucket != "" {
+				config.Usage.S3Bucket = runtime.NewDynamicValue(tt.existingS3Bucket)
+			}
+			if tt.existingS3Prefix != "" {
+				config.Usage.S3Prefix = runtime.NewDynamicValue(tt.existingS3Prefix)
+			}
+
+			err := parseS3Config(config)
+			assert.NoError(t, err)
+
+			// Verify expected results - parseS3Config always creates DynamicValue objects
+			require.NotNil(t, config.Usage.S3Bucket)
+			assert.Equal(t, tt.expectedS3Bucket, config.Usage.S3Bucket.Get())
+
+			require.NotNil(t, config.Usage.S3Prefix)
+			assert.Equal(t, tt.expectedS3Prefix, config.Usage.S3Prefix.Get())
+
+			// These should always be preserved
+			assert.Equal(t, 5*time.Minute, config.Usage.ScrapeInterval.Get())
+			assert.Equal(t, "2025-06-01", config.Usage.PolicyVersion.Get())
+		})
+	}
+}
+
+func TestParseCommonUsageConfig(t *testing.T) {
+	tests := []struct {
+		name             string
+		envVars          map[string]string
+		existingInterval time.Duration
+		existingVersion  string
+		expectedInterval time.Duration
+		expectedVersion  string
+		wantErr          bool
+	}{
+		{
+			name: "all common environment variables set",
+			envVars: map[string]string{
+				"USAGE_SCRAPE_INTERVAL": "2h",
+				"USAGE_POLICY_VERSION":  "2025-06-01",
+			},
+			existingInterval: 5 * time.Minute,
+			existingVersion:  "2025-01-01",
+			expectedInterval: 2 * time.Hour, // env vars take priority
+			expectedVersion:  "2025-06-01",  // env vars take priority
+		},
+		{
+			name:             "no environment variables, preserve existing values",
+			envVars:          map[string]string{},
+			existingInterval: 5 * time.Minute,
+			existingVersion:  "2025-01-01",
+			expectedInterval: 5 * time.Minute, // preserve existing
+			expectedVersion:  "2025-01-01",    // preserve existing
+		},
+		{
+			name:             "no environment variables, no existing values",
+			envVars:          map[string]string{},
+			existingInterval: 0,
+			existingVersion:  "",
+			expectedInterval: common.DefaultCollectionInterval, // use default
+			expectedVersion:  common.DefaultPolicyVersion,      // use default
+		},
+		{
+			name: "environment variables with no existing values",
+			envVars: map[string]string{
+				"USAGE_SCRAPE_INTERVAL": "2h",
+				"USAGE_POLICY_VERSION":  "2025-06-01",
+			},
+			existingInterval: 0,
+			existingVersion:  "",
+			expectedInterval: 2 * time.Hour, // no existing config, use env var
+			expectedVersion:  "2025-06-01",  // no existing config, use env var
+		},
+		{
+			name: "invalid scrape interval",
+			envVars: map[string]string{
+				"USAGE_SCRAPE_INTERVAL": "invalid-duration",
+			},
+			wantErr: true,
+		},
+		{
+			name: "partial environment variables with partial existing config",
+			envVars: map[string]string{
+				"USAGE_SCRAPE_INTERVAL": "3h",
+				"USAGE_POLICY_VERSION":  "2025-06-01",
+			},
+			existingInterval: 5 * time.Minute,
+			existingVersion:  "",            // no existing version
+			expectedInterval: 3 * time.Hour, // env var takes priority
+			expectedVersion:  "2025-06-01",  // no existing config, use env var
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			// Set environment variables
+			for k, v := range tt.envVars {
+				t.Setenv(k, v)
+			}
+
+			config := &config.Config{
+				Usage: usagetypes.UsageConfig{},
+			}
+
+			// Set existing values if specified
+			if tt.existingInterval > 0 {
+				config.Usage.ScrapeInterval = runtime.NewDynamicValue(tt.existingInterval)
+			}
+			if tt.existingVersion != "" {
+				config.Usage.PolicyVersion = runtime.NewDynamicValue(tt.existingVersion)
+			}
+
+			err := common.ParseCommonUsageConfig(config)
+			if tt.wantErr {
+				assert.Error(t, err)
+				return
+			}
+
+			assert.NoError(t, err)
+			require.NotNil(t, config.Usage.ScrapeInterval)
+			assert.Equal(t, tt.expectedInterval, config.Usage.ScrapeInterval.Get())
+			require.NotNil(t, config.Usage.PolicyVersion)
+			assert.Equal(t, tt.expectedVersion, config.Usage.PolicyVersion.Get())
+		})
+	}
+}
+
+func TestModule_Type(t *testing.T) {
+	m := New()
+	assert.Equal(t, "Usage", string(m.Type()))
+}
+
+func TestModule_InterfaceCompliance(t *testing.T) {
+	// Test that module implements required interfaces
+	m := New()
+	assert.NotNil(t, m)
+
+	// These should compile without errors if interfaces are implemented correctly
+	_ = m.Name()
+	_ = m.Type()
+	m.SetUsageService("test")
+}
+
+func TestModule_MetricsPrefixGeneration(t *testing.T) {
+	// Test that metrics are created with correct prefix for S3 module
+	registry := prometheus.NewRegistry()
+
+	// Create metrics with the S3 module name
+	metrics := common.NewMetrics(registry, "usage-s3")
+
+	// Verify metrics are created
+	assert.NotNil(t, metrics)
+	assert.NotNil(t, metrics.OperationTotal)
+	assert.NotNil(t, metrics.OperationLatency)
+	assert.NotNil(t, metrics.ResourceCount)
+	assert.NotNil(t, metrics.UploadedFileSize)
+
+	// Trigger some metric values to make them appear in the registry
+	metrics.OperationTotal.WithLabelValues("test", "success").Inc()
+	metrics.ResourceCount.WithLabelValues("collections").Set(1)
+	metrics.UploadedFileSize.Set(100)
+
+	// Gather metrics to verify names
+	metricFamilies, err := registry.Gather()
+	require.NoError(t, err)
+
+	// Debug: print all found metrics
+	foundMetrics := make(map[string]bool)
+	for _, mf := range metricFamilies {
+		foundMetrics[mf.GetName()] = true
+		t.Logf("Found metric: %s", mf.GetName())
+	}
+
+	// Check that metrics have correct prefixes
+	expectedPrefixes := []string{
+		"weaviate_usage_s3_operations_total",
+		"weaviate_usage_s3_resource_count",
+		"weaviate_usage_s3_uploaded_file_size_bytes",
+	}
+
+	for _, expectedName := range expectedPrefixes {
+		assert.True(t, foundMetrics[expectedName], "Expected metric %s not found", expectedName)
+	}
+}
+
+func TestModule_VerifyPermissions_OptIn(t *testing.T) {
+	tests := []struct {
+		name                 string
+		envVar               string
+		expectedVerification bool
+		wantErr              bool
+	}{
+		{
+			name:                 "default behavior when env var not set",
+			envVar:               "",
+			expectedVerification: false,
+			wantErr:              false,
+		},
+		{
+			name:                 "explicitly enable verification",
+			envVar:               "true",
+			expectedVerification: true,
+			wantErr:              false,
+		},
+		{
+			name:                 "explicitly disable verification",
+			envVar:               "false",
+			expectedVerification: false,
+			wantErr:              false,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			// Set environment variable
+			if tt.envVar != "" {
+				t.Setenv("USAGE_VERIFY_PERMISSIONS", tt.envVar)
+			}
+
+			config := &config.Config{
+				Usage: usagetypes.UsageConfig{},
+			}
+
+			err := common.ParseCommonUsageConfig(config)
+			if tt.wantErr {
+				assert.Error(t, err)
+				return
+			}
+
+			assert.NoError(t, err)
+			require.NotNil(t, config.Usage.VerifyPermissions)
+			assert.Equal(t, tt.expectedVerification, config.Usage.VerifyPermissions.Get())
+		})
+	}
+}
 diff --git a/platform/dbops/binaries/weaviate-src/modules/usage-s3/storage.go b/platform/dbops/binaries/weaviate-src/modules/usage-s3/storage.go
new file mode 100644
index 0000000000000000000000000000000000000000..5647d73cfe2373a92339c23a58339b5c254dce09
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/modules/usage-s3/storage.go
@@ -0,0 +1,171 @@
+// _ _
+// __ _____ __ ___ ___ __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+// \ V V / __/ (_| |\ V /| | (_| | || __/
+// \_/\_/ \___|\__,_|
\_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package usages3 + +import ( + "bytes" + "context" + "fmt" + "os" + "strings" + "time" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/s3" + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/cluster/usage/types" + common "github.com/weaviate/weaviate/usecases/modulecomponents/usage" +) + +// S3Storage implements the StorageBackend interface for S3 +type S3Storage struct { + *common.BaseStorage + s3Client *s3.Client +} + +// NewS3Storage creates a new S3 storage backend +func NewS3Storage(ctx context.Context, logger logrus.FieldLogger, metrics *common.Metrics) (*S3Storage, error) { + cfg, err := config.LoadDefaultConfig(ctx) + if err != nil { + return nil, fmt.Errorf("failed to load AWS config: %w", err) + } + + s3Client := s3.NewFromConfig(cfg, func(o *s3.Options) { + if endpoint := os.Getenv("AWS_ENDPOINT"); endpoint != "" { + if !strings.HasPrefix(endpoint, "http://") && !strings.HasPrefix(endpoint, "https://") { + endpoint = "http://" + endpoint + } + o.BaseEndpoint = aws.String(endpoint) + o.UsePathStyle = true + } + }) + + return &S3Storage{ + BaseStorage: common.NewBaseStorage(logger, metrics), + s3Client: s3Client, + }, nil +} + +// VerifyPermissions checks if the backend can access the storage location +func (s *S3Storage) VerifyPermissions(ctx context.Context) error { + if s.s3Client == nil { + return fmt.Errorf("S3 client is not initialized") + } + + // During initialization, bucket may not be configured yet due to runtime overrides + // being loaded after module initialization. 
+ if s.BucketName == "" { + s.Logger.Debug("S3 bucket not configured yet - skipping permission verification") + return nil + } + + s.LogVerificationStart() + + if s.IsLocalhostEnvironment() { + return nil + } + + // Create context with timeout to report early in case of invalid permissions + timeoutCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + // S3-specific permission check + _, err := s.s3Client.ListObjectsV2(timeoutCtx, &s3.ListObjectsV2Input{ + Bucket: aws.String(s.BucketName), + MaxKeys: aws.Int32(1), + }) + if err != nil { + return fmt.Errorf("S3 permission check failed for bucket %s: %w", s.BucketName, err) + } + + s.LogVerificationSuccess() + return nil +} + +// UploadUsageData uploads the usage data to the storage backend +func (s *S3Storage) UploadUsageData(ctx context.Context, usage *types.Report) error { + if s.s3Client == nil { + return fmt.Errorf("S3 client is not initialized") + } + + data, err := s.MarshalUsageData(usage) + if err != nil { + return err + } + + _, err = s.s3Client.PutObject(ctx, &s3.PutObjectInput{ + Bucket: aws.String(s.BucketName), + Key: aws.String(s.ConstructObjectKey(usage.CollectingTime)), + Body: bytes.NewReader(data), + ContentType: aws.String("application/json"), + Metadata: map[string]string{ + "version": usage.Version, + }, + }) + if err != nil { + return fmt.Errorf("failed to upload to S3: %w", err) + } + + s.RecordUploadMetrics(len(data)) + return nil +} + +// Close cleans up resources +func (s *S3Storage) Close() error { + return nil +} + +// UpdateConfig updates the backend configuration from the provided config +func (s *S3Storage) UpdateConfig(config common.StorageConfig) (bool, error) { + // Store old bucket name to detect changes + oldBucketName := s.BucketName + + // Update the configuration + configChanged := s.UpdateCommonConfig(config) + + if !configChanged { + return configChanged, nil + } + + // If bucket name changed, verify permissions + if oldBucketName != s.BucketName { + 
s.Logger.WithFields(logrus.Fields{ + "old_bucket": oldBucketName, + "new_bucket": s.BucketName, + }).Info("S3 bucket name changed") + } + + if !config.VerifyPermissions { + s.Logger.Info("permission verification skipped after bucket change (disabled by configuration)") + return configChanged, nil + } + + s.Logger.Info("verifying permissions") + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + if err := s.VerifyPermissions(ctx); err != nil { + s.Logger.WithError(err).Error("S3 permission verification failed after bucket change") + return configChanged, err + } + s.Logger.Info("S3 permissions verified successfully") + + return configChanged, nil +} + +// verify we implement the required interfaces +var ( + s3Storage, _ = NewS3Storage(context.Background(), logrus.New(), &common.Metrics{}) + _ = common.StorageBackend(s3Storage) +) diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/actions/add_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/add_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ff8b58fed213ebe56d511aa40317ab8d8ee4deb6 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/add_test.go @@ -0,0 +1,131 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +// executed in setup_test.go +func addingObjects(t *testing.T) { + class := "TestObject" + t.Run("can create object", func(t *testing.T) { + // Set all object values to compare + objectTestString := "Test string" + objectTestInt := 1 + objectTestBoolean := true + objectTestNumber := 1.337 + objectTestDate := "2017-10-06T08:15:30+01:00" + + params := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + Class: class, + Properties: map[string]interface{}{ + "testString": objectTestString, + "testWholeNumber": objectTestInt, + "testTrueFalse": objectTestBoolean, + "testNumber": objectTestNumber, + "testDateTime": objectTestDate, + }, + }) + + resp, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + + // Ensure that the response is OK + helper.AssertRequestOk(t, resp, err, func() { + object := resp.Payload + _, err := uuid.Parse(object.ID.String()) + assert.NoError(t, err) + + schema, ok := object.Properties.(map[string]interface{}) + if !ok { + t.Fatal("The returned schema is not an JSON object") + } + + testWholeNumber, _ := schema["testWholeNumber"].(json.Number).Int64() + testNumber, _ := schema["testNumber"].(json.Number).Float64() + + // Check whether the returned information is the same as the data added + assert.Equal(t, objectTestString, schema["testString"]) + assert.Equal(t, objectTestInt, int(testWholeNumber)) + assert.Equal(t, objectTestBoolean, schema["testTrueFalse"]) + assert.Equal(t, objectTestNumber, testNumber) + assert.Equal(t, objectTestDate, schema["testDateTime"]) + }) + }) + + t.Run("can create and get object", func(t *testing.T) { + objectTestString := "Test string" + objectTestInt := 1 + objectTestBoolean := 
true + objectTestNumber := 1.337 + objectTestDate := "2017-10-06T08:15:30+01:00" + + objectID := helper.AssertCreateObject(t, class, map[string]interface{}{ + "testString": objectTestString, + "testWholeNumber": objectTestInt, + "testTrueFalse": objectTestBoolean, + "testNumber": objectTestNumber, + "testDateTime": objectTestDate, + }) + helper.AssertGetObjectEventually(t, class, objectID) + + // Now fetch the object + getResp, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams().WithID(objectID), nil) + + helper.AssertRequestOk(t, getResp, err, func() { + object := getResp.Payload + + schema, ok := object.Properties.(map[string]interface{}) + if !ok { + t.Fatal("The returned schema is not an JSON object") + } + + testWholeNumber, _ := schema["testWholeNumber"].(json.Number).Int64() + testNumber, _ := schema["testNumber"].(json.Number).Float64() + + // Check whether the returned information is the same as the data added + assert.Equal(t, objectTestString, schema["testString"]) + assert.Equal(t, objectTestInt, int(testWholeNumber)) + assert.Equal(t, objectTestBoolean, schema["testTrueFalse"]) + assert.Equal(t, objectTestNumber, testNumber) + assert.Equal(t, objectTestDate, schema["testDateTime"]) + }) + }) + + t.Run("can add single ref", func(t *testing.T) { + firstID := helper.AssertCreateObject(t, class, map[string]interface{}{}) + helper.AssertGetObjectEventually(t, class, firstID) + + secondID := helper.AssertCreateObject(t, "TestObjectTwo", map[string]interface{}{ + "testString": "stringy", + "testReference": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", firstID), + }, + }, + }) + + secondObject := helper.AssertGetObjectEventually(t, "TestObjectTwo", secondID) + + singleRef := secondObject.Properties.(map[string]interface{})["testReference"].([]interface{})[0].(map[string]interface{}) + assert.Equal(t, singleRef["beacon"].(string), fmt.Sprintf("weaviate://localhost/TestObject/%s", firstID)) + }) 
+} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/actions/delete_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/delete_test.go new file mode 100644 index 0000000000000000000000000000000000000000..fdf2fdefaf786eefee203b6632a4ef4038d80088 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/delete_test.go @@ -0,0 +1,38 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +// Acceptance tests for objects + +import ( + "testing" + + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/test/helper" +) + +func removingObjects(t *testing.T) { + objectId := helper.AssertCreateObject(t, "TestObject", map[string]interface{}{}) + + // Yes, it's created + _ = helper.AssertGetObjectEventually(t, "TestObject", objectId) + + // Now perorm the the deletion + delResp, err := helper.Client(t).Objects.ObjectsDelete(objects.NewObjectsDeleteParams().WithID(objectId), nil) + helper.AssertRequestOk(t, delResp, err, nil) + + _ = helper.AssertGetObjectFailsEventually(t, "TestObject", objectId) + + // And verify that the object is gone + getResp, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams().WithID(objectId), nil) + helper.AssertRequestFail(t, getResp, err, nil) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/actions/individual_refs_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/individual_refs_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ab0d48115452e1cd274195063bebf596eef76b8c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/individual_refs_test.go @@ -0,0 +1,167 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ 
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +// Acceptance tests for objects + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema/crossref" + "github.com/weaviate/weaviate/test/helper" +) + +// run from setup_test.go +func objectReferences(t *testing.T) { + var ( + class1 = "TestObject" + class2 = "TestObjectTwo" + ) + t.Run("can add reference individually", func(t *testing.T) { + t.Parallel() + + toPointToUuid := helper.AssertCreateObject(t, "TestObject", map[string]interface{}{}) + helper.AssertGetObjectEventually(t, class1, toPointToUuid) + + uuid := helper.AssertCreateObject(t, class2, map[string]interface{}{}) + + // Verify that testReferences is empty + updatedObject := helper.AssertGetObjectEventually(t, class2, uuid) + updatedSchema := updatedObject.Properties.(map[string]interface{}) + assert.Nil(t, updatedSchema["testReferences"]) + + // Append a property reference + params := objects.NewObjectsReferencesCreateParams(). + WithID(uuid). + WithPropertyName("testReferences"). 
+ WithBody(crossref.NewLocalhost(class1, toPointToUuid).SingleRef()) + + updateResp, err := helper.Client(t).Objects.ObjectsReferencesCreate(params, nil) + helper.AssertRequestOk(t, updateResp, err, nil) + + checkThunk := func() interface{} { + resp, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams().WithID(uuid), nil) + if err != nil { + t.Log(err) + return false + } + + updatedSchema = resp.Payload.Properties.(map[string]interface{}) + return updatedSchema["testReferences"] != nil + } + + helper.AssertEventuallyEqual(t, true, checkThunk) + }) + + t.Run("can replace all properties", func(t *testing.T) { + t.Parallel() + + toPointToUuidFirst := helper.AssertCreateObject(t, "TestObject", map[string]interface{}{}) + toPointToUuidLater := helper.AssertCreateObject(t, "TestObject", map[string]interface{}{}) + helper.AssertGetObjectEventually(t, "TestObject", toPointToUuidFirst) + helper.AssertGetObjectEventually(t, "TestObject", toPointToUuidLater) + + uuid := helper.AssertCreateObject(t, "TestObjectTwo", map[string]interface{}{ + "testReferences": models.MultipleRef{ + crossref.NewLocalhost("TestObject", toPointToUuidFirst).SingleRef(), + }, + }) + + // Verify that testReferences is empty + updatedObject := helper.AssertGetObjectEventually(t, "TestObjectTwo", uuid) + updatedSchema := updatedObject.Properties.(map[string]interface{}) + assert.NotNil(t, updatedSchema["testReferences"]) + + // Replace + params := objects.NewObjectsReferencesUpdateParams(). + WithID(uuid). + WithPropertyName("testReferences"). 
+ WithBody(models.MultipleRef{ + crossref.NewLocalhost("TestObject", toPointToUuidLater).SingleRef(), + }) + + updateResp, err := helper.Client(t).Objects.ObjectsReferencesUpdate(params, nil) + helper.AssertRequestOk(t, updateResp, err, nil) + + checkThunk := func() interface{} { + resp, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams().WithID(uuid), nil) + if err != nil { + t.Log(err) + return false + } + + updatedSchema = resp.Payload.Properties.(map[string]interface{}) + return updatedSchema["testReferences"] != nil + } + + helper.AssertEventuallyEqual(t, true, checkThunk) + }) + + t.Run("remove property individually", func(t *testing.T) { + t.Parallel() + + toPointToUuid := helper.AssertCreateObject(t, "TestObject", map[string]interface{}{}) + helper.AssertGetObjectEventually(t, "TestObject", toPointToUuid) + + uuid := helper.AssertCreateObject(t, "TestObjectTwo", map[string]interface{}{ + "testReferences": models.MultipleRef{ + crossref.NewLocalhost("TestObject", toPointToUuid).SingleRef(), + }, + }) + + // Verify that testReferences is not empty + updatedObject := helper.AssertGetObjectEventually(t, "TestObjectTwo", uuid) + updatedSchema := updatedObject.Properties.(map[string]interface{}) + assert.NotNil(t, updatedSchema["testReferences"]) + + // Delete a property reference + params := objects.NewObjectsReferencesDeleteParams(). + WithID(uuid). + WithPropertyName("testReferences"). 
+ WithBody( + crossref.NewLocalhost("TestObject", toPointToUuid).SingleRef(), + ) + + updateResp, err := helper.Client(t).Objects.ObjectsReferencesDelete(params, nil) + helper.AssertRequestOk(t, updateResp, err, nil) + + checkThunk := func() interface{} { + resp, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams().WithID(uuid), nil) + if err != nil { + t.Log(err) + return false + } + + refs := resp.Payload.Properties.(map[string]interface{})["testReferences"] + + if refs == nil { + return true + } + + refsSlice, ok := refs.([]interface{}) + if ok { + return len(refsSlice) == 0 + } + + // neither nil, nor a list + t.Logf("prop %s was neither nil nor a list after deleting, instead we got %#v", "testReferences", refs) + t.Fail() + + return false + } + + helper.AssertEventuallyEqual(t, true, checkThunk) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/actions/network_refs_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/network_refs_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b32288a7bff31ddcd28c21d1ccdc910d7a3cb4e4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/network_refs_test.go @@ -0,0 +1,90 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +// func TestCanAddSingleNetworkRef(t *testing.T) { +// networkRefID := "711da979-4b0b-41e2-bcb8-fcc03554c7c8" +// actionID := assertCreateAction(t, "TestAction", map[string]interface{}{ +// "testReference": []interface{}{ +// map[string]interface{}{ +// "beacon": strfmt.UUID(fmt.Sprintf("weaviate://RemoteWeaviateForAcceptanceTest/things/%s", networkRefID)), +// }, +// }, +// }) +// assertGetActionEventually(t, actionID) + +// t.Run("it can query the resource again to verify the cross ref was added", func(t *testing.T) { +// action := assertGetAction(t, actionID) +// list := action.Schema.(map[string]interface{})["testReference"] +// require.NotNil(t, list, "cross-ref is present") +// rawCref := list.([]interface{})[0] +// cref := rawCref.(map[string]interface{}) +// assert.Equal(t, +// fmt.Sprintf("weaviate://RemoteWeaviateForAcceptanceTest/things/%s", networkRefID), cref["beacon"]) +// }) + +// t.Run("an implicit schema update has happened, we now include the network ref's class", func(t *testing.T) { +// schema := assertGetSchema(t) +// require.NotNil(t, schema.Actions) +// class := assertClassInSchema(t, schema.Actions, "TestAction") +// prop := assertPropertyInClass(t, class, "testReference") +// expectedDataType := []string{"TestThing", "RemoteWeaviateForAcceptanceTest/Instruments"} +// assert.Equal(t, expectedDataType, prop.DataType, "prop should have old and newly added dataTypes") +// }) +// } + +// func TestCanPatchSingleNetworkRef(t *testing.T) { +// t.Parallel() + +// actionID := assertCreateAction(t, "TestAction", nil) +// assertGetActionEventually(t, actionID) +// networkRefID := "711da979-4b0b-41e2-bcb8-fcc03554c7c8" + +// op := "add" +// path := "/schema/testReference" + +// patch := &models.PatchDocument{ +// Op: &op, +// Path: &path, +// Value: []interface{}{ +// map[string]interface{}{ +// "beacon": strfmt.UUID(fmt.Sprintf("weaviate://RemoteWeaviateForAcceptanceTest/things/%s", 
networkRefID)), +// }, +// }, +// } + +// t.Run("it can apply the patch", func(t *testing.T) { +// params := actions.NewActionsPatchParams(). +// WithBody([]*models.PatchDocument{patch}). +// WithID(actionID) +// patchResp, err := helper.Client(t).Actions.ActionsPatch(params, nil) +// helper.AssertRequestOk(t, patchResp, err, nil) +// }) + +// t.Run("it can query the resource again to verify the cross ref was added", func(t *testing.T) { +// patchedAction := assertGetAction(t, actionID) +// list := patchedAction.Schema.(map[string]interface{})["testReference"] +// require.NotNil(t, list, "cross-ref is present") +// rawCref := list.([]interface{})[0] +// cref := rawCref.(map[string]interface{}) +// assert.Equal(t, fmt.Sprintf("weaviate://RemoteWeaviateForAcceptanceTest/things/%s", networkRefID), cref["beacon"]) +// }) + +// t.Run("an implicit schema update has happened, we now include the network ref's class", func(t *testing.T) { +// schema := assertGetSchema(t) +// require.NotNil(t, schema.Actions) +// class := assertClassInSchema(t, schema.Actions, "TestAction") +// prop := assertPropertyInClass(t, class, "testReference") +// expectedDataType := []string{"TestThing", "RemoteWeaviateForAcceptanceTest/Instruments"} +// assert.Equal(t, expectedDataType, prop.DataType, "prop should have old and newly added dataTypes") +// }) +// } diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/actions/object_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/object_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5b491af4b988e915b170374d0f3d4245d0086f08 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/object_test.go @@ -0,0 +1,789 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "errors" + "fmt" + "testing" + "time" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/schema/crossref" + "github.com/weaviate/weaviate/test/helper" +) + +func TestFindObject(t *testing.T) { + t.Parallel() + var ( + cls = "TestObjectHTTPGet" + first_friend = "TestObjectHTTPGetFriendFirst" + second_friend = "TestObjectHTTPGetFriendSecond" + ) + + // test setup + first_uuid := helper.AssertCreateObject(t, first_friend, map[string]interface{}{}) + defer helper.DeleteClassObject(t, first_friend) + second_uuid := helper.AssertCreateObject(t, second_friend, map[string]interface{}{}) + defer helper.DeleteClassObject(t, second_friend) + + helper.AssertCreateObjectClass(t, &models.Class{ + Class: cls, + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "friend", + DataType: []string{first_friend, second_friend}, + }, + }, + }) + // tear down + defer helper.DeleteClassObject(t, cls) + link1 := map[string]interface{}{ + "beacon": crossref.NewLocalhost(first_friend, first_uuid).String(), + "href": fmt.Sprintf("/v1/objects/%s/%s", first_friend, first_uuid), + } + link2 := map[string]interface{}{ + "beacon": crossref.NewLocalhost(second_friend, second_uuid).String(), + "href": fmt.Sprintf("/v1/objects/%s/%s", second_friend, second_uuid), + } + expected := map[string]interface{}{ + "number": json.Number("2"), + "friend": []interface{}{link1, link2}, + } + + uuid := helper.AssertCreateObject(t, cls, expected) + + r := 
objects.NewObjectsClassGetParams().WithID(uuid).WithClassName(cls) + resp, err := helper.Client(t).Objects.ObjectsClassGet(r, nil) + helper.AssertRequestOk(t, resp, err, nil) + assert.Equal(t, expected, resp.Payload.Properties.(map[string]interface{})) + + // check for an object which doesn't exist + unknown_uuid := strfmt.UUID("11110000-0000-0000-0000-000011110000") + r = objects.NewObjectsClassGetParams().WithID(unknown_uuid).WithClassName(cls) + resp, err = helper.Client(t).Objects.ObjectsClassGet(r, nil) + helper.AssertRequestFail(t, resp, err, nil) +} + +func TestHeadObject(t *testing.T) { + t.Parallel() + cls := "TestObjectHTTPHead" + // test setup + helper.DeleteClassObject(t, cls) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: cls, + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + // tear down + defer helper.DeleteClassObject(t, cls) + + uuid := helper.AssertCreateObject(t, cls, map[string]interface{}{ + "name": "John", + }) + + r := objects.NewObjectsClassHeadParams().WithID(uuid).WithClassName(cls) + resp, err := helper.Client(t).Objects.ObjectsClassHead(r, nil) + helper.AssertRequestOk(t, resp, err, nil) + + // check for an object which doesn't exist + unknown_uuid := strfmt.UUID("11110000-0000-0000-0000-000011110000") + r = objects.NewObjectsClassHeadParams().WithID(unknown_uuid).WithClassName(cls) + resp, err = helper.Client(t).Objects.ObjectsClassHead(r, nil) + helper.AssertRequestFail(t, resp, err, nil) +} + +func TestPutObject(t *testing.T) { + t.Parallel() + var ( + cls = "TestObjectHTTPUpdate" + friend_cls = "TestObjectHTTPUpdateFriend" + ) + + // test setup + friend_uuid := helper.AssertCreateObject(t, friend_cls, map[string]interface{}{}) + defer helper.DeleteClassObject(t, friend_cls) + + helper.AssertCreateObjectClass(t, &models.Class{ + Class: cls, + ModuleConfig: map[string]interface{}{ 
+ "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "testString", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "testWholeNumber", + DataType: []string{"int"}, + }, + { + Name: "testNumber", + DataType: []string{"number"}, + }, + { + Name: "testDateTime", + DataType: []string{"date"}, + }, + { + Name: "testTrueFalse", + DataType: []string{"boolean"}, + }, + { + Name: "friend", + DataType: []string{friend_cls}, + }, + }, + }) + // tear down + defer helper.DeleteClassObject(t, cls) + uuid := helper.AssertCreateObject(t, cls, map[string]interface{}{ + "testWholeNumber": 2.0, + "testDateTime": time.Now(), + "testString": "wibbly", + }) + + link1 := map[string]interface{}{ + "beacon": crossref.NewLocalhost(friend_cls, friend_uuid).String(), + "href": fmt.Sprintf("/v1/objects/%s/%s", friend_cls, friend_uuid), + } + link2 := map[string]interface{}{ + "beacon": crossref.NewLocalhost(friend_cls, friend_uuid).String(), + "href": fmt.Sprintf("/v1/objects/%s/%s", friend_cls, friend_uuid), + } + expected := map[string]interface{}{ + "testNumber": json.Number("2"), + "testTrueFalse": true, + "testString": "wibbly wobbly", + "friend": []interface{}{link1, link2}, + } + update := models.Object{ + Class: cls, + Properties: models.PropertySchema(expected), + ID: uuid, + } + params := objects.NewObjectsClassPutParams().WithID(uuid).WithBody(&update) + updateResp, err := helper.Client(t).Objects.ObjectsClassPut(params, nil) + helper.AssertRequestOk(t, updateResp, err, nil) + actual := helper.AssertGetObject(t, cls, uuid).Properties.(map[string]interface{}) + assert.Equal(t, expected, actual) +} + +func TestPatchObject(t *testing.T) { + t.Parallel() + var ( + cls = "TestObjectHTTPPatch" + friend_cls = "TestObjectHTTPPatchFriend" + mconfig = map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + 
"vectorizeClassName": true, + }, + } + ) + // test setup + helper.DeleteClassObject(t, friend_cls) + helper.DeleteClassObject(t, cls) + helper.AssertCreateObjectClass(t, &models.Class{ // friend + Class: friend_cls, + ModuleConfig: mconfig, + Properties: []*models.Property{}, + }) + defer helper.DeleteClassObject(t, friend_cls) + helper.AssertCreateObjectClass(t, &models.Class{ // class + Class: cls, + ModuleConfig: mconfig, + Properties: []*models.Property{ + { + Name: "string1", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "integer1", + DataType: []string{"int"}, + }, + { + Name: "number1", + DataType: []string{"number"}, + }, + { + Name: "friend", + DataType: []string{friend_cls}, + }, + { + Name: "boolean1", + DataType: []string{"boolean"}, + }, + }, + }) + defer helper.DeleteClassObject(t, cls) + + uuid := helper.AssertCreateObject(t, cls, map[string]interface{}{ + "integer1": 2.0, + "string1": "wibbly", + }) + friendID := helper.AssertCreateObject(t, friend_cls, nil) + link1 := map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", friend_cls, friendID), + "href": fmt.Sprintf("/v1/objects/%s/%s", friend_cls, friendID), + } + link2 := map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", friend_cls, friendID), + "href": fmt.Sprintf("/v1/objects/%s/%s", friend_cls, friendID), + } + expected := map[string]interface{}{ + "integer1": json.Number("2"), + "number1": json.Number("3"), + "boolean1": true, + "string1": "wibbly wobbly", + "friend": []interface{}{link1, link2}, + } + update := map[string]interface{}{ + "number1": 3.0, + "boolean1": true, + "string1": "wibbly wobbly", + "friend": []interface{}{ + map[string]interface{}{ + "beacon": link1["beacon"], + }, map[string]interface{}{ + "beacon": link2["beacon"], + }, + }, + } + updateObj := models.Object{ + Properties: models.PropertySchema(update), + } + params := 
objects.NewObjectsClassPatchParams().WithClassName(cls) + params.WithID(uuid).WithBody(&updateObj) + updateResp, err := helper.Client(t).Objects.ObjectsClassPatch(params, nil) + helper.AssertRequestOk(t, updateResp, err, nil) + actual := func() interface{} { + obj := helper.AssertGetObject(t, cls, uuid) + props := obj.Properties.(map[string]interface{}) + return props + } + helper.AssertEventuallyEqual(t, expected, actual) + + params.WithID(strfmt.UUID("e5be1f32-0001-0000-0000-ebb25dfc811f")) + _, err = helper.Client(t).Objects.ObjectsClassPatch(params, nil) + if err == nil { + t.Errorf("must return an error for non existing object") + } +} + +func TestDeleteObject(t *testing.T) { + t.Parallel() + var ( + id = strfmt.UUID("21111111-1111-1111-1111-111111111111") + classA = "TestObjectHTTPDeleteA" + classB = "TestObjectHTTPDeleteB" + props = []*models.Property{ + { + Name: "text", + DataType: []string{"text"}, + }, + } + ) + // test setup + helper.DeleteClassObject(t, classA) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: classA, + Vectorizer: "none", + Properties: props, + }) + defer helper.DeleteClassObject(t, classA) + + helper.DeleteClassObject(t, classB) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: classB, + Vectorizer: "none", + Properties: props, + }) + + defer helper.DeleteClassObject(t, classB) + + object1 := &models.Object{ + Class: classA, + ID: id, + Properties: map[string]interface{}{ + "text": "string 1", + }, + } + object2 := &models.Object{ + Class: classB, + ID: id, + Properties: map[string]interface{}{ + "text": "string 2", + }, + } + + // create objects + returnedFields := "ALL" + params := batch.NewBatchObjectsCreateParams().WithBody( + batch.BatchObjectsCreateBody{ + Objects: []*models.Object{object1, object2}, + Fields: []*string{&returnedFields}, + }) + + resp, err := helper.BatchClient(t).BatchObjectsCreate(params, nil) + + // ensure that the response is OK + helper.AssertRequestOk(t, resp, err, func() { + 
objectsCreateResponse := resp.Payload + + // check if the batch response contains two batched responses + assert.Equal(t, 2, len(objectsCreateResponse)) + + for _, elem := range resp.Payload { + assert.Nil(t, elem.Result.Errors) + } + }) + + { // "delete object from first class + params := objects.NewObjectsClassDeleteParams().WithClassName(classA).WithID(id) + resp, err := helper.Client(t).Objects.ObjectsClassDelete(params, nil) + if err != nil { + t.Errorf("cannot delete existing object err: %v", err) + } + assert.Equal(t, &objects.ObjectsClassDeleteNoContent{}, resp) + } + { // check if object still exit + params := objects.NewObjectsClassGetParams().WithClassName(classA).WithID(id) + _, err := helper.Client(t).Objects.ObjectsClassGet(params, nil) + werr := &objects.ObjectsClassGetNotFound{} + if !errors.As(err, &werr) { + t.Errorf("Get deleted object error got: %v want %v", err, werr) + } + } + { // object with a different class must exist + params := objects.NewObjectsClassGetParams().WithClassName(classB).WithID(id) + resp, err := helper.Client(t).Objects.ObjectsClassGet(params, nil) + if err != nil { + t.Errorf("object must exist err: %v", err) + } + if resp.Payload == nil { + t.Errorf("payload of an existing object cannot be empty") + } + } +} + +func TestPostReference(t *testing.T) { + t.Parallel() + var ( + cls = "TestObjectHTTPAddReference" + friend_cls = "TestObjectHTTPAddReferenceFriend" + mconfig = map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + } + ) + + // test setup + helper.DeleteClassObject(t, cls) + helper.DeleteClassObject(t, friend_cls) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: friend_cls, + ModuleConfig: mconfig, + Properties: []*models.Property{}, + }) + defer helper.DeleteClassObject(t, friend_cls) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: cls, + ModuleConfig: mconfig, + Properties: []*models.Property{ + { + Name: "number", + DataType: 
[]string{"number"}, + }, + { + Name: "friend", + DataType: []string{friend_cls}, + }, + }, + }) + defer helper.DeleteClassObject(t, cls) + uuid := helper.AssertCreateObject(t, cls, map[string]interface{}{ + "number": 2.0, + }) + friendID := helper.AssertCreateObject(t, friend_cls, nil) + expected := map[string]interface{}{ + "number": json.Number("2"), + "friend": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", friend_cls, friendID), + "href": fmt.Sprintf("/v1/objects/%s/%s", friend_cls, friendID), + }, + }, + } + updateObj := crossref.NewLocalhost(friend_cls, friendID).SingleRef() + params := objects.NewObjectsClassReferencesCreateParams().WithClassName(cls) + params.WithID(uuid).WithBody(updateObj).WithPropertyName("friend") + resp, err := helper.Client(t).Objects.ObjectsClassReferencesCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + obj := helper.AssertGetObject(t, cls, uuid) + actual := obj.Properties.(map[string]interface{}) + assert.Equal(t, expected, actual) + + params.WithPropertyName("unknown") + _, err = helper.Client(t).Objects.ObjectsClassReferencesCreate(params, nil) + var targetErr *objects.ObjectsClassReferencesCreateUnprocessableEntity + if !errors.As(err, &targetErr) { + t.Errorf("error type expected: %T, got %T", objects.ObjectsClassReferencesCreateUnprocessableEntity{}, err) + } + + params.WithPropertyName("friend") + params.WithID("e7cd261a-0000-0000-0000-d7b8e7b5c9ea") + _, err = helper.Client(t).Objects.ObjectsClassReferencesCreate(params, nil) + var targetNotFoundErr *objects.ObjectsClassReferencesCreateNotFound + if !errors.As(err, &targetNotFoundErr) { + t.Errorf("error type expected: %T, got %T", objects.ObjectsClassReferencesCreateNotFound{}, err) + } +} + +func TestPutReferences(t *testing.T) { + t.Parallel() + var ( + cls = "TestObjectHTTPUpdateReferences" + first_friend = "TestObjectHTTPUpdateReferencesFriendFirst" + second_friend = 
"TestObjectHTTPUpdateReferencesFriendSecond" + mconfig = map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + } + ) + // test setup + helper.DeleteClassObject(t, first_friend) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: first_friend, + ModuleConfig: mconfig, + Properties: []*models.Property{}, + }) + defer helper.DeleteClassObject(t, first_friend) + + helper.DeleteClassObject(t, second_friend) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: second_friend, + ModuleConfig: mconfig, + Properties: []*models.Property{}, + }) + defer helper.DeleteClassObject(t, second_friend) + + helper.DeleteClassObject(t, cls) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: cls, + ModuleConfig: mconfig, + Properties: []*models.Property{ + { + Name: "number", + DataType: []string{"number"}, + }, + { + Name: "friend", + DataType: []string{first_friend, second_friend}, + }, + }, + }) + defer helper.DeleteClassObject(t, cls) + + uuid := helper.AssertCreateObject(t, cls, map[string]interface{}{ + "number": 2.0, + }) + first_friendID := helper.AssertCreateObject(t, first_friend, nil) + second_friendID := helper.AssertCreateObject(t, second_friend, nil) + + expected := map[string]interface{}{ + "number": json.Number("2"), + "friend": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", first_friend, first_friendID), + "href": fmt.Sprintf("/v1/objects/%s/%s", first_friend, first_friendID), + }, + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", second_friend, second_friendID), + "href": fmt.Sprintf("/v1/objects/%s/%s", second_friend, second_friendID), + }, + }, + } + updateObj := models.MultipleRef{ + crossref.NewLocalhost(first_friend, first_friendID).SingleRef(), + crossref.NewLocalhost(second_friend, second_friendID).SingleRef(), + } + // add two references + params := 
objects.NewObjectsClassReferencesPutParams().WithClassName(cls) + params.WithID(uuid).WithBody(updateObj).WithPropertyName("friend") + resp, err := helper.Client(t).Objects.ObjectsClassReferencesPut(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + obj := helper.AssertGetObject(t, cls, uuid) + actual := obj.Properties.(map[string]interface{}) + assert.Equal(t, expected, actual) + + // exclude one reference + params.WithID(uuid).WithBody(updateObj[:1]).WithPropertyName("friend") + resp, err = helper.Client(t).Objects.ObjectsClassReferencesPut(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + obj = helper.AssertGetObject(t, cls, uuid) + actual = obj.Properties.(map[string]interface{}) + expected["friend"] = expected["friend"].([]interface{})[:1] + assert.Equal(t, expected, actual) + + params.WithPropertyName("unknown") + _, err = helper.Client(t).Objects.ObjectsClassReferencesPut(params, nil) + var expectedErr *objects.ObjectsClassReferencesPutUnprocessableEntity + if !errors.As(err, &expectedErr) { + t.Errorf("error type expected: %T, got %T", objects.ObjectsClassReferencesPutUnprocessableEntity{}, err) + } + params.WithPropertyName("friend") + + params.WithID("e7cd261a-0000-0000-0000-d7b8e7b5c9ea") + _, err = helper.Client(t).Objects.ObjectsClassReferencesPut(params, nil) + var expectedRefErr *objects.ObjectsClassReferencesPutNotFound + if !errors.As(err, &expectedRefErr) { + t.Errorf("error type expected: %T, got %T", objects.ObjectsClassReferencesPutNotFound{}, err) + } + params.WithID(uuid) + + // exclude all + params.WithBody(models.MultipleRef{}).WithPropertyName("friend") + resp, err = helper.Client(t).Objects.ObjectsClassReferencesPut(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + obj = helper.AssertGetObject(t, cls, uuid) + actual = obj.Properties.(map[string]interface{}) + expected["friend"] = expected["friend"].([]interface{})[1:] + assert.Equal(t, expected, actual) + + // bad request since body is required + 
params.WithID(uuid).WithBody(nil).WithPropertyName("friend") + _, err = helper.Client(t).Objects.ObjectsClassReferencesPut(params, nil) + var expectedErr2 *objects.ObjectsClassReferencesPutUnprocessableEntity + if !errors.As(err, &expectedErr2) { + t.Errorf("error type expected: %T, got %T", objects.ObjectsClassReferencesPutUnprocessableEntity{}, err) + } +} + +func TestDeleteReference(t *testing.T) { + t.Parallel() + var ( + cls = "TestObjectHTTPDeleteReference" + first_friend = "TestObjectHTTPDeleteReferenceFriendFirst" + second_friend = "TestObjectHTTPDeleteReferenceFriendSecond" + mconfig = map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + } + ) + // test setup + helper.DeleteClassObject(t, first_friend) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: first_friend, + ModuleConfig: mconfig, + Properties: []*models.Property{}, + }) + defer helper.DeleteClassObject(t, first_friend) + + helper.DeleteClassObject(t, second_friend) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: second_friend, + ModuleConfig: mconfig, + Properties: []*models.Property{}, + }) + defer helper.DeleteClassObject(t, second_friend) + + helper.DeleteClassObject(t, cls) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: cls, + ModuleConfig: mconfig, + Properties: []*models.Property{ + { + Name: "number", + DataType: []string{"number"}, + }, + { + Name: "friend", + DataType: []string{first_friend, second_friend}, + }, + }, + }) + defer helper.DeleteClassObject(t, cls) + + first_friendID := helper.AssertCreateObject(t, first_friend, nil) + second_friendID := helper.AssertCreateObject(t, second_friend, nil) + uuid := helper.AssertCreateObject(t, cls, map[string]interface{}{ + "number": 2.0, + "friend": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", first_friend, first_friendID), + "href": fmt.Sprintf("/v1/objects/%s/%s", first_friend, first_friendID), + 
}, + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", second_friend, second_friendID), + "href": fmt.Sprintf("/v1/objects/%s/%s", second_friend, second_friendID), + }, + }, + }) + expected := map[string]interface{}{ + "number": json.Number("2"), + "friend": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", first_friend, first_friendID), + "href": fmt.Sprintf("/v1/objects/%s/%s", first_friend, first_friendID), + }, + }, + } + + updateObj := crossref.NewLocalhost(second_friend, second_friendID).SingleRef() + // delete second reference + params := objects.NewObjectsClassReferencesDeleteParams().WithClassName(cls) + params.WithID(uuid).WithBody(updateObj).WithPropertyName("friend") + resp, err := helper.Client(t).Objects.ObjectsClassReferencesDelete(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + obj := helper.AssertGetObject(t, cls, uuid) + actual := obj.Properties.(map[string]interface{}) + assert.Equal(t, expected, actual) + + // delete same reference again + resp, err = helper.Client(t).Objects.ObjectsClassReferencesDelete(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + obj = helper.AssertGetObject(t, cls, uuid) + actual = obj.Properties.(map[string]interface{}) + assert.Equal(t, expected, actual) + + // delete last reference + expected = map[string]interface{}{ + "number": json.Number("2"), + "friend": []interface{}{}, + } + updateObj = crossref.NewLocalhost(first_friend, first_friendID).SingleRef() + params.WithID(uuid).WithBody(updateObj).WithPropertyName("friend") + resp, err = helper.Client(t).Objects.ObjectsClassReferencesDelete(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + obj = helper.AssertGetObject(t, cls, uuid) + actual = obj.Properties.(map[string]interface{}) + assert.Equal(t, expected, actual) + + // property is not part of the schema + params.WithPropertyName("unknown") + _, err = helper.Client(t).Objects.ObjectsClassReferencesDelete(params, 
nil) + var deleteUnprocessableEntityErr *objects.ObjectsClassReferencesDeleteUnprocessableEntity + if !errors.As(err, &deleteUnprocessableEntityErr) { + t.Errorf("error type expected: %T, got %T", objects.ObjectsClassReferencesDeleteUnprocessableEntity{}, err) + } + params.WithPropertyName("friend") + + // This ID doesn't exist + params.WithID("e7cd261a-0000-0000-0000-d7b8e7b5c9ea") + _, err = helper.Client(t).Objects.ObjectsClassReferencesDelete(params, nil) + var deleteNotFoundErr *objects.ObjectsClassReferencesDeleteNotFound + if !errors.As(err, &deleteNotFoundErr) { + t.Errorf("error type expected: %T, got %T", objects.ObjectsClassReferencesDeleteNotFound{}, err) + } + params.WithID(uuid) + + // bad request since body is required + params.WithID(uuid).WithBody(nil).WithPropertyName("friend") + _, err = helper.Client(t).Objects.ObjectsClassReferencesDelete(params, nil) + if !errors.As(err, &deleteUnprocessableEntityErr) { + t.Errorf("error type expected: %T, got %T", objects.ObjectsClassReferencesDeleteUnprocessableEntity{}, err) + } +} + +func TestQuery(t *testing.T) { + t.Parallel() + var ( + cls = "TestObjectHTTPQuery" + first_friend = "TestObjectHTTPQueryFriend" + ) + // test setup + helper.DeleteClassObject(t, cls) + helper.DeleteClassObject(t, first_friend) + + helper.AssertCreateObject(t, first_friend, map[string]interface{}{}) + defer helper.DeleteClassObject(t, first_friend) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: cls, + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "count", + DataType: []string{"int"}, + }, + }, + }) + defer helper.DeleteClassObject(t, cls) + helper.AssertCreateObject(t, cls, map[string]interface{}{"count": 1}) + helper.AssertCreateObject(t, cls, map[string]interface{}{"count": 1}) + + listParams := objects.NewObjectsListParams() + listParams.Class = &cls + resp, err := helper.Client(t).Objects.ObjectsList(listParams, nil) + require.Nil(t, err, "unexpected error", resp) + + if n := len(resp.Payload.Objects); n != 2 { + t.Errorf("Number of object 
got:%v want %v", n, 2) + } + var count int64 + for _, x := range resp.Payload.Objects { + if x.Class != cls { + t.Errorf("Class got:%v want:%v", x.Class, cls) + } + m, ok := x.Properties.(map[string]interface{}) + if !ok { + t.Error("wrong property type") + } + n, _ := m["count"].(json.Number).Int64() + count += n + } + if count != 2 { + t.Errorf("Count got:%v want:%v", count, 2) + } + + listParams.Class = &first_friend + resp, err = helper.Client(t).Objects.ObjectsList(listParams, nil) + require.Nil(t, err, "unexpected error", resp) + if n := len(resp.Payload.Objects); n != 1 { + t.Errorf("Number of friend objects got:%v want %v", n, 1) + } + + unknown_cls := "unknow" + listParams.Class = &unknown_cls + _, err = helper.Client(t).Objects.ObjectsList(listParams, nil) + var customErr *objects.ObjectsListNotFound + if !errors.As(err, &customErr) { + t.Errorf("error type expected: %T, got %T", objects.ObjectsListNotFound{}, err) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/actions/setup_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/setup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..74f423d3049ccd643d0158f50e63968138b0a8e8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/setup_test.go @@ -0,0 +1,192 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/nodes" + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/storagestate" + "github.com/weaviate/weaviate/entities/verbosity" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +func Test_Objects(t *testing.T) { + t.Run("setup", func(t *testing.T) { + helper.AssertCreateObjectClass(t, &models.Class{ + Class: "ObjectTestThing", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "testString", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: "TestObject", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "testString", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "testWholeNumber", + DataType: []string{"int"}, + }, + { + Name: "testNumber", + DataType: []string{"number"}, + }, + { + Name: "testDateTime", + DataType: []string{"date"}, + }, + { + Name: "testTrueFalse", + DataType: []string{"boolean"}, + }, + { + Name: "testReference", + DataType: []string{"ObjectTestThing"}, + }, + }, + }) + helper.AssertCreateObjectClass(t, &models.Class{ + Class: "TestObjectTwo", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: 
[]*models.Property{ + { + Name: "testReference", + DataType: []string{"TestObject"}, + }, + { + Name: "testReferences", + DataType: []string{"TestObject"}, + }, + { + Name: "testString", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + }) + + // tests + t.Run("adding objects", addingObjects) + t.Run("removing objects", removingObjects) + t.Run("object references", objectReferences) + t.Run("updating objects deprecated", updateObjectsDeprecated) + + // tear down + helper.AssertDeleteObjectClass(t, "ObjectTestThing") + helper.AssertDeleteObjectClass(t, "TestObject") + helper.AssertDeleteObjectClass(t, "TestObjectTwo") +} + +func Test_Delete_ReadOnly_Classes(t *testing.T) { + className := "DeleteReadonlyClassTest" + + t.Run("setup", func(t *testing.T) { + helper.AssertCreateObjectClass(t, &models.Class{ + Class: className, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "stringProp", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + + batchSize := 1000 + batch := make([]*models.Object, batchSize) + for i := 0; i < batchSize; i++ { + batch[i] = &models.Object{ + Class: className, + Properties: map[string]interface{}{ + "stringProp": fmt.Sprintf("obj#%d", i+1), + }, + } + } + helper.CreateObjectsBatch(t, batch) + }) + + t.Run("assert data exists", func(t *testing.T) { + res := graphqlhelper.AssertGraphQL(t, helper.RootAuth, + fmt.Sprintf("{Aggregate {%s {meta {count}}}}", className)) + count := res.Get("Aggregate", className).AsSlice()[0].(map[string]interface{})["meta"].(map[string]interface{})["count"] + require.EqualValues(t, json.Number("1000"), count) + }) + + t.Run("set shard to readonly", func(t *testing.T) { + verbose := verbosity.OutputVerbose + nodesResp, err := 
helper.Client(t).Nodes.NodesGet(nodes.NewNodesGetParams().WithOutput(&verbose), nil) + require.Nil(t, err) + require.NotNil(t, nodesResp.Payload) + require.Len(t, nodesResp.Payload.Nodes, 1) + require.Len(t, nodesResp.Payload.Nodes[0].Shards, 1) + require.Equal(t, className, nodesResp.Payload.Nodes[0].Shards[0].Class) + targetShard := nodesResp.Payload.Nodes[0].Shards[0].Name + + params := clschema.NewSchemaObjectsShardsUpdateParams(). + WithBody(&models.ShardStatus{Status: storagestate.StatusReadOnly.String()}). + WithClassName(className). + WithShardName(targetShard) + shardsResp, err := helper.Client(t).Schema.SchemaObjectsShardsUpdate(params, nil) + require.Nil(t, err) + require.NotNil(t, shardsResp.Payload) + require.Equal(t, storagestate.StatusReadOnly.String(), shardsResp.Payload.Status) + }) + + t.Run("delete class with readonly shard", func(t *testing.T) { + params := clschema.NewSchemaObjectsDeleteParams().WithClassName(className) + resp, err := helper.Client(t).Schema.SchemaObjectsDelete(params, nil) + require.Nil(t, err) + require.True(t, resp.IsCode(http.StatusOK)) + }) + + t.Run("assert class is deleted", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams().WithClassName(className) + _, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Equal(t, &clschema.SchemaObjectsGetNotFound{}, err) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/actions/update_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/update_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d5eea5d833d2cfb8a91b6366352af35350c98a6c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/actions/update_test.go @@ -0,0 +1,165 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. 
All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +// Acceptance tests for objects + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/davecgh/go-spew/spew" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +// run from setup_test.go +func updateObjectsDeprecated(t *testing.T) { + t.Run("update and set number", func(t *testing.T) { + uuid := helper.AssertCreateObject(t, "TestObject", map[string]interface{}{}) + helper.AssertGetObjectEventually(t, "TestObject", uuid) + + schema := models.PropertySchema(map[string]interface{}{ + "testNumber": 41.0, + }) + + update := models.Object{} + update.Properties = schema + update.Class = "TestObject" + update.ID = uuid + + params := objects.NewObjectsUpdateParams().WithID(uuid).WithBody(&update) + updateResp, err := helper.Client(t).Objects.ObjectsUpdate(params, nil) + helper.AssertRequestOk(t, updateResp, err, nil) + + actualThunk := func() interface{} { + updatedObject := helper.AssertGetObject(t, update.Class, uuid) + updatedSchema := updatedObject.Properties.(map[string]interface{}) + if updatedSchema["testNumber"] == nil { + return nil + } + num, _ := updatedSchema["testNumber"].(json.Number).Float64() + return num + } + helper.AssertEventuallyEqual(t, 41.0, actualThunk) + }) + + t.Run("update and set string", func(t *testing.T) { + uuid := helper.AssertCreateObject(t, "TestObject", map[string]interface{}{}) + helper.AssertGetObjectEventually(t, "TestObject", uuid) + + schema := models.PropertySchema(map[string]interface{}{ + "testString": "wibbly wobbly", + }) + + update := models.Object{} + update.Properties = schema + update.Class = "TestObject" + update.ID = uuid + + params := objects.NewObjectsUpdateParams().WithID(uuid).WithBody(&update) + updateResp, err := helper.Client(t).Objects.ObjectsUpdate(params, nil) + helper.AssertRequestOk(t, updateResp, err, nil) + + actualThunk := 
func() interface{} { + updatedObject := helper.AssertGetObject(t, update.Class, uuid) + updatedSchema := updatedObject.Properties.(map[string]interface{}) + return updatedSchema["testString"] + } + helper.AssertEventuallyEqual(t, "wibbly wobbly", actualThunk) + }) + + t.Run("update and set bool", func(t *testing.T) { + t.Parallel() + uuid := helper.AssertCreateObject(t, "TestObject", map[string]interface{}{}) + helper.AssertGetObjectEventually(t, "TestObject", uuid) + + schema := models.PropertySchema(map[string]interface{}{ + "testTrueFalse": true, + }) + + update := models.Object{} + update.Properties = schema + update.Class = "TestObject" + update.ID = uuid + + params := objects.NewObjectsUpdateParams().WithID(uuid).WithBody(&update) + updateResp, err := helper.Client(t).Objects.ObjectsUpdate(params, nil) + + helper.AssertRequestOk(t, updateResp, err, nil) + + actualThunk := func() interface{} { + updatedObject := helper.AssertGetObject(t, update.Class, uuid) + updatedSchema := updatedObject.Properties.(map[string]interface{}) + return updatedSchema["testTrueFalse"] + } + helper.AssertEventuallyEqual(t, true, actualThunk) + }) + + t.Run("can patch object with cref", func(t *testing.T) { + thingToRefID := helper.AssertCreateObject(t, "ObjectTestThing", nil) + helper.AssertGetObjectEventually(t, "ObjectTestThing", thingToRefID) + objectID := helper.AssertCreateObject(t, "TestObject", nil) + helper.AssertGetObjectEventually(t, "TestObject", objectID) + + merge := &models.Object{ + Class: "TestObject", + Properties: map[string]interface{}{ + "testReference": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", thingToRefID), + }, + }, + }, + } + + // Now to try to link + params := objects.NewObjectsPatchParams(). + WithBody(merge). 
+ WithID(objectID) + patchResp, err := helper.Client(t).Objects.ObjectsPatch(params, nil) + spew.Dump(err) + helper.AssertRequestOk(t, patchResp, err, nil) + + actualThunk := func() interface{} { + patchedObject := helper.AssertGetObject(t, merge.Class, objectID) + + rawRef, ok := patchedObject.Properties.(map[string]interface{})["testReference"] + if !ok { + return nil + } + + refsSlice, ok := rawRef.([]interface{}) + if !ok { + t.Logf("found the ref prop, but it was not a slice, but %T", rawRef) + t.Fail() + } + + if len(refsSlice) != 1 { + t.Logf("expected ref slice to have one element, but got: %d", len(refsSlice)) + t.Fail() + } + + refMap, ok := refsSlice[0].(map[string]interface{}) + if !ok { + t.Logf("found the ref element, but it was not a map, but %T", refsSlice[0]) + t.Fail() + } + + return refMap["beacon"] + } + + helper.AssertEventuallyEqual(t, fmt.Sprintf("weaviate://localhost/ObjectTestThing/%s", thingToRefID), actualThunk) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/aliases/aliases_api_backup_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/aliases/aliases_api_backup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..2163651357c77cde69c54dbed2a14f6c6d2ff8c7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/aliases/aliases_api_backup_test.go @@ -0,0 +1,166 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/books" +) + +func Test_AliasesAPI_Backup(t *testing.T) { + ctx := context.Background() + compose, err := docker.New(). + WithBackendFilesystem(). + WithWeaviate(). + WithText2VecModel2Vec(). + Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + defer helper.SetupClient(fmt.Sprintf("%s:%s", helper.ServerHost, helper.ServerPort)) + helper.SetupClient(compose.GetWeaviate().URI()) + + // Three options for the test: + // 1. full: backup and restore collection after deleting both collection and the alias, will pass as of 1.32 + // 2. overwrite: backup and restore after deleting the collection but not the alias, should pass only if "overwrite option is set" + for _, options := range []string{"full", "overwrite"} { + t.Run("backup with "+options, func(t *testing.T) { + t.Run("create schema", func(t *testing.T) { + t.Run("Books", func(t *testing.T) { + booksClass := books.ClassModel2VecVectorizer() + helper.CreateClass(t, booksClass) + for _, book := range books.Objects() { + helper.CreateObject(t, book) + helper.AssertGetObjectEventually(t, book.Class, book.ID) + } + }) + }) + + var aliases []string + t.Run("create aliases", func(t *testing.T) { + tests := []struct { + name string + alias *models.Alias + }{ + { + name: books.DefaultClassName, + alias: &models.Alias{Alias: "BookAlias", Class: books.DefaultClassName}, + }, + { + name: books.DefaultClassName, + alias: &models.Alias{Alias: "BookAliasToBeDeleted", Class: books.DefaultClassName}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + helper.CreateAlias(t, 
tt.alias) + resp := helper.GetAliases(t, &tt.alias.Class) + require.NotNil(t, resp) + require.NotEmpty(t, resp.Aliases) + aliasCreated := false + for _, alias := range resp.Aliases { + if tt.alias.Alias == alias.Alias && tt.alias.Class == alias.Class { + aliasCreated = true + } + } + assert.True(t, aliasCreated) + aliases = append(aliases, tt.alias.Alias) + }) + } + }) + + defer func() { + resp := helper.GetAliases(t, nil) + require.NotNil(t, resp) + for _, alias := range resp.Aliases { + helper.DeleteAlias(t, alias.Alias) + } + helper.DeleteClass(t, books.DefaultClassName) + }() + + t.Run("delete alias", func(t *testing.T) { + checkAliasesCount := func(t *testing.T, count int) { + resp := helper.GetAliases(t, nil) + require.NotNil(t, resp) + require.NotEmpty(t, resp.Aliases) + require.Equal(t, count, len(resp.Aliases)) + } + checkAliasesCount(t, 2) + helper.DeleteAlias(t, "BookAliasToBeDeleted") + checkAliasesCount(t, 1) + }) + + backend := "filesystem" + backupID := options + "-backup-id" + + t.Run("backup with local filesystem backend", func(t *testing.T) { + backupResp, err := helper.CreateBackup(t, helper.DefaultBackupConfig(), books.DefaultClassName, backend, backupID) + assert.Nil(t, err) + assert.NotNil(t, backupResp) + helper.ExpectBackupEventuallyCreated(t, backupID, backend, nil, helper.WithPollInterval(helper.MinPollInterval), helper.WithDeadline(helper.MaxDeadline)) + }) + + t.Run("delete collection", func(t *testing.T) { + helper.DeleteClass(t, books.DefaultClassName) + }) + + if options != "overwrite" { + t.Run("delete alias", func(t *testing.T) { + helper.DeleteAlias(t, "BookAlias") + }) + + t.Run("check alias count after deletion", func(t *testing.T) { + resp := helper.GetAliases(t, nil) + require.NotNil(t, resp) + require.Empty(t, resp.Aliases) + }) + } + + t.Run("restore with local filesystem backend", func(t *testing.T) { + var overwriteAlias bool + if options == "overwrite" { + overwriteAlias = true + } + + restoreResp, err := 
helper.RestoreBackup(t, helper.DefaultRestoreConfig(), books.DefaultClassName, backend, backupID, map[string]string{}, overwriteAlias) + assert.Nil(t, err) + assert.NotNil(t, restoreResp) + helper.ExpectBackupEventuallyRestored(t, backupID, backend, nil, helper.WithPollInterval(helper.MinPollInterval), helper.WithDeadline(helper.MaxDeadline)) + }) + + t.Run("check class after restore", func(t *testing.T) { + resp := helper.GetClass(t, books.DefaultClassName) + require.NotNil(t, resp) + }) + + t.Run("check alias count after restore", func(t *testing.T) { + checkAliasesCount := func(t *testing.T, count int) { + resp := helper.GetAliases(t, nil) + require.NotNil(t, resp) + require.NotEmpty(t, resp.Aliases) + require.Equal(t, count, len(resp.Aliases)) + } + checkAliasesCount(t, 1) + }) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/aliases/aliases_api_grpc_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/aliases/aliases_api_grpc_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8eecc8c33caa273e00cf0a4654f3d0291f60fe8f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/aliases/aliases_api_grpc_test.go @@ -0,0 +1,310 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/models" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/books" + "google.golang.org/grpc" + "google.golang.org/protobuf/types/known/structpb" +) + +func Test_AliasesAPI_gRPC(t *testing.T) { + ctx := context.Background() + compose, err := docker.New(). + WithWeaviateWithGRPC(). + WithText2VecModel2Vec(). + Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + gRPCClient := func(t *testing.T, addr string) (pb.WeaviateClient, *grpc.ClientConn) { + conn, err := helper.CreateGrpcConnectionClient(addr) + require.NoError(t, err) + require.NotNil(t, conn) + grpcClient := helper.CreateGrpcWeaviateClient(conn) + require.NotNil(t, grpcClient) + return grpcClient, conn + } + + defer helper.SetupClient(fmt.Sprintf("%s:%s", helper.ServerHost, helper.ServerPort)) + + helper.SetupClient(compose.GetWeaviate().URI()) + grpcClient, _ := gRPCClient(t, compose.GetWeaviate().GrpcURI()) + require.NotNil(t, gRPCClient) + + booksAliasName := "BooksAlias" + + t.Run("create schema", func(t *testing.T) { + booksClass := books.ClassModel2VecVectorizer() + helper.CreateClass(t, booksClass) + for _, book := range books.Objects() { + helper.CreateObject(t, book) + helper.AssertGetObjectEventually(t, book.Class, book.ID) + } + }) + + t.Run("create alias", func(t *testing.T) { + alias := &models.Alias{Alias: booksAliasName, Class: books.DefaultClassName} + helper.CreateAlias(t, alias) + resp := helper.GetAliases(t, &alias.Class) + require.NotNil(t, resp) + require.NotEmpty(t, resp.Aliases) + }) + + 
assertTargetCollectionName := func(res []*pb.SearchResult, collection string) { + for _, r := range res { + require.NotNil(t, r.GetProperties()) + assert.Equal(t, collection, r.GetProperties().GetTargetCollection()) + } + } + + tests := []struct { + name string + collection string + accessUsing string + }{ + { + name: "search using collection name", + collection: books.DefaultClassName, + accessUsing: books.DefaultClassName, + }, + { + name: "search using alias", + collection: books.DefaultClassName, + accessUsing: booksAliasName, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Run("search", func(t *testing.T) { + t.Run("get", func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &pb.SearchRequest{ + Collection: tt.accessUsing, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 3) + assertTargetCollectionName(resp.Results, tt.collection) + }) + t.Run("get with filters", func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &pb.SearchRequest{ + Collection: tt.accessUsing, + Metadata: &pb.MetadataRequest{Vector: true, Uuid: true}, + Filters: &pb.Filters{ + Operator: pb.Filters_OPERATOR_EQUAL, + On: []string{"title"}, + TestValue: &pb.Filters_ValueText{ValueText: "Dune"}, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 1) + assert.Equal(t, resp.Results[0].Metadata.Id, books.Dune.String()) + assert.NotEmpty(t, resp.Results[0].Metadata.GetVectorBytes()) + assertTargetCollectionName(resp.Results, tt.collection) + }) + t.Run("nearText", func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &pb.SearchRequest{ + Collection: tt.accessUsing, + Metadata: &pb.MetadataRequest{Uuid: true}, + NearText: &pb.NearTextSearch{ + Query: []string{"Dune"}, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + 
require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 3) + assert.Equal(t, resp.Results[0].Metadata.Id, books.Dune.String()) + assertTargetCollectionName(resp.Results, tt.collection) + }) + t.Run("bm25", func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &pb.SearchRequest{ + Collection: tt.accessUsing, + Metadata: &pb.MetadataRequest{Uuid: true}, + Bm25Search: &pb.BM25{ + Query: "Dune", + Properties: []string{"title"}, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 1) + assert.Equal(t, resp.Results[0].Metadata.Id, books.Dune.String()) + assertTargetCollectionName(resp.Results, tt.collection) + }) + t.Run("hybrid", func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &pb.SearchRequest{ + Collection: tt.accessUsing, + Metadata: &pb.MetadataRequest{Uuid: true}, + HybridSearch: &pb.Hybrid{ + Query: "Project", + Alpha: 0.75, + }, + Properties: &pb.PropertiesRequest{ + NonRefProperties: []string{"title"}, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 3) + assert.Equal(t, resp.Results[0].Metadata.Id, books.ProjectHailMary.String()) + assertTargetCollectionName(resp.Results, tt.collection) + }) + }) + t.Run("aggregate using alias", func(t *testing.T) { + t.Run("count", func(t *testing.T) { + resp, err := grpcClient.Aggregate(ctx, &pb.AggregateRequest{ + Collection: tt.accessUsing, + ObjectsCount: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.Equal(t, int64(3), resp.GetSingleResult().GetObjectsCount()) + }) + t.Run("count with filters", func(t *testing.T) { + resp, err := grpcClient.Aggregate(ctx, &pb.AggregateRequest{ + Collection: tt.accessUsing, + Filters: &pb.Filters{ + Operator: pb.Filters_OPERATOR_EQUAL, + On: []string{"title"}, + TestValue: 
&pb.Filters_ValueText{ValueText: "Dune"}, + }, + ObjectsCount: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.Equal(t, int64(1), resp.GetSingleResult().GetObjectsCount()) + }) + t.Run("count with nearText", func(t *testing.T) { + certainty := float64(0.8) + resp, err := grpcClient.Aggregate(ctx, &pb.AggregateRequest{ + Collection: tt.accessUsing, + Filters: &pb.Filters{ + Operator: pb.Filters_OPERATOR_EQUAL, + On: []string{"title"}, + TestValue: &pb.Filters_ValueText{ValueText: "Dune"}, + }, + Search: &pb.AggregateRequest_NearText{ + NearText: &pb.NearTextSearch{ + Query: []string{"Dune"}, + Certainty: &certainty, + }, + }, + ObjectsCount: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.Equal(t, int64(1), resp.GetSingleResult().GetObjectsCount()) + }) + }) + }) + } + + t.Run("batch insert using alias", func(t *testing.T) { + theMartian := "67b79643-cf8b-4b22-b206-000000000001" + resp, err := grpcClient.BatchObjects(ctx, &pb.BatchObjectsRequest{ + Objects: []*pb.BatchObject{ + { + Collection: booksAliasName, + Uuid: theMartian, + Properties: &pb.BatchObject_Properties{ + NonRefProperties: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "title": structpb.NewStringValue("The Martian"), + "description": structpb.NewStringValue("Stranded on Mars after a dust storm forces his crew to evacuate, astronaut Mark Watney is presumed dead and left alone on the hostile planet."), + }, + }, + }, + }, + }, + }) + require.NoError(t, err) + require.NotNil(t, resp) + + tests := []struct { + name string + collection string + }{ + { + name: "search using collection name", + collection: books.DefaultClassName, + }, + { + name: "search using alias", + collection: booksAliasName, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Run("count", func(t *testing.T) { + resp, err := grpcClient.Aggregate(ctx, 
&pb.AggregateRequest{ + Collection: tt.collection, + ObjectsCount: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.Equal(t, int64(4), resp.GetSingleResult().GetObjectsCount()) + }) + t.Run("search using id", func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &pb.SearchRequest{ + Collection: tt.collection, + Metadata: &pb.MetadataRequest{Vector: true, Uuid: true}, + Filters: &pb.Filters{ + Operator: pb.Filters_OPERATOR_EQUAL, + On: []string{filters.InternalPropID}, + TestValue: &pb.Filters_ValueText{ValueText: theMartian}, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.Len(t, resp.Results, 1) + assert.Equal(t, theMartian, resp.Results[0].Metadata.Id) + }) + }) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/aliases/aliases_api_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/aliases/aliases_api_test.go new file mode 100644 index 0000000000000000000000000000000000000000..3dc6a8e936981c61c2ec02575034423803de233d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/aliases/aliases_api_test.go @@ -0,0 +1,693 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/cluster/router/types" + "github.com/weaviate/weaviate/entities/models" + entschema "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + "github.com/weaviate/weaviate/test/helper/sample-schema/books" + "github.com/weaviate/weaviate/test/helper/sample-schema/documents" +) + +func Test_AliasesAPI(t *testing.T) { + ctx := context.Background() + compose, err := docker.New(). + WithWeaviate(). + WithText2VecModel2Vec(). + Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + defer helper.SetupClient(fmt.Sprintf("%s:%s", helper.ServerHost, helper.ServerPort)) + helper.SetupClient(compose.GetWeaviate().URI()) + + t.Run("create schema", func(t *testing.T) { + t.Run("Books", func(t *testing.T) { + booksClass := books.ClassModel2VecVectorizer() + helper.CreateClass(t, booksClass) + for _, book := range books.Objects() { + helper.CreateObject(t, book) + helper.AssertGetObjectEventually(t, book.Class, book.ID) + } + }) + t.Run("Document and Passage", func(t *testing.T) { + docsClasses := documents.ClassesModel2VecVectorizer(false) + helper.CreateClass(t, docsClasses[0]) + helper.CreateClass(t, docsClasses[1]) + for _, doc := range documents.Objects() { + helper.CreateObject(t, doc) + helper.AssertGetObjectEventually(t, doc.Class, doc.ID) + } + }) + }) + + var aliases []string + t.Run("create aliases", func(t *testing.T) { + tests := []struct { + name string + alias *models.Alias + }{ + { + name: books.DefaultClassName, + alias: &models.Alias{Alias: 
"BookAlias", Class: books.DefaultClassName}, + }, + { + name: documents.Document, + alias: &models.Alias{Alias: "DocumentAlias", Class: documents.Document}, + }, + { + name: documents.Document, + alias: &models.Alias{Alias: "PassageAlias", Class: documents.Document}, + }, + { + name: documents.Passage, + alias: &models.Alias{Alias: "PassageAlias1", Class: documents.Passage}, + }, + { + name: documents.Passage, + alias: &models.Alias{Alias: "PassageAlias2", Class: documents.Passage}, + }, + { + name: documents.Passage, + alias: &models.Alias{Alias: "PassageAlias3", Class: documents.Passage}, + }, + { + name: documents.Passage, + alias: &models.Alias{Alias: "AliasThatWillBeReplaced", Class: documents.Passage}, + }, + { + name: "create with different case", + // passing in `aliasThatCreated` but should transform into `AliasThatCreated`. + alias: &models.Alias{Alias: "aliasThatCreated", Class: documents.Passage}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + helper.CreateAlias(t, tt.alias) + resp := helper.GetAliases(t, &tt.alias.Class) + require.NotNil(t, resp) + require.NotEmpty(t, resp.Aliases) + aliasCreated := false + for _, alias := range resp.Aliases { + expAlias := entschema.UppercaseClassName(tt.alias.Alias) + expClass := entschema.UppercaseClassName(tt.alias.Class) + if expAlias == alias.Alias && expClass == alias.Class { + aliasCreated = true + } + } + assert.True(t, aliasCreated) + aliases = append(aliases, tt.alias.Alias) + }) + } + }) + + defer func() { + resp := helper.GetAliases(t, nil) + require.NotNil(t, resp) + for _, alias := range resp.Aliases { + helper.DeleteAlias(t, alias.Alias) + } + helper.DeleteClass(t, books.DefaultClassName) + helper.DeleteClass(t, documents.Passage) + helper.DeleteClass(t, documents.Document) + }() + + t.Run("create alias with invalid char", func(t *testing.T) { + cases := []struct { + name string + input string + }{ + {name: "symbols1", input: "invalid_alias_!#"}, + {name: "symbols2", 
input: "invalid_alias_@"}, + {name: "symbols3", input: "!invalid_alias_@"}, + {name: "symbols4", input: "#invalid_alias_*"}, + {name: "empty", input: ""}, + {name: "maxlength", input: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"}, // more than max 255 chars + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + alias := &models.Alias{Class: books.DefaultClassName, Alias: "valid_alias_!#"} + p := schema.NewAliasesCreateParams().WithBody(alias) + resp, err := helper.Client(t).Schema.AliasesCreate(p, nil) + require.Error(t, err) + assert.Nil(t, resp) + var cerr *schema.AliasesCreateUnprocessableEntity + ok := errors.As(err, &cerr) // convert to concrete error type + assert.True(t, ok) + assert.Contains(t, cerr.Payload.Error[0].Message, "is not a valid alias name") + }) + } + }) + + t.Run("get aliases", func(t *testing.T) { + resp := helper.GetAliases(t, nil) + require.NotNil(t, resp) + require.NotEmpty(t, resp.Aliases) + require.Equal(t, 8, len(resp.Aliases)) + }) + + t.Run("get alias", func(t *testing.T) { + resp := helper.GetAlias(t, "BookAlias") + require.NotNil(t, resp) + require.Equal(t, "BookAlias", resp.Alias) + }) + + t.Run("get alias different case", func(t *testing.T) { + resp := helper.GetAlias(t, "bookAlias") // first letter is different case. 
+ require.NotNil(t, resp) + require.Equal(t, "BookAlias", resp.Alias) + }) + + t.Run("get alias not found", func(t *testing.T) { + resp := helper.GetAliasNotFound(t, "AliasThatDoesntExist") + require.Nil(t, resp) + }) + + t.Run("replace alias", func(t *testing.T) { + checkAlias := func(t *testing.T, aliasName, expectedClass string) { + resp := helper.GetAlias(t, aliasName) + require.NotNil(t, resp) + require.Equal(t, aliasName, resp.Alias) + require.Equal(t, expectedClass, resp.Class) + } + aliasName := "AliasThatWillBeReplaced" + checkAlias(t, aliasName, documents.Passage) + helper.UpdateAlias(t, aliasName, documents.Document) + checkAlias(t, aliasName, documents.Document) + }) + + t.Run("replace alias different case", func(t *testing.T) { + checkAlias := func(t *testing.T, aliasName, expectedClass string) { + resp := helper.GetAlias(t, aliasName) + require.NotNil(t, resp) + require.Equal(t, aliasName, resp.Alias) + require.Equal(t, expectedClass, resp.Class) + } + aliasName := "AliasThatWillBeReplaced" + dAliasName := "aliasThatWillBeReplaced" // same with first lower case + checkAlias(t, aliasName, documents.Document) + helper.UpdateAlias(t, dAliasName, documents.Passage) + checkAlias(t, aliasName, documents.Passage) + }) + + t.Run("replace non existing alias", func(t *testing.T) { + checkAlias := func(t *testing.T, aliasName, expectedClass string) { + resp := helper.GetAliasNotFound(t, aliasName) + require.Nil(t, resp) + } + aliasName := "AliasThatDoesntExist" + checkAlias(t, aliasName, documents.Document) + resp, err := helper.UpdateAliasWithReturn(t, aliasName, documents.Document) + require.Error(t, err) + require.Nil(t, resp) + checkAlias(t, aliasName, documents.Document) + }) + + t.Run("replace alias of non existing collection", func(t *testing.T) { + checkAlias := func(t *testing.T, aliasName, expectedClass string) { + resp := helper.GetAlias(t, aliasName) + require.NotNil(t, resp) + require.Equal(t, aliasName, resp.Alias) + require.Equal(t, expectedClass, 
resp.Class) + } + aliasName := "AliasThatWillBeReplaced" + checkAlias(t, aliasName, documents.Passage) + resp, err := helper.UpdateAliasWithReturn(t, aliasName, "errorCollection") + require.Error(t, err) + require.Nil(t, resp) + }) + + t.Run("try to use updateAlias with existing collection name", func(t *testing.T) { + resp, err := helper.UpdateAliasWithReturn(t, documents.Document, documents.Passage) + require.Error(t, err) + require.Nil(t, resp) + }) + + t.Run("delete alias", func(t *testing.T) { + checkAliasesCount := func(t *testing.T, count int) { + resp := helper.GetAliases(t, nil) + require.NotNil(t, resp) + require.NotEmpty(t, resp.Aliases) + require.Equal(t, count, len(resp.Aliases)) + } + checkAliasesCount(t, 8) + helper.DeleteAlias(t, "AliasThatWillBeReplaced") + checkAliasesCount(t, 7) + }) + + t.Run("delete alias different case", func(t *testing.T) { + checkAliasesCount := func(t *testing.T, count int) { + resp := helper.GetAliases(t, nil) + require.NotNil(t, resp) + require.NotEmpty(t, resp.Aliases) + require.Equal(t, count, len(resp.Aliases)) + } + checkAliasesCount(t, 7) + helper.DeleteAlias(t, "aliasThatCreated") // note first letter is small + checkAliasesCount(t, 6) + }) + + t.Run("delete alias that doesn't exist", func(t *testing.T) { + resp, err := helper.DeleteAliasWithReturn(t, "AliasThatWillBeReplaced") + require.Error(t, err) + require.Nil(t, resp) + }) + + t.Run("create with clashing names", func(t *testing.T) { + t.Run("create aliases", func(t *testing.T) { + tests := []struct { + name string + alias *models.Alias + expectedErrorMsg string + }{ + { + // trying to create alias with existing class name. + // Should report "Hey, that alias that your are trying to create, there already exists class name with that". 
+ name: "clashing class name", + alias: &models.Alias{Alias: books.DefaultClassName, Class: documents.Passage}, + expectedErrorMsg: fmt.Sprintf("create alias: class %s already exists", books.DefaultClassName), + }, + { + name: "clashing alias name", + alias: &models.Alias{Alias: "BookAlias", Class: documents.Passage}, + expectedErrorMsg: fmt.Sprintf("create alias: %s, alias already exists", "BookAlias"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + alias := tt.alias + params := schema.NewAliasesCreateParams().WithBody(alias) + resp, err := helper.Client(t).Schema.AliasesCreate(params, nil) + require.Nil(t, resp) + require.Error(t, err) + errorPayload, _ := json.MarshalIndent(err, "", " ") + assert.Contains(t, string(errorPayload), tt.expectedErrorMsg) + }) + } + }) + t.Run("create class", func(t *testing.T) { + tests := []struct { + name string + class *models.Class + expectedErrorMsg string + }{ + { + // trying to create class with existing class name. + name: "with existing class name", + class: books.ClassModel2VecVectorizerWithName(books.DefaultClassName), + expectedErrorMsg: fmt.Sprintf("class name %s already exists", books.DefaultClassName), + }, + // trying to create class with existing alias name. 
+ { + name: "with existing alias name", + class: books.ClassModel2VecVectorizerWithName("BookAlias"), + expectedErrorMsg: fmt.Sprintf("alias name %s already exists", "BookAlias"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + params := schema.NewSchemaObjectsCreateParams().WithObjectClass(tt.class) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + require.Error(t, err) + assert.Nil(t, resp) + errorPayload, _ := json.MarshalIndent(err, "", " ") + assert.Contains(t, string(errorPayload), tt.expectedErrorMsg) + }) + } + }) + }) + + t.Run("create alias to non existing collection", func(t *testing.T) { + alias := &models.Alias{Alias: "NonExistingAlias", Class: "NonExistingCollection"} + resp, err := helper.CreateAliasWithReturn(t, alias) + require.Error(t, err) + require.Nil(t, resp) + }) + + t.Run("tests with BookAlias", func(t *testing.T) { + aliasName := "BookAlias" + + assertGetObject := func(t *testing.T, id strfmt.UUID) { + objWithClassName, err := helper.GetObject(t, books.DefaultClassName, id) + require.NoError(t, err) + require.NotNil(t, objWithClassName) + assert.Equal(t, books.DefaultClassName, objWithClassName.Class) + + objWithAlias, err := helper.GetObject(t, aliasName, id) + require.NoError(t, err) + require.NotNil(t, objWithAlias) + assert.Equal(t, objWithClassName.ID, objWithAlias.ID) + + // no matter how the object is accessed(via collection name or alias name), + // returned JSON should have original class name as source of truth. + assert.Equal(t, books.DefaultClassName, objWithAlias.Class) + } + + // Properties test via alias. Any collection properties manipulation needs + // original class name, not the alias. Assumes we have collection: Book, alias: BookAlias. 
+ t.Run("update class property with alias - should fail", func(t *testing.T) { + c := &models.Class{ + Class: aliasName, // using alias name to add property + Properties: []*models.Property{ + { + Name: "new-property", + DataType: []string{"int"}, + }, + }, + } + params := schema.NewSchemaObjectsUpdateParams().WithClassName(aliasName).WithObjectClass(c) + resp, err := helper.Client(t).Schema.SchemaObjectsUpdate(params, nil) + require.Error(t, err) + assert.Nil(t, resp) + }) + t.Run("delete class with alias - should fail", func(t *testing.T) { + params := schema.NewSchemaObjectsDeleteParams().WithClassName(aliasName) + resp, err := helper.Client(t).Schema.SchemaObjectsDelete(params, nil) + // even deleting non-existing class will return 200 OK for collection. so we verify by getting the collection back. + require.NoError(t, err) + assert.NotNil(t, resp) + + gparams := schema.NewSchemaObjectsGetParams().WithClassName(books.DefaultClassName) + gresp, err := helper.Client(t).Schema.SchemaObjectsGet(gparams, nil) + require.NoError(t, err) + assert.NotNil(t, gresp) + assert.NotNil(t, gresp.Payload) + assert.Equal(t, books.DefaultClassName, gresp.Payload.Class) + }) + + // Tenants test via alias. Any collection tenants manipulation needs + // original class name, not the alias. Assumes we have collection: Book, alias: BookAlias. 
+ t.Run("add_update_delete tenants withalias - should fail", func(t *testing.T) { + className := "MultiTenantClass" + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: entschema.DataTypeText.PropString(), + }, + }, + } + helper.CreateClass(t, &testClass) + defer helper.DeleteClass(t, className) + + aliasName := "MultiTenantAlias" + alias := models.Alias{ + Class: className, + Alias: aliasName, + } + + helper.CreateAlias(t, &alias) + resp := helper.GetAliases(t, &alias.Class) + require.NotNil(t, resp) + require.NotEmpty(t, resp.Aliases) + aliasCreated := false + for _, alias := range resp.Aliases { + expAlias := entschema.UppercaseClassName(alias.Alias) + expClass := entschema.UppercaseClassName(alias.Class) + if expAlias == alias.Alias && expClass == alias.Class { + aliasCreated = true + } + } + assert.True(t, aliasCreated) + defer helper.DeleteAlias(t, aliasName) + + // try to add tenants via alias should fail + tenantName := "Tenant1" + tenants := []*models.Tenant{{ + Name: tenantName, + ActivityStatus: "HOT", + }} + params := schema.NewTenantsCreateParams().WithClassName(aliasName).WithBody(tenants) + xresp, err := helper.Client(t).Schema.TenantsCreate(params, nil) + require.Error(t, err) + assert.Nil(t, xresp) + + // try to update tenants via alias should fail + tenantName = "Tenant2" + tenants = []*models.Tenant{{ + Name: tenantName, + ActivityStatus: "HOT", + }} + params = schema.NewTenantsCreateParams().WithClassName(className).WithBody(tenants) // try to create with class name + xresp, err = helper.Client(t).Schema.TenantsCreate(params, nil) + require.NoError(t, err) + assert.NotNil(t, xresp) + + tenants[0].ActivityStatus = "COLD" + uparams := schema.NewTenantsUpdateParams().WithClassName(aliasName).WithBody(tenants) // try to update with alias name + uresp, err := helper.Client(t).Schema.TenantsUpdate(uparams, nil) + 
require.Error(t, err) + assert.Nil(t, uresp) + + // try to delete tenants via alias + dparams := schema.NewTenantsDeleteParams().WithClassName(aliasName).WithTenants([]string{tenantName}) + dresp, err := helper.Client(t).Schema.TenantsDelete(dparams, nil) + require.Error(t, err) + assert.Nil(t, dresp) + }) + + t.Run("create class with alias name", func(t *testing.T) { + class := books.ClassModel2VecVectorizerWithName(aliasName) + params := schema.NewSchemaObjectsCreateParams().WithObjectClass(class) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + require.Nil(t, resp) + require.Error(t, err) + errorPayload, _ := json.MarshalIndent(err, "", " ") + assert.Contains(t, string(errorPayload), fmt.Sprintf("alias name %s already exists", class.Class)) + }) + t.Run("GraphQL Get query with alias", func(t *testing.T) { + getQuery := ` + { + Get{ + %s%s{ + title + description + _additional{ + id + } + } + } + }` + tests := []struct { + name string + query string + }{ + { + name: "Get", + query: fmt.Sprintf(getQuery, aliasName, ""), + }, + { + name: "Get with nearText", + query: fmt.Sprintf(getQuery, aliasName, `(nearText:{concepts:"Dune"})`), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := graphqlhelper.AssertGraphQL(t, nil, tt.query).Get("Get", aliasName).AsSlice() + require.NotEmpty(t, res) + for _, r := range res { + elem, ok := r.(map[string]interface{}) + require.True(t, ok) + title, ok := elem["title"].(string) + require.True(t, ok) + require.NotEmpty(t, title) + description, ok := elem["description"].(string) + require.True(t, ok) + require.NotEmpty(t, description) + id, ok := elem["_additional"].(map[string]interface{})["id"].(string) + require.True(t, ok) + require.NotEmpty(t, id) + } + }) + } + }) + t.Run("GraphQL Aggregate query with alias", func(t *testing.T) { + aggregateQuery := ` + { + Aggregate{ + %s%s{ + meta{count} + } + } + }` + tests := []struct { + name string + query string + }{ + { + name: 
"Aggregate", + query: fmt.Sprintf(aggregateQuery, aliasName, ""), + }, + { + name: "Aggregate with nearText", + query: fmt.Sprintf(aggregateQuery, aliasName, `(nearText:{concepts:"Dune" certainty:0.8})`), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := graphqlhelper.AssertGraphQL(t, nil, tt.query).Get("Aggregate", aliasName).AsSlice() + require.NotEmpty(t, res) + for _, r := range res { + elem, ok := r.(map[string]interface{}) + require.True(t, ok) + count, ok := elem["meta"].(map[string]interface{})["count"].(json.Number) + require.True(t, ok) + c, err := count.Int64() + require.NoError(t, err) + require.Greater(t, c, int64(0)) + } + }) + } + }) + t.Run("get class objects with alias", func(t *testing.T) { + assertGetObject(t, books.ProjectHailMary) + }) + + t.Run("create object with alias", func(t *testing.T) { + objID := strfmt.UUID("67b79643-cf8b-4b22-b206-000000000001") + obj := &models.Object{ + Class: aliasName, + ID: objID, + Properties: map[string]interface{}{ + "title": "The Martian", + "description": "Stranded on Mars after a dust storm forces his crew to evacuate, astronaut Mark Watney is presumed dead and left alone on the hostile planet.", + }, + } + created, err := helper.CreateObjectWithResponse(t, obj) + require.NoError(t, err) + // should still return original class name in the response (not alias) + assert.Equal(t, books.DefaultClassName, created.Class) + assertGetObject(t, objID) + }) + + t.Run("update object with alias", func(t *testing.T) { + objID := strfmt.UUID("67b79643-cf8b-4b22-b206-000000000001") + obj := &models.Object{ + Class: aliasName, + ID: objID, + Properties: map[string]interface{}{ + "title": "The Martian", + "description": "A book about an astronaut Mark Watney.", + }, + } + updated, err := helper.UpdateObjectWithResponse(t, obj) + require.NoError(t, err) + // should still return original class name in the response (not alias) + assert.Equal(t, books.DefaultClassName, updated.Class) + 
assertGetObject(t, objID) + }) + + t.Run("patch object with alias", func(t *testing.T) { + objID := strfmt.UUID("67b79643-cf8b-4b22-b206-000000000001") + obj := &models.Object{ + Class: aliasName, + ID: objID, + Properties: map[string]interface{}{ + "title": "The Martian", + "description": "A book about an astronaut Mark Watney.", + }, + } + err := helper.PatchObject(t, obj) + require.NoError(t, err) + assertGetObject(t, objID) + }) + + t.Run("head object with alias", func(t *testing.T) { + objID := strfmt.UUID("67b79643-cf8b-4b22-b206-000000000001") + err := helper.HeadObject(t, objID) + require.NoError(t, err) + }) + + t.Run("validate object with alias", func(t *testing.T) { + objID := strfmt.UUID("67b79643-cf8b-4b22-b206-000000000001") + obj := &models.Object{ + Class: aliasName, + ID: objID, + Properties: map[string]interface{}{ + "title": "The Martian", + "description": "A book about an astronaut Mark Watney.", + }, + } + err := helper.ValidateObject(t, obj) + require.NoError(t, err) + assertGetObject(t, objID) + }) + + t.Run("batch insert with alias", func(t *testing.T) { + objID1 := strfmt.UUID("67b79643-cf8b-4b22-b206-000000000001") + obj1 := &models.Object{ + Class: aliasName, + ID: objID1, + Properties: map[string]interface{}{ + "title": "The Martian", + "description": "A book about an astronaut Mark Watney that was left on Mars.", + }, + } + objID2 := strfmt.UUID("67b79643-cf8b-4b22-b206-000000000002") + obj2 := &models.Object{ + Class: aliasName, + ID: objID2, + Properties: map[string]interface{}{ + "title": "Nonexistent", + "description": "A book about nothing.", + }, + } + resp := helper.CreateObjectsBatchWithResponse(t, []*models.Object{obj1, obj2}) + for _, obj := range resp { + // should still return original class name in the response (not alias) + assert.Equal(t, books.DefaultClassName, obj.Class) + } + assertGetObject(t, objID1) + assertGetObject(t, objID2) + }) + + t.Run("batch delete with alias", func(t *testing.T) { + valueText := 
"Nonexistent" + batchDelete := &models.BatchDelete{ + Match: &models.BatchDeleteMatch{ + Class: aliasName, + Where: &models.WhereFilter{ + Path: []string{"title"}, + Operator: models.WhereFilterOperatorEqual, + ValueText: &valueText, + }, + }, + } + resp := helper.DeleteObjectsBatchWithResponse(t, batchDelete, types.ConsistencyLevelAll) + require.NotNil(t, resp) + require.NotNil(t, resp.Match) + // should still return original class name in the response (not alias) + assert.Equal(t, books.DefaultClassName, resp.Match.Class) + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authn/dynamic_users_restore_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authn/dynamic_users_restore_test.go new file mode 100644 index 0000000000000000000000000000000000000000..300e3dc1384a2f82b68600211c1a2021025f7480 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authn/dynamic_users_restore_test.go @@ -0,0 +1,185 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authn + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/users" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestBackupAndRestoreDynamicUsers(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + compose, err := docker. + New(). + WithWeaviate(). + WithApiKey().WithUserApiKey(adminUser, adminKey). 
+ WithRBAC().WithRbacRoots(adminUser). + WithBackendFilesystem().WithDbUsers(). + Start(ctx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + backend := "filesystem" + testUserName := "test-user" + + // one class is needed for backup + par := articles.ParagraphsClass() + + t.Run("Backup and full restore", func(t *testing.T) { + backupID := "backup-1" + + helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + helper.CreateClassAuth(t, par, adminKey) + defer helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + + helper.DeleteUser(t, testUserName, adminKey) + helper.CreateUser(t, testUserName, adminKey) + defer helper.DeleteUser(t, testUserName, adminKey) + + resp, err := helper.CreateBackupWithAuthz(t, helper.DefaultBackupConfig(), par.Class, backend, backupID, helper.CreateAuth(adminKey)) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.Equal(t, "", resp.Payload.Error) + + helper.ExpectBackupEventuallyCreated(t, backupID, backend, helper.CreateAuth(adminKey), helper.WithPollInterval(helper.MinPollInterval), helper.WithDeadline(helper.MaxDeadline)) + + // delete user + helper.DeleteUser(t, testUserName, adminKey) + helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + + all := "all" + restoreConf := helper.DefaultRestoreConfig() + restoreConf.UsersOptions = &all + respR, err := helper.RestoreBackupWithAuthz(t, restoreConf, par.Class, backend, backupID, map[string]string{}, helper.CreateAuth(adminKey)) + require.Nil(t, err) + require.NotNil(t, respR.Payload) + require.Equal(t, "", respR.Payload.Error) + + helper.ExpectBackupEventuallyRestored(t, backupID, backend, helper.CreateAuth(adminKey), helper.WithPollInterval(helper.MinPollInterval), helper.WithDeadline(helper.MaxDeadline)) + + user := 
helper.GetUser(t, testUserName, adminKey) + require.NotNil(t, user) + require.Equal(t, *user.UserID, testUserName) + }) + + t.Run("Backup and restore without users", func(t *testing.T) { + backupID := "backup-2" + + helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + helper.CreateClassAuth(t, par, adminKey) + defer helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + + helper.DeleteUser(t, testUserName, adminKey) + helper.CreateUser(t, testUserName, adminKey) + defer helper.DeleteUser(t, testUserName, adminKey) + + resp, err := helper.CreateBackupWithAuthz(t, helper.DefaultBackupConfig(), par.Class, backend, backupID, helper.CreateAuth(adminKey)) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.Equal(t, "", resp.Payload.Error) + + helper.ExpectBackupEventuallyCreated(t, backupID, backend, helper.CreateAuth(adminKey), helper.WithPollInterval(helper.MinPollInterval), helper.WithDeadline(helper.MaxDeadline)) + + // delete user + helper.DeleteUser(t, testUserName, adminKey) + helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + + noRestore := "noRestore" + restoreConf := helper.DefaultRestoreConfig() + restoreConf.UsersOptions = &noRestore + respR, err := helper.RestoreBackupWithAuthz(t, restoreConf, par.Class, backend, backupID, map[string]string{}, helper.CreateAuth(adminKey)) + require.Nil(t, err) + require.NotNil(t, respR.Payload) + require.Equal(t, "", respR.Payload.Error) + + helper.ExpectBackupEventuallyRestored(t, backupID, backend, helper.CreateAuth(adminKey), helper.WithPollInterval(helper.MinPollInterval), helper.WithDeadline(helper.MaxDeadline)) + + respU, err := helper.Client(t).Users.GetUserInfo(users.NewGetUserInfoParams().WithUserID(testUserName), helper.CreateAuth(adminKey)) + require.Nil(t, respU) + require.Error(t, err) + }) + + testRoleName := "testRole" + testCollectionName := "TestCollection" + testRole := &models.Role{ + Name: String(testRoleName), + Permissions: 
[]*models.Permission{ + {Action: String(authorization.ReadRoles), Backups: &models.PermissionBackups{Collection: String(testCollectionName)}}, + }, + } + + t.Run("Backup and full restore users and roles", func(t *testing.T) { + backupID := "backup-3" + + helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + helper.CreateClassAuth(t, par, adminKey) + defer helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + + helper.DeleteUser(t, testUserName, adminKey) + helper.CreateUser(t, testUserName, adminKey) + defer helper.DeleteUser(t, testUserName, adminKey) + + helper.DeleteRole(t, adminKey, testRoleName) + helper.CreateRole(t, adminKey, testRole) + defer helper.DeleteRole(t, adminKey, testRoleName) + helper.AssignRoleToUser(t, adminKey, testRoleName, testUserName) + + resp, err := helper.CreateBackupWithAuthz(t, helper.DefaultBackupConfig(), par.Class, backend, backupID, helper.CreateAuth(adminKey)) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.Equal(t, "", resp.Payload.Error) + + helper.ExpectBackupEventuallyCreated(t, backupID, backend, helper.CreateAuth(adminKey), helper.WithPollInterval(helper.MinPollInterval), helper.WithDeadline(helper.MaxDeadline)) + + // delete user and role + helper.DeleteUser(t, testUserName, adminKey) + helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + helper.DeleteRole(t, adminKey, testRoleName) + + all := "all" + restoreConf := helper.DefaultRestoreConfig() + restoreConf.UsersOptions = &all + restoreConf.RolesOptions = &all + respR, err := helper.RestoreBackupWithAuthz(t, restoreConf, par.Class, backend, backupID, map[string]string{}, helper.CreateAuth(adminKey)) + require.Nil(t, err) + require.NotNil(t, respR.Payload) + require.Equal(t, "", respR.Payload.Error) + + helper.ExpectBackupEventuallyRestored(t, backupID, backend, helper.CreateAuth(adminKey), helper.WithPollInterval(helper.MinPollInterval), helper.WithDeadline(helper.MaxDeadline)) + + user := 
helper.GetUser(t, testUserName, adminKey) + require.NotNil(t, user) + require.Equal(t, *user.UserID, testUserName) + require.Equal(t, user.Roles[0], testRoleName) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authn/dynamic_users_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authn/dynamic_users_test.go new file mode 100644 index 0000000000000000000000000000000000000000..800bf2f9b3a6803526fb7b6f0039d658a00c10fd --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authn/dynamic_users_test.go @@ -0,0 +1,374 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authn + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/weaviate/weaviate/test/docker" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/users" + "github.com/weaviate/weaviate/test/helper" +) + +func TestCreateUser(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + + otherUser := "custom-user" + otherKey := "custom-key" + + otherUser2 := "custom-user2" + otherKey2 := "custom-key2" + + otherUser3 := "custom-user3" + otherKey3 := "custom-key3" + + otherUser4 := "custom-user4" + otherKey4 := "custom-key4" + + otherUser5 := "custom-user5" + otherKey5 := "custom-key5" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + compose, err := docker.New().WithWeaviate(). + WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(otherUser, otherKey).WithUserApiKey(otherUser2, otherKey2).WithUserApiKey(otherUser3, otherKey3).WithUserApiKey(otherUser4, otherKey4).WithUserApiKey(otherUser5, otherKey5). + WithDbUsers(). + WithRBAC().WithRbacRoots(adminUser). 
+ Start(ctx) + require.Nil(t, err) + helper.SetupClient(compose.GetWeaviate().URI()) + defer func() { + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + userName := "CreateUserTestUser" + + t.Run("create and delete user", func(t *testing.T) { + helper.DeleteUser(t, userName, adminKey) + resp, err := helper.Client(t).Users.CreateUser(users.NewCreateUserParams().WithUserID(userName), helper.CreateAuth(adminKey)) + require.NoError(t, err) + require.NotEmpty(t, resp) + require.Greater(t, len(*resp.Payload.Apikey), 10) + + info := helper.GetInfoForOwnUser(t, *resp.Payload.Apikey) + require.Equal(t, userName, *info.Username) + require.Len(t, info.Roles, 0) + require.Len(t, info.Groups, 0) + + respDelete, err := helper.Client(t).Users.DeleteUser(users.NewDeleteUserParams().WithUserID(userName), helper.CreateAuth(adminKey)) + require.NoError(t, err) + require.NotNil(t, respDelete) + var parsedDelete *users.DeleteUserNoContent + require.True(t, errors.As(respDelete, &parsedDelete)) + require.Equal(t, 204, respDelete.Code()) + + _, err = helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth("non-existent")) + require.NotNil(t, err) + var parsed *users.GetOwnInfoUnauthorized + require.True(t, errors.As(err, &parsed)) + require.Equal(t, 401, parsed.Code()) + }) + + t.Run("create and rotate key", func(t *testing.T) { + helper.DeleteUser(t, userName, adminKey) + oldKey := helper.CreateUser(t, userName, adminKey) + + // login works after user creation + info := helper.GetInfoForOwnUser(t, oldKey) + require.Equal(t, userName, *info.Username) + user := helper.GetUser(t, userName, adminKey) + require.Equal(t, user.APIKeyFirstLetters, oldKey[:3]) + + // rotate key and test that old key is not working anymore + newKey := helper.RotateKey(t, userName, adminKey) + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth(oldKey)) + require.Error(t, err) + + infoNew := 
helper.GetInfoForOwnUser(t, newKey) + require.Equal(t, userName, *infoNew.Username) + + user = helper.GetUser(t, userName, adminKey) + require.Equal(t, user.APIKeyFirstLetters, newKey[:3]) + require.NotEqual(t, newKey, oldKey) + require.NotEqual(t, newKey[:10], oldKey[:10]) + + helper.DeleteUser(t, userName, adminKey) + }) + + t.Run("import static user and rotate key", func(t *testing.T) { + allUsers := helper.ListAllUsers(t, adminKey) + found := false + for _, user := range allUsers { + if *user.UserID == otherUser { + require.Equal(t, *user.DbUserType, string(models.UserTypeOutputDbEnvUser)) + found = true + break + } + } + require.True(t, found) + + timeBeforeImport := time.Now() + time.Sleep(time.Millisecond * 2) // make sure that times are actually less, as we lose ns precision during serialization + oldKey := helper.CreateUserWithApiKey(t, otherUser, adminKey, nil) + require.Equal(t, oldKey, otherKey) + time.Sleep(time.Millisecond * 2) + timeAfterImport := time.Now() + + info := helper.GetInfoForOwnUser(t, oldKey) + require.Equal(t, otherUser, *info.Username) + user := helper.GetUser(t, otherUser, adminKey) + require.Equal(t, user.APIKeyFirstLetters, oldKey[:3]) + require.Equal(t, *user.DbUserType, string(models.UserTypeOutputDbUser)) + + // rotate key and test that old key is not working anymore + newKey := helper.RotateKey(t, otherUser, adminKey) + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth(oldKey)) + require.Error(t, err) + + infoNew := helper.GetInfoForOwnUser(t, newKey) + require.Equal(t, otherUser, *infoNew.Username) + + user = helper.GetUser(t, otherUser, adminKey) + require.Equal(t, user.APIKeyFirstLetters, newKey[:3]) + require.NotEqual(t, newKey, oldKey) + require.NotEqual(t, newKey[:10], oldKey[:10]) + require.Less(t, timeBeforeImport.UTC(), time.Time(user.CreatedAt).UTC()) + require.Less(t, time.Time(user.CreatedAt).UTC(), timeAfterImport.UTC()) + + helper.DeleteUser(t, otherUser, adminKey) + }) + + 
t.Run("import static user with time", func(t *testing.T) { + createTime := time.Now().Add(-time.Hour) + helper.CreateUserWithApiKey(t, otherUser2, adminKey, &createTime) + + user := helper.GetUser(t, otherUser2, adminKey) + require.Equal(t, time.Time(user.CreatedAt).UTC().Truncate(time.Millisecond), createTime.UTC().Truncate(time.Millisecond)) + }) + + t.Run("import static user and delete", func(t *testing.T) { + key := helper.CreateUserWithApiKey(t, otherUser3, adminKey, nil) + + info := helper.GetInfoForOwnUser(t, key) + require.Equal(t, otherUser3, *info.Username) + + helper.DeleteUser(t, otherUser3, adminKey) + + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth(otherKey3)) + require.Error(t, err) + var parsed *users.GetOwnInfoUnauthorized + require.True(t, errors.As(err, &parsed)) + require.Equal(t, 401, parsed.Code()) + }) + + t.Run("import static user and suspend with rotate", func(t *testing.T) { + key := helper.CreateUserWithApiKey(t, otherUser4, adminKey, nil) + + info := helper.GetInfoForOwnUser(t, key) + require.Equal(t, otherUser4, *info.Username) + + helper.DeactivateUser(t, adminKey, otherUser4, true) + + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth(otherKey4)) + require.Error(t, err) + var parsed *users.GetOwnInfoUnauthorized + require.True(t, errors.As(err, &parsed)) + require.Equal(t, 401, parsed.Code()) + + helper.ActivateUser(t, adminKey, otherUser4) + _, err = helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth(otherKey4)) + require.Error(t, err) + require.True(t, errors.As(err, &parsed)) + require.Equal(t, 401, parsed.Code()) + + newKey := helper.RotateKey(t, otherUser4, adminKey) + helper.GetInfoForOwnUser(t, newKey) + }) + + t.Run("import static user and suspend without rotate", func(t *testing.T) { + key := helper.CreateUserWithApiKey(t, otherUser5, adminKey, nil) + + info := helper.GetInfoForOwnUser(t, key) + require.Equal(t, 
otherUser5, *info.Username) + + helper.DeactivateUser(t, adminKey, otherUser5, false) + + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth(otherKey5)) + require.Error(t, err) + var parsed *users.GetOwnInfoUnauthorized + require.True(t, errors.As(err, &parsed)) + require.Equal(t, 401, parsed.Code()) + + helper.ActivateUser(t, adminKey, otherUser5) + helper.GetInfoForOwnUser(t, key) + }) +} + +func TestWithStaticUser(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + + otherKey := "custom-key" + otherUser := "custom-user" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + compose, err := docker.New().WithWeaviate().WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(otherUser, otherKey).WithDbUsers().Start(ctx) + require.Nil(t, err) + helper.SetupClient(compose.GetWeaviate().URI()) + + defer func() { + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + + t.Run("create with existing static user name", func(t *testing.T) { + resp, err := helper.Client(t).Users.CreateUser(users.NewCreateUserParams().WithUserID(otherUser), helper.CreateAuth(adminKey)) + require.Error(t, err) + require.Nil(t, resp) + var parsed *users.CreateUserConflict + require.True(t, errors.As(err, &parsed)) + }) + + t.Run("delete existing static user name", func(t *testing.T) { + resp, err := helper.Client(t).Users.DeleteUser(users.NewDeleteUserParams().WithUserID(otherUser), helper.CreateAuth(adminKey)) + require.Error(t, err) + require.Nil(t, resp) + var parsed *users.DeleteUserUnprocessableEntity + require.True(t, errors.As(err, &parsed)) + }) + + t.Run("rotate existing static user name", func(t *testing.T) { + resp, err := helper.Client(t).Users.RotateUserAPIKey(users.NewRotateUserAPIKeyParams().WithUserID(otherUser), helper.CreateAuth(adminKey)) + require.Error(t, err) + require.Nil(t, resp) + var parsed *users.RotateUserAPIKeyUnprocessableEntity + 
require.True(t, errors.As(err, &parsed)) + }) +} + +func TestSuspendAndActivate(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + compose, err := docker.New().WithWeaviate().WithApiKey().WithUserApiKey(adminUser, adminKey).WithDbUsers().Start(ctx) + require.Nil(t, err) + helper.SetupClient(compose.GetWeaviate().URI()) + + defer func() { + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + helper.SetupClient(compose.GetWeaviate().URI()) + + dynamicUser := "dynamic-user" + + t.Run("suspend and activate without revocation", func(t *testing.T) { + helper.DeleteUser(t, dynamicUser, adminKey) + apiKey := helper.CreateUser(t, dynamicUser, adminKey) + + info := helper.GetInfoForOwnUser(t, apiKey) + require.NotNil(t, info) + + helper.DeactivateUser(t, adminKey, dynamicUser, false) + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth(apiKey)) + require.Error(t, err) + + helper.ActivateUser(t, adminKey, dynamicUser) + infoActive := helper.GetInfoForOwnUser(t, apiKey) + require.NotNil(t, infoActive) + }) + + t.Run("suspend and activate with revocation", func(t *testing.T) { + helper.DeleteUser(t, dynamicUser, adminKey) + apiKey := helper.CreateUser(t, dynamicUser, adminKey) + + info := helper.GetInfoForOwnUser(t, apiKey) + require.NotNil(t, info) + + helper.DeactivateUser(t, adminKey, dynamicUser, true) + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth(apiKey)) + require.Error(t, err) + + helper.ActivateUser(t, adminKey, dynamicUser) + _, err = helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth(apiKey)) + require.Error(t, err) + + // need to rotate key for user to work again + apiKey = helper.RotateKey(t, dynamicUser, adminKey) + require.NotNil(t, helper.GetInfoForOwnUser(t, apiKey)) + }) + + t.Run("suspend and activate with revocation 
- first rotate then activate", func(t *testing.T) { + helper.DeleteUser(t, dynamicUser, adminKey) + apiKey := helper.CreateUser(t, dynamicUser, adminKey) + + info := helper.GetInfoForOwnUser(t, apiKey) + require.NotNil(t, info) + + helper.DeactivateUser(t, adminKey, dynamicUser, true) + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth(apiKey)) + require.Error(t, err) + + apiKey = helper.RotateKey(t, dynamicUser, adminKey) + + helper.ActivateUser(t, adminKey, dynamicUser) + require.NotNil(t, helper.GetInfoForOwnUser(t, apiKey)) + }) + + t.Run("suspend and delete with deactivate key", func(t *testing.T) { + for _, deactivateKey := range []bool{true, false} { + helper.DeleteUser(t, dynamicUser, adminKey) + apiKey := helper.CreateUser(t, dynamicUser, adminKey) + + info := helper.GetInfoForOwnUser(t, apiKey) + require.NotNil(t, info) + + helper.DeactivateUser(t, adminKey, dynamicUser, deactivateKey) + helper.DeleteUser(t, dynamicUser, adminKey) + + // create new user with same name, should not be suspended anymore + apiKey = helper.CreateUser(t, dynamicUser, adminKey) + require.NotNil(t, helper.GetInfoForOwnUser(t, apiKey)) + } + }) + + t.Run("double suspend", func(t *testing.T) { + helper.DeleteUser(t, dynamicUser, adminKey) + helper.CreateUser(t, dynamicUser, adminKey) + helper.DeactivateUser(t, adminKey, dynamicUser, false) + // suspend again + _, err := helper.Client(t).Users.DeactivateUser(users.NewDeactivateUserParams().WithUserID(dynamicUser), helper.CreateAuth(adminKey)) + require.Error(t, err) + var conflict *users.DeactivateUserConflict + require.True(t, errors.As(err, &conflict)) + }) + + t.Run("activate active user", func(t *testing.T) { + helper.DeleteUser(t, dynamicUser, adminKey) + helper.CreateUser(t, dynamicUser, adminKey) + _, err := helper.Client(t).Users.ActivateUser(users.NewActivateUserParams().WithUserID(dynamicUser), helper.CreateAuth(adminKey)) + require.Error(t, err) + var conflict 
*users.ActivateUserConflict + require.True(t, errors.As(err, &conflict)) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authn/helper.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authn/helper.go new file mode 100644 index 0000000000000000000000000000000000000000..1b51576a1cf93e291597f1e25e32f3d7c3a8b4e1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authn/helper.go @@ -0,0 +1,16 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authn + +func String(s string) *string { + return &s +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authn/own_info_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authn/own_info_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1275926a4730a03ddc35503f25a762b5f36a580f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authn/own_info_test.go @@ -0,0 +1,251 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authn + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/users" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthnGetOwnInfoWithAnonAccessEnabled(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + compose, err := docker.New().WithWeaviate().Start(ctx) + require.Nil(t, err) + + helper.SetupClient(compose.GetWeaviate().URI()) + + defer func() { + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + + t.Run("Get own info for anonymous access", func(t *testing.T) { + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), nil) + require.NotNil(t, err) + parsed, ok := err.(*users.GetOwnInfoUnauthorized) //nolint:errorlint + require.True(t, ok) + require.Equal(t, 401, parsed.Code()) + }) +} + +func TestAuthnGetOwnInfoWithAdminlistAndOidc(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + compose, err := docker.New(). + WithWeaviate(). + WithMockOIDC(). + WithWeaviateEnv("AUTHORIZATION_ADMINLIST_ENABLED", "true"). + WithWeaviateEnv("AUTHORIZATION_ADMINLIST_USERS", "admin-user"). + Start(ctx) + require.Nil(t, err) + + helper.SetupClient(compose.GetWeaviate().URI()) + + // the oidc mock server returns first the token for the admin user and then for the custom-user. 
See its + // description for details + token, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + + defer func() { + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + + t.Run("Get own info", func(t *testing.T) { + info := helper.GetInfoForOwnUser(t, token) + require.Equal(t, "admin-user", *info.Username) + require.Len(t, info.Roles, 0) + require.Len(t, info.Groups, 0) + }) + + t.Run("Unauthenticated", func(t *testing.T) { + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth("non-existent")) + require.NotNil(t, err) + parsed, ok := err.(*users.GetOwnInfoUnauthorized) //nolint:errorlint + require.True(t, ok) + require.Equal(t, 401, parsed.Code()) + }) +} + +func TestAuthnGetOwnInfoWithOidc(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + + compose, err := docker.New().WithWeaviate().WithMockOIDC().Start(ctx) + require.Nil(t, err) + + helper.SetupClient(compose.GetWeaviate().URI()) + + // the oidc mock server returns first the token for the admin user and then for the custom-user. 
See its + // description for details + token, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + + defer func() { + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + + t.Run("Get own info", func(t *testing.T) { + info := helper.GetInfoForOwnUser(t, token) + require.Equal(t, "admin-user", *info.Username) + require.Len(t, info.Roles, 0) + require.Len(t, info.Groups, 0) + }) + + t.Run("Unauthenticated", func(t *testing.T) { + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth("non-existent")) + require.NotNil(t, err) + parsed, ok := err.(*users.GetOwnInfoUnauthorized) //nolint:errorlint + require.True(t, ok) + require.Equal(t, 401, parsed.Code()) + }) +} + +func TestAuthnGetOwnInfoWithRBAC(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + + customUser := "custom-user" + customKey := "custom-key" + + testingRole := "testingOwnRole" + + adminKey := "admin-key" + adminUser := "admin-user" + + compose, err := docker.New(). + WithWeaviate(). + WithRBAC(). + WithApiKey(). + WithUserApiKey(customUser, customKey). + WithUserApiKey(adminUser, adminKey). + WithRbacRoots(adminUser). + WithRbacViewers(customUser). 
+ Start(ctx) + require.Nil(t, err) + + helper.SetupClient(compose.GetWeaviate().URI()) + + defer func() { + helper.DeleteRole(t, adminKey, testingRole) + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + + t.Run("Get own info - no roles", func(t *testing.T) { + info := helper.GetInfoForOwnUser(t, customKey) + require.Equal(t, customUser, *info.Username) + require.Len(t, info.Roles, 0) + require.Len(t, info.Groups, 0) + }) + + t.Run("Create and assign role", func(t *testing.T) { + helper.CreateRole( + t, + adminKey, + &models.Role{ + Name: &testingRole, + Permissions: []*models.Permission{{ + Action: String(authorization.CreateCollections), + Collections: &models.PermissionCollections{Collection: String("*")}, + }}, + }, + ) + helper.AssignRoleToUser(t, adminKey, testingRole, customUser) + }) + + t.Run("Get own roles - existing roles", func(t *testing.T) { + info := helper.GetInfoForOwnUser(t, customKey) + require.Equal(t, customUser, *info.Username) + require.Len(t, info.Roles, 1) + require.Equal(t, testingRole, *info.Roles[0].Name) + require.Len(t, info.Groups, 0) + }) + + t.Run("Unauthenticated", func(t *testing.T) { + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth("non-existent")) + require.NotNil(t, err) + parsed, ok := err.(*users.GetOwnInfoUnauthorized) //nolint:errorlint + require.True(t, ok) + require.Equal(t, 401, parsed.Code()) + }) +} + +func TestAuthnGetOwnInfoWithRBACAndOIDC(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + + customUser := "custom-user" + testingRole := "testingOwnRole" + adminUser := "admin-user" + + compose, err := docker.New(). + WithWeaviate(). + WithRBAC(). + WithApiKey(). + WithRbacRoots(adminUser). + WithRbacViewers(customUser). + WithMockOIDC(). 
+ Start(ctx) + require.Nil(t, err) + + helper.SetupClient(compose.GetWeaviate().URI()) + + // the oidc mock server returns first the token for the admin user and then for the custom-user. See its + // description for details + tokenAdmin, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + tokenCustom, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + + defer func() { + helper.DeleteRole(t, tokenAdmin, testingRole) + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + + t.Run("Get own info - no roles", func(t *testing.T) { + info := helper.GetInfoForOwnUser(t, tokenCustom) + require.Equal(t, customUser, *info.Username) + require.Len(t, info.Roles, 0) + require.Len(t, info.Groups, 1) + }) + + t.Run("Create and assign role", func(t *testing.T) { + helper.CreateRole( + t, + tokenAdmin, + &models.Role{ + Name: &testingRole, + Permissions: []*models.Permission{{ + Action: String(authorization.CreateCollections), + Collections: &models.PermissionCollections{Collection: String("*")}, + }}, + }, + ) + helper.AssignRoleToUserOIDC(t, tokenAdmin, testingRole, customUser) + }) + + t.Run("Get own roles - existing roles", func(t *testing.T) { + info := helper.GetInfoForOwnUser(t, tokenCustom) + require.Equal(t, customUser, *info.Username) + require.Len(t, info.Roles, 1) + require.Equal(t, testingRole, *info.Roles[0].Name) + require.Len(t, info.Groups, 1) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/adminlist_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/adminlist_test.go new file mode 100644 index 0000000000000000000000000000000000000000..da05926fed048cfc79fcbcad35a460562ce45a8d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/adminlist_test.go @@ -0,0 +1,84 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ 
+// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/authz" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAdminlistWithRBACEndpoints(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + customKey := "custom-key" + customUser := "custom-user" + readonlyKey := "readonly-key" + readonlyUser := "readonly-user" + + adminAuth := helper.CreateAuth(adminKey) + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + compose, err := docker. + New(). + WithWeaviate(). + WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(customUser, customKey).WithUserApiKey(readonlyUser, readonlyKey). + WithAdminListAdmins(adminUser).WithAdminListUsers(readonlyUser). 
+ Start(ctx) + require.NoError(t, err) + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + defer compose.Terminate(ctx) + + // as admin you can call all the endpoints, but nothing happens + testRoleName := "testing" + testRole := &models.Role{Name: &testRoleName, Permissions: []*models.Permission{ + { + Action: String(authorization.CreateCollections), + Collections: &models.PermissionCollections{ + Collection: String("*"), + }, + }, + }} + + helper.Client(t).Authz.DeleteRole( + authz.NewDeleteRoleParams().WithID(testRoleName), + adminAuth, + ) + helper.DeleteRole(t, adminKey, testRoleName) + helper.CreateRole(t, adminKey, testRole) + + roles := helper.GetRoles(t, adminKey) + require.Len(t, roles, 0) + + // as read only user you can read but cannot modify anything + _, err = helper.Client(t).Authz.GetRoles(authz.NewGetRolesParams(), helper.CreateAuth(readonlyKey)) + require.NoError(t, err) + + _, err = helper.Client(t).Authz.CreateRole(authz.NewCreateRoleParams().WithBody(testRole), helper.CreateAuth(readonlyKey)) + require.Error(t, err) + + // a user that is neither admin nor readonly can still read roles — NOTE(review): confirm this is intended + _, err = helper.Client(t).Authz.GetRoles(authz.NewGetRolesParams(), helper.CreateAuth(customKey)) + require.NoError(t, err) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/aliases_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/aliases_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8042a8d3f0036e14fc27302b72db8deda9fdc89b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/aliases_test.go @@ -0,0 +1,100 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthzAliases(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + customUser := "custom-user" + customKey := "custom-key" + limitedUser := "limited-user" + limitedKey := "limited-key" + customRole := "custom" + onlyAliasesStartingWithAliasRole := "only-aliases" + + _, down := composeUp(t, + map[string]string{adminUser: adminKey}, + map[string]string{customUser: customKey, limitedUser: limitedKey}, + nil, + ) + defer down() + + clsA := articles.ArticlesClass() + + helper.CreateClassAuth(t, articles.ParagraphsClass(), adminKey) + helper.CreateClassAuth(t, clsA, adminKey) + helper.CreateObjectsBatchAuth(t, []*models.Object{articles.NewArticle().WithTitle("article1").Object()}, adminKey) + + alias1 := "AliasParagraph1" + alias2 := "AliasParagraph2" + alias3 := "AliasParagraph3" + otherAlias1 := "OtherAliasForParagraph1" + + for _, alias := range []string{alias1, alias2, alias3, otherAlias1} { + helper.CreateAliasAuth(t, &models.Alias{Alias: alias, Class: articles.ParagraphsClass().Class}, adminKey) + } + + // make custom role with read_aliases to read all aliases + helper.CreateRole(t, adminKey, &models.Role{Name: &customRole, Permissions: []*models.Permission{ + helper.NewAliasesPermission().WithAction(authorization.ReadAliases).Permission(), + }}) + + // create a role that is able to list aliases starting with Alias* + helper.CreateRole(t, adminKey, &models.Role{Name: &onlyAliasesStartingWithAliasRole, Permissions: []*models.Permission{ + 
helper.NewAliasesPermission().WithAction(authorization.ReadAliases).WithAlias("Alias*").Permission(), + }}) + + t.Run("fail to get aliaes without minimal read_aliases", func(t *testing.T) { + resp, err := helper.Client(t).Schema.AliasesGet(schema.NewAliasesGetParams(), helper.CreateAuth(customKey)) + require.Nil(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Payload) + assert.Len(t, resp.Payload.Aliases, 0) + }) + + t.Run("assign custom role to custom user", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, customRole, customUser) + }) + + t.Run("get minimal aliases with read_aliases", func(t *testing.T) { + resp, err := helper.Client(t).Schema.AliasesGet(schema.NewAliasesGetParams(), helper.CreateAuth(customKey)) + require.Nil(t, err) + require.Len(t, resp.Payload.Aliases, 4) + }) + + t.Run("assign only aliases role to limited user and try to get aliases", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, onlyAliasesStartingWithAliasRole, limitedUser) + resp, err := helper.Client(t).Schema.AliasesGet(schema.NewAliasesGetParams(), helper.CreateAuth(limitedKey)) + require.Nil(t, err) + require.Len(t, resp.Payload.Aliases, 3) + }) + + t.Run("to get one alias with onlyAliasesStartingWithAliasRole role", func(t *testing.T) { + params := schema.NewAliasesGetAliasParams().WithAliasName(alias1) + resp, err := helper.Client(t).Schema.AliasesGetAlias(params, helper.CreateAuth(limitedKey)) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/authz_deprecated_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/authz_deprecated_test.go new file mode 100644 index 0000000000000000000000000000000000000000..62cdaae84130864f6e93a1ddbb7174625c9676db --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/authz_deprecated_test.go @@ -0,0 +1,147 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | 
__/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "testing" + + "github.com/weaviate/weaviate/test/docker" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/authz" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestDeprecatedEndpoints(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + clientAuth := helper.CreateAuth(adminKey) + + customUser := "custom-user" + customKey := "custom-key" + + ctx := context.Background() + + // enable OIDC to be able to assign to db and oidc separately + compose, err := docker.New(). + WithWeaviate().WithMockOIDC().WithRBAC().WithRbacRoots(adminUser). + WithApiKey().WithUserApiKey(customUser, customKey).WithUserApiKey(adminUser, adminKey). 
+ Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + testRoleName := "testRole1" + createCollectionsAction := authorization.CreateCollections + all := "*" + + testRole1 := &models.Role{ + Name: &testRoleName, + Permissions: []*models.Permission{{ + Action: &createCollectionsAction, + Collections: &models.PermissionCollections{Collection: &all}, + }}, + } + + // assign without usertype should assign to OIDC as well as db user + t.Run("assign role to user", func(t *testing.T) { + helper.DeleteRole(t, adminKey, testRoleName) + helper.CreateRole(t, adminKey, testRole1) + defer helper.DeleteRole(t, adminKey, testRoleName) + + _, err := helper.Client(t).Authz.AssignRoleToUser( + authz.NewAssignRoleToUserParams().WithID(customUser).WithBody(authz.AssignRoleToUserBody{Roles: []string{testRoleName}}), + clientAuth, + ) + require.NoError(t, err) + + RolesDbUser := helper.GetRolesForUser(t, customUser, adminKey, true) + require.Len(t, RolesDbUser, 1) + require.Equal(t, testRoleName, *RolesDbUser[0].Name) + }) + + // revoke without usertype should revoke from OIDC as well as db user + t.Run("revoke role from user", func(t *testing.T) { + helper.DeleteRole(t, adminKey, testRoleName) + helper.CreateRole(t, adminKey, testRole1) + defer helper.DeleteRole(t, adminKey, testRoleName) + + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + helper.AssignRoleToUserOIDC(t, adminKey, testRoleName, customUser) + + RolesDbUser := helper.GetRolesForUser(t, customUser, adminKey, true) + require.Len(t, RolesDbUser, 1) + require.Equal(t, testRoleName, *RolesDbUser[0].Name) + + RolesOIDCUser := helper.GetRolesForUserOIDC(t, customUser, adminKey) + require.Len(t, RolesOIDCUser, 1) + require.Equal(t, testRoleName, *RolesOIDCUser[0].Name) + + _, err := helper.Client(t).Authz.RevokeRoleFromUser( + 
authz.NewRevokeRoleFromUserParams().WithID(customUser).WithBody(authz.RevokeRoleFromUserBody{Roles: []string{testRoleName}}), + clientAuth, + ) + require.NoError(t, err) + }) + + t.Run("get role for User and user for role", func(t *testing.T) { + helper.DeleteRole(t, adminKey, testRoleName) + helper.CreateRole(t, adminKey, testRole1) + defer helper.DeleteRole(t, adminKey, testRoleName) + + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + resp, err := helper.Client(t).Authz.GetRolesForUserDeprecated(authz.NewGetRolesForUserDeprecatedParams().WithID(customUser), clientAuth) + require.NoError(t, err) + require.NotNil(t, resp) + require.Len(t, resp.Payload, 1) + require.Equal(t, testRoleName, *resp.Payload[0].Name) + + res, err := helper.Client(t).Authz.GetUsersForRoleDeprecated(authz.NewGetUsersForRoleDeprecatedParams().WithID(testRoleName), clientAuth) + require.Nil(t, err) + require.Equal(t, 1, len(res.Payload)) + require.Equal(t, customUser, res.Payload[0]) + + // no duplicates after also assigning to OIDC + helper.AssignRoleToUserOIDC(t, adminKey, testRoleName, customUser) + resp, err = helper.Client(t).Authz.GetRolesForUserDeprecated(authz.NewGetRolesForUserDeprecatedParams().WithID(customUser), clientAuth) + require.NoError(t, err) + require.NotNil(t, resp) + require.Len(t, resp.Payload, 1) + require.Equal(t, testRoleName, *resp.Payload[0].Name) + + res, err = helper.Client(t).Authz.GetUsersForRoleDeprecated(authz.NewGetUsersForRoleDeprecatedParams().WithID(testRoleName), clientAuth) + require.Nil(t, err) + require.Equal(t, 1, len(res.Payload)) + require.Equal(t, customUser, res.Payload[0]) + + // remove from DB user, OIDC still has role + helper.RevokeRoleFromUser(t, adminKey, testRoleName, customUser) + resp, err = helper.Client(t).Authz.GetRolesForUserDeprecated(authz.NewGetRolesForUserDeprecatedParams().WithID(customUser), clientAuth) + require.NoError(t, err) + require.NotNil(t, resp) + require.Len(t, resp.Payload, 1) + require.Equal(t, 
testRoleName, *resp.Payload[0].Name) + + res, err = helper.Client(t).Authz.GetUsersForRoleDeprecated(authz.NewGetUsersForRoleDeprecatedParams().WithID(testRoleName), clientAuth) + require.Nil(t, err) + require.Equal(t, 1, len(res.Payload)) + require.Equal(t, customUser, res.Payload[0]) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/auto_tenancy_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/auto_tenancy_test.go new file mode 100644 index 0000000000000000000000000000000000000000..4a049995766f4f08615ca3a432b4a41f83bca262 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/auto_tenancy_test.go @@ -0,0 +1,384 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "errors" + "fmt" + "testing" + "time" + + grpchelper "github.com/weaviate/weaviate/test/helper/grpc" + + "github.com/stretchr/testify/require" + "google.golang.org/grpc/metadata" + + "github.com/weaviate/weaviate/client/authz" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthzAutoTenantActivationRBAC(t *testing.T) { + existingUser := "admin-user" + existingKey := "admin-key" + + customUser := "custom-user" + customKey := "custom-key" + + testRoleName := "test-role" + + adminAuth := helper.CreateAuth(existingKey) + + _, teardown := composeUp(t, map[string]string{existingUser: existingKey}, map[string]string{customUser: 
customKey}, nil) + + cls := articles.ParagraphsClass() + tenant := "tenant" + obj := articles.NewParagraph().WithID(UUID1).WithTenant(tenant).Object() + obj2 := articles.NewParagraph().WithID(UUID2).WithTenant(tenant).Object() + + defer func() { + helper.DeleteClassWithAuthz(t, cls.Class, adminAuth) + helper.DeleteRole(t, existingKey, testRoleName) + teardown() + }() + + deactivateTenant := func(t *testing.T) { + helper.UpdateTenantsWithAuthz(t, cls.Class, []*models.Tenant{{Name: obj.Tenant, ActivityStatus: models.TenantActivityStatusCOLD}}, adminAuth) + } + + cls.MultiTenancyConfig = &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantActivation: true, + AutoTenantCreation: false, + } + helper.CreateClassAuth(t, cls, existingKey) + helper.CreateTenantsAuth(t, cls.Class, []*models.Tenant{{Name: obj.Tenant, ActivityStatus: models.TenantActivityStatusHOT}}, existingKey) + helper.CreateObjectAuth(t, obj2, existingKey) + deactivateTenant(t) + + t.Run("create and assign role that can CRUD objects in and read schema of tenant of collection", func(t *testing.T) { + helper.CreateRole(t, existingKey, &models.Role{ + Name: String(testRoleName), + Permissions: []*models.Permission{ + helper.NewDataPermission().WithAction(authorization.CreateData).WithCollection(cls.Class).Permission(), + helper.NewDataPermission().WithAction(authorization.ReadData).WithCollection(cls.Class).Permission(), + helper.NewDataPermission().WithAction(authorization.UpdateData).WithCollection(cls.Class).Permission(), + helper.NewDataPermission().WithAction(authorization.DeleteData).WithCollection(cls.Class).Permission(), + helper.NewCollectionsPermission().WithAction(authorization.ReadCollections).WithCollection("*").Permission(), // all needed for gql search + }, + }) + helper.AssignRoleToUser(t, existingKey, testRoleName, customUser) + }) + + t.Run("successfully create object in tenant", func(t *testing.T) { + defer deactivateTenant(t) + err := helper.CreateObjectAuth(t, obj, customKey) + 
helper.AssertRequestOk(t, nil, err, nil) + }) + + t.Run("successfully get object in tenant", func(t *testing.T) { + defer deactivateTenant(t) + _, err := getObject(t, obj.Class, obj.ID, &obj.Tenant, customKey) + helper.AssertRequestOk(t, nil, err, nil) + }) + + t.Run("successfully update object in tenant", func(t *testing.T) { + defer deactivateTenant(t) + obj.Properties = map[string]string{"contents": "updated"} + _, err := updateObject(t, obj, customKey) + helper.AssertRequestOk(t, nil, err, nil) + }) + + t.Run("successfully search (Get) with gql in tenant", func(t *testing.T) { + defer deactivateTenant(t) + res, err := queryGQL(t, fmt.Sprintf(`{Get{%s(tenant:%q){_additional{id}}}}`, cls.Class, obj.Tenant), customKey) + require.Nil(t, err) + require.NotNil(t, res) + require.NotEmpty(t, res.GetPayload().Data) + require.Empty(t, res.GetPayload().Errors) + }) + + t.Run("successfully search (Aggregate) with gql in tenant", func(t *testing.T) { + defer deactivateTenant(t) + res, err := queryGQL(t, fmt.Sprintf(`{Aggregate{%s(tenant:%q){meta{count}}}}`, cls.Class, obj.Tenant), customKey) + require.Nil(t, err) + require.NotNil(t, res) + require.NotEmpty(t, res.GetPayload().Data) + require.Empty(t, res.GetPayload().Errors) + }) + + t.Run("successfully search (Get) with grpc in tenant", func(t *testing.T) { + defer deactivateTenant(t) + ctx := metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)) + resp, err := helper.ClientGRPC(t).Search(ctx, &protocol.SearchRequest{ + Collection: cls.Class, + Tenant: tenant, + }) + require.Nil(t, err) + require.NotNil(t, resp) + }) + + t.Run("successfully search (Aggregate) with grpc in tenant", func(t *testing.T) { + defer deactivateTenant(t) + ctx := metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)) + resp, err := helper.ClientGRPC(t).Aggregate(ctx, &protocol.AggregateRequest{ + Collection: cls.Class, + Tenant: tenant, + 
 ObjectsCount: true, + }) + require.Nil(t, err) + require.NotNil(t, resp) + }) + + t.Run("successfully delete object in tenant", func(t *testing.T) { + defer deactivateTenant(t) + _, err := deleteObject(t, obj.Class, obj.ID, &obj.Tenant, customKey) + helper.AssertRequestOk(t, nil, err, nil) + }) +} + +func TestAuthzAutoTenantActivationAdminList(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + readonlyKey := "viewer-key" + readonlyUser := "viewer-user" + adminAuth := helper.CreateAuth(adminKey) + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + + builder := docker.New().WithWeaviateWithGRPC(). + WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(readonlyUser, readonlyKey). + WithAdminListAdmins(adminUser).WithAdminListUsers(readonlyUser) + compose, err := builder.Start(ctx) + require.Nil(t, err) + + helper.SetupClient(compose.GetWeaviate().URI()) + helper.SetupGRPCClient(t, compose.GetWeaviate().GrpcURI()) + + teardown := func() { + helper.ResetClient() + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + cancel() + } + + cls := articles.ParagraphsClass() + tenant := "tenant" + obj1 := articles.NewParagraph().WithID(UUID1).WithTenant(tenant).Object() + obj2 := articles.NewParagraph().WithID(UUID2).WithTenant(tenant).Object() + obj3 := articles.NewParagraph().WithID(UUID3).WithTenant("tenant2").Object() + obj4 := articles.NewParagraph().WithID(UUID4).WithTenant("tenant3").Object() + + defer func() { + helper.DeleteClassWithAuthz(t, cls.Class, adminAuth) + teardown() + }() + + deactivateTenants := func(t *testing.T) { + for _, obj := range []*models.Object{obj1, obj2, obj3, obj4} { + helper.UpdateTenantsWithAuthz(t, cls.Class, []*models.Tenant{{Name: obj.Tenant, ActivityStatus: models.TenantActivityStatusCOLD}}, adminAuth) + } + } + + cls.MultiTenancyConfig = &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantActivation: true, + 
AutoTenantCreation: false, + } + helper.DeleteClassWithAuthz(t, cls.Class, adminAuth) + helper.CreateClassAuth(t, cls, adminKey) + helper.CreateTenantsAuth(t, cls.Class, []*models.Tenant{{Name: obj1.Tenant, ActivityStatus: models.TenantActivityStatusHOT}}, adminKey) + helper.CreateTenantsAuth(t, cls.Class, []*models.Tenant{{Name: obj3.Tenant, ActivityStatus: models.TenantActivityStatusHOT}}, adminKey) + helper.CreateTenantsAuth(t, cls.Class, []*models.Tenant{{Name: obj4.Tenant, ActivityStatus: models.TenantActivityStatusHOT}}, adminKey) + helper.CreateObjectAuth(t, obj2, adminKey) + deactivateTenants(t) + + t.Run("successfully create object in tenant as admin", func(t *testing.T) { + defer deactivateTenants(t) + err := helper.CreateObjectAuth(t, obj1, adminKey) + helper.AssertRequestOk(t, nil, err, nil) + }) + + t.Run("successfully update object in tenant as admin", func(t *testing.T) { + defer deactivateTenants(t) + obj1.Properties = map[string]string{"contents": "updated"} + _, err := updateObject(t, obj1, adminKey) + helper.AssertRequestOk(t, nil, err, nil) + }) + + t.Run("successfully batch-create object in tenant as admin", func(t *testing.T) { + defer deactivateTenants(t) + helper.CreateObjectsBatchAuth(t, []*models.Object{obj3}, adminKey) + }) + + t.Run("successfully GRPC batch-create object in tenant as admin", func(t *testing.T) { + defer deactivateTenants(t) + batchReply, err := grpchelper.BatchGRPCWithTenantAuth(t, []*models.Object{obj4}, adminKey) + require.NoError(t, err) + require.Len(t, batchReply.Errors, 0) + }) + + t.Run("fail to update object in tenant as read-only", func(t *testing.T) { + defer deactivateTenants(t) + obj1.Properties = map[string]string{"contents": "updated"} + _, err := updateObject(t, obj1, readonlyKey) + helper.AssertRequestFail(t, nil, err, nil) + }) + + t.Run("fail to delete object in tenant as read-only", func(t *testing.T) { + defer deactivateTenants(t) + _, err := deleteObject(t, obj1.Class, obj1.ID, &obj1.Tenant, 
readonlyKey) + helper.AssertRequestFail(t, nil, err, nil) + }) + + tests := []struct { + name string + key string + }{ + {"admin", adminKey}, + {"read-only", readonlyKey}, + } + + for _, tt := range tests { + t.Run(fmt.Sprintf("successfully get object in tenant as %s", tt.name), func(t *testing.T) { + defer deactivateTenants(t) + _, err := getObject(t, obj1.Class, obj1.ID, &obj1.Tenant, tt.key) + helper.AssertRequestOk(t, nil, err, nil) + }) + + t.Run(fmt.Sprintf("successfully search (Get) with gql in tenant as %s", tt.name), func(t *testing.T) { + defer deactivateTenants(t) + res, err := queryGQL(t, fmt.Sprintf(`{Get{%s(tenant:%q){_additional{id}}}}`, cls.Class, obj1.Tenant), tt.key) + require.Nil(t, err) + require.NotNil(t, res) + require.NotEmpty(t, res.GetPayload().Data) + require.Empty(t, res.GetPayload().Errors) + }) + + t.Run(fmt.Sprintf("successfully search (Aggregate) with gql in tenant as %s", tt.name), func(t *testing.T) { + defer deactivateTenants(t) + res, err := queryGQL(t, fmt.Sprintf(`{Aggregate{%s(tenant:%q){meta{count}}}}`, cls.Class, obj1.Tenant), tt.key) + require.Nil(t, err) + require.NotNil(t, res) + require.NotEmpty(t, res.GetPayload().Data) + require.Empty(t, res.GetPayload().Errors) + }) + + t.Run(fmt.Sprintf("successfully search (Get) with grpc in tenant as %s", tt.name), func(t *testing.T) { + defer deactivateTenants(t) + ctx := metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", tt.key)) + resp, err := helper.ClientGRPC(t).Search(ctx, &protocol.SearchRequest{ + Collection: cls.Class, + Tenant: tenant, + }) + require.Nil(t, err) + require.NotNil(t, resp) + }) + + t.Run(fmt.Sprintf("successfully search (Aggregate) with grpc in tenant as %s", tt.name), func(t *testing.T) { + defer deactivateTenants(t) + ctx := metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", tt.key)) + resp, err := helper.ClientGRPC(t).Aggregate(ctx, &protocol.AggregateRequest{ + 
Collection: cls.Class, + Tenant: tenant, + ObjectsCount: true, + }) + require.Nil(t, err) + require.NotNil(t, resp) + }) + } + + t.Run("successfully delete object in tenant as admin", func(t *testing.T) { + defer deactivateTenants(t) + _, err := deleteObject(t, obj1.Class, obj1.ID, &obj1.Tenant, adminKey) + helper.AssertRequestOk(t, nil, err, nil) + }) + + t.Run("fail to create object in tenant as read-only", func(t *testing.T) { + defer deactivateTenants(t) + err := helper.CreateObjectAuth(t, obj1, readonlyKey) + helper.AssertRequestFail(t, nil, err, nil) + }) +} + +func TestAuthzAutoTenantCreation(t *testing.T) { + existingUser := "existing-user" + existingKey := "existing-key" + + customUser := "custom-user" + customKey := "custom-key" + + testRoleName := "test-role" + + adminAuth := helper.CreateAuth(existingKey) + + _, teardown := composeUp(t, map[string]string{existingUser: existingKey}, map[string]string{customUser: customKey}, nil) + + cls := articles.ParagraphsClass() + tenant := "tenant" + obj := articles.NewParagraph().WithID("00000000-0000-0000-0000-000000000001").WithTenant(tenant).Object() + + defer func() { + helper.DeleteClassWithAuthz(t, cls.Class, adminAuth) + helper.DeleteRole(t, existingKey, testRoleName) + teardown() + }() + + t.Run("setup", func(*testing.T) { + cls.MultiTenancyConfig = &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantActivation: false, + AutoTenantCreation: true, + } + helper.CreateClassAuth(t, cls, existingKey) + }) + + t.Run("create and assign role that can create objects in and read schema of tenant of collection", func(t *testing.T) { + helper.CreateRole(t, existingKey, &models.Role{ + Name: String(testRoleName), + Permissions: []*models.Permission{ + helper.NewDataPermission().WithAction(authorization.CreateData).WithCollection(cls.Class).Permission(), + helper.NewCollectionsPermission().WithAction(authorization.ReadCollections).WithCollection(cls.Class).Permission(), + }, + }) + helper.AssignRoleToUser(t, 
existingKey, testRoleName, customUser) + }) + + t.Run("fail with 403 when trying to create an object in a non-existent tenant due to lacking authorization.CreateCollections for autoTenantCreation", func(t *testing.T) { + err := helper.CreateObjectAuth(t, obj, customKey) + require.NotNil(t, err) + var parsed *objects.ObjectsCreateForbidden + require.True(t, errors.As(err, &parsed)) + require.Contains(t, parsed.Payload.Error[0].Message, "forbidden") + }) + + t.Run("add permission allowing to create tenants of collection", func(t *testing.T) { + _, err := helper.Client(t).Authz.AddPermissions(authz.NewAddPermissionsParams().WithID(testRoleName).WithBody(authz.AddPermissionsBody{ + Permissions: []*models.Permission{ + helper.NewTenantsPermission().WithAction(authorization.CreateTenants).WithCollection(cls.Class).WithTenant(obj.Tenant).Permission(), + }, + }), adminAuth) + require.Nil(t, err) + }) + + t.Run("successfully create object in tenant after adding permission for autoTenantCreation", func(t *testing.T) { + err := helper.CreateObjectAuth(t, obj, customKey) + helper.AssertRequestOk(t, nil, err, nil) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/autoschema_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/autoschema_test.go new file mode 100644 index 0000000000000000000000000000000000000000..cfddce2fd5dc6b8fbf229633b8c25ced5775ff04 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/autoschema_test.go @@ -0,0 +1,201 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/authz" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAutoschemaAuthZ(t *testing.T) { + customUser := "custom-user" + customKey := "custom-key" + adminKey := "admin-key" + adminUser := "admin-user" + adminAuth := helper.CreateAuth(adminKey) + + createDataAction := authorization.CreateData + updateSchemaAction := authorization.UpdateCollections + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + compose, err := docker. + New(). + WithWeaviate(). + WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(customUser, customKey). + WithRBAC().WithRbacRoots(adminUser). + WithAutoschema(). 
+ Start(ctx) + + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + className := "Class" + classNameNew := "ClassNew" + deleteObjectClass(t, className, adminAuth) + deleteObjectClass(t, classNameNew, adminAuth) + c := &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + helper.CreateClassAuth(t, c, adminKey) + + // user needs to be able to create objects and read the configs + all := "*" + readSchemaAndCreateDataRoleName := "readSchemaAndCreateData" + readSchemaRole := &models.Role{ + Name: &readSchemaAndCreateDataRoleName, + Permissions: []*models.Permission{ + {Action: &authorization.ReadCollections, Collections: &models.PermissionCollections{Collection: &all}}, + {Action: &createDataAction, Data: &models.PermissionData{Collection: &all}}, + }, + } + + // create roles to assign to user later: + updateSchemaRoleName := "updateSchema" + updateSchemaRole := &models.Role{ + Name: &updateSchemaRoleName, + Permissions: []*models.Permission{{Action: &updateSchemaAction, Collections: &models.PermissionCollections{Collection: &className}}}, + } + createSchemaRoleName := "createSchema" + createSchemaRole := &models.Role{ + Name: &createSchemaRoleName, + Permissions: []*models.Permission{ + {Action: &authorization.CreateCollections, Collections: &models.PermissionCollections{Collection: &classNameNew}}, + }, + } + + helper.DeleteRole(t, adminKey, *readSchemaRole.Name) + helper.DeleteRole(t, adminKey, *createSchemaRole.Name) + helper.DeleteRole(t, adminKey, *updateSchemaRole.Name) + helper.CreateRole(t, adminKey, readSchemaRole) + helper.CreateRole(t, adminKey, updateSchemaRole) + helper.CreateRole(t, adminKey, createSchemaRole) + defer helper.DeleteRole(t, adminKey, *readSchemaRole.Name) + 
defer helper.DeleteRole(t, adminKey, *updateSchemaRole.Name) + defer helper.DeleteRole(t, adminKey, *createSchemaRole.Name) + + // all tests need read schema + helper.AssignRoleToUser(t, adminKey, readSchemaAndCreateDataRoleName, customUser) + + t.Run("Only read rights for schema", func(t *testing.T) { + // object which does NOT introduce a new prop => no failure + _, err = createObject(t, &models.Object{ + ID: UUID1, + Class: className, + Properties: map[string]interface{}{"name": "prop"}, + Tenant: "", + }, customKey) + require.NoError(t, err) + + // object which does introduce a new prop => failure + _, err = createObject(t, &models.Object{ + ID: UUID2, + Class: className, + Properties: map[string]interface{}{"other": "prop"}, + Tenant: "", + }, customKey) + require.Error(t, err) + + var batchObjectsDeleteUnauthorized *objects.ObjectsCreateForbidden + require.True(t, errors.As(err, &batchObjectsDeleteUnauthorized)) + }) + + t.Run("read and update rights for schema", func(t *testing.T) { + _, err := helper.Client(t).Authz.AssignRoleToUser( + authz.NewAssignRoleToUserParams().WithID(customUser).WithBody(authz.AssignRoleToUserBody{Roles: []string{updateSchemaRoleName}}), + adminAuth, + ) + require.NoError(t, err) + + // object which does NOT introduce a new prop => no failure + _, err = createObject(t, &models.Object{ + ID: UUID2, + Class: className, + Properties: map[string]interface{}{"name": "prop"}, + Tenant: "", + }, customKey) + require.NoError(t, err) + + // object which does introduce a new prop => also no failure + _, err = createObject(t, &models.Object{ + ID: UUID3, + Class: className, + Properties: map[string]interface{}{"different": "prop"}, + Tenant: "", + }, customKey) + require.NoError(t, err) + + // object which does introduce a new class => failure + _, err = createObject(t, &models.Object{ + ID: UUID4, + Class: classNameNew, + Properties: map[string]interface{}{"different": "prop"}, + Tenant: "", + }, customKey) + require.Error(t, err) + + var 
batchObjectsDeleteUnauthorized *objects.ObjectsCreateForbidden + require.True(t, errors.As(err, &batchObjectsDeleteUnauthorized)) + }) + + t.Run("create rights for schema", func(t *testing.T) { + _, err := helper.Client(t).Authz.AssignRoleToUser( + authz.NewAssignRoleToUserParams().WithID(customUser).WithBody(authz.AssignRoleToUserBody{Roles: []string{updateSchemaRoleName, createSchemaRoleName}}), + adminAuth, + ) + require.NoError(t, err) + + // object which does NOT introduce a new class + _, err = createObject(t, &models.Object{ + ID: UUID5, + Class: className, + Properties: map[string]interface{}{"name": "prop"}, + Tenant: "", + }, customKey) + require.NoError(t, err) + + // object which does introduce a new class + _, err = createObject(t, &models.Object{ + ID: UUID6, + Class: classNameNew, + Properties: map[string]interface{}{"different": "prop"}, + Tenant: "", + }, customKey) + require.NoError(t, err) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/backup_restore_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/backup_restore_test.go new file mode 100644 index 0000000000000000000000000000000000000000..cc121b0bc71bb9f1f4519b646b5793fbe79fbade --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/backup_restore_test.go @@ -0,0 +1,176 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/authz" + "github.com/weaviate/weaviate/client/backups" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +const ( + envS3UseSSL = "BACKUP_S3_USE_SSL" + s3BackupJourneyClassName = "S3Backup" + s3BackupJourneyBackupIDSingleNode = "s3-backup-single-node" + s3BackupJourneyBackupIDCluster = "s3-backup-cluster" + s3BackupJourneyRegion = "eu-west-1" + s3BackupJourneyAccessKey = "aws_access_key" + s3BackupJourneySecretKey = "aws_secret_key" +) + +func TestBackupAndRestoreRBAC(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + customUser := "custom-user" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + compose, err := docker. + New(). + WithWeaviate(). + WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(customUser, "custom-key"). + WithRBAC().WithRbacRoots(adminUser).WithDbUsers(). + WithBackendS3("bucket", s3BackupJourneyRegion). + WithWeaviateCluster(3). 
+ Start(ctx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + backend := "s3" + testRoleName := "testRole" + testCollectionName := "TestCollection" + + // one class is needed for backup + par := articles.ParagraphsClass() + + testRole := &models.Role{ + Name: String(testRoleName), + Permissions: []*models.Permission{ + {Action: String(authorization.ReadRoles), Backups: &models.PermissionBackups{Collection: String(testCollectionName)}}, + }, + } + + t.Run("Backup and full restore", func(t *testing.T) { + backupID := "backup-1" + + helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + helper.CreateClassAuth(t, par, adminKey) + defer helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + + helper.DeleteRole(t, adminKey, testRoleName) + helper.CreateRole(t, adminKey, testRole) + defer helper.DeleteRole(t, adminKey, testRoleName) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + resp, err := helper.CreateBackupWithAuthz(t, helper.DefaultBackupConfig(), par.Class, backend, backupID, helper.CreateAuth(adminKey)) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.Equal(t, "", resp.Payload.Error) + + helper.ExpectBackupEventuallyCreated(t, backupID, backend, helper.CreateAuth(adminKey)) + + // delete role and assignment + helper.DeleteRole(t, adminKey, testRoleName) + helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + + all := "all" + restoreConf := helper.DefaultRestoreConfig() + restoreConf.RolesOptions = &all + respR, err := helper.RestoreBackupWithAuthz(t, restoreConf, par.Class, backend, backupID, map[string]string{}, helper.CreateAuth(adminKey)) + require.Nil(t, err) + require.NotNil(t, respR.Payload) + require.Equal(t, "", respR.Payload.Error) + + 
helper.ExpectBackupEventuallyRestored(t, backupID, backend, helper.CreateAuth(adminKey)) + + role := helper.GetRoleByName(t, adminKey, testRoleName) + require.NotNil(t, role) + require.Equal(t, *role.Name, testRoleName) + + user := helper.GetUser(t, customUser, adminKey) + require.NotNil(t, user) + require.Equal(t, *user.UserID, customUser) + require.Equal(t, user.Roles[0], testRoleName) + + roles := helper.GetRolesForUser(t, customUser, adminKey, false) + require.Len(t, roles, 1) + require.Equal(t, *roles[0].Name, testRoleName) + }) + + time.Sleep(2 * time.Second) // wait for the backup to be fully processed + + t.Run("Backup and restore without roles", func(t *testing.T) { + backupID := "backup-2" + + helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + helper.CreateClassAuth(t, par, adminKey) + defer helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + + helper.DeleteRole(t, adminKey, testRoleName) + helper.CreateRole(t, adminKey, testRole) + defer helper.DeleteRole(t, adminKey, testRoleName) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + resp, err := helper.CreateBackupWithAuthz(t, helper.DefaultBackupConfig(), par.Class, backend, backupID, helper.CreateAuth(adminKey)) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.Equal(t, "", resp.Payload.Error) + + helper.ExpectBackupEventuallyCreated(t, backupID, backend, helper.CreateAuth(adminKey), helper.WithPollInterval(helper.MinPollInterval), helper.WithDeadline(helper.MaxDeadline)) + + // delete role and assignment + helper.DeleteRole(t, adminKey, testRoleName) + helper.DeleteClassWithAuthz(t, par.Class, helper.CreateAuth(adminKey)) + + noRestore := "noRestore" + restoreConf := helper.DefaultRestoreConfig() + restoreConf.RolesOptions = &noRestore + respR, err := helper.RestoreBackupWithAuthz(t, restoreConf, par.Class, backend, backupID, map[string]string{}, helper.CreateAuth(adminKey)) + if err != nil { + n := err.(interface{}) + e := 
n.(*backups.BackupsRestoreUnprocessableEntity) + fmt.Printf("Full error restoring backup: %+v | %+v | %+v \n", e.GetPayload().Error, e.GetPayload().Error, e.Payload) + } + require.Nil(t, err) + require.NotNil(t, respR.Payload) + require.Equal(t, "", respR.Payload.Error) + + helper.ExpectBackupEventuallyRestored(t, backupID, backend, helper.CreateAuth(adminKey)) + + respRole, err := helper.Client(t).Authz.GetRole(authz.NewGetRoleParams().WithID(testRoleName), helper.CreateAuth(adminKey)) + require.Nil(t, respRole) + require.Error(t, err) + + roles := helper.GetRolesForUser(t, customUser, adminKey, false) + require.Len(t, roles, 0) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/backups_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/backups_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b1335ddb67fd924df066d63652a347758e0715ea --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/backups_test.go @@ -0,0 +1,186 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/backups" + "github.com/weaviate/weaviate/entities/backup" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthZBackupsManageJourney(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + viewerUser := "viewer-user" + viewerKey := "viewer-key" + + customUser := "custom-user" + customKey := "custom-key" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + compose, err := docker. + New(). + WithWeaviate(). + WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(customUser, customKey).WithUserApiKey(viewerUser, viewerKey). + WithRBAC().WithRbacRoots(adminUser).WithRbacViewers(viewerUser). + WithBackendFilesystem(). 
+ Start(ctx) + require.Nil(t, err) + + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + backend := "filesystem" + backupID := "backup-1" + testRoleName := "test-role" + + clsA := articles.ArticlesClass() + clsP := articles.ParagraphsClass() + objA := articles.NewArticle().WithTitle("Programming 101") + objP := articles.NewParagraph().WithContents("hello world") + + // cleanup + deleteObjectClass(t, clsA.Class, helper.CreateAuth(adminKey)) + deleteObjectClass(t, clsP.Class, helper.CreateAuth(adminKey)) + helper.DeleteRole(t, adminKey, testRoleName) + + helper.CreateClassAuth(t, clsP, adminKey) + helper.CreateClassAuth(t, clsA, adminKey) + helper.CreateObjectsBatchAuth(t, []*models.Object{objA.Object(), objP.Object()}, adminKey) + + t.Run("create and assign a role that does not have the manage_backups permission", func(t *testing.T) { + helper.CreateRole(t, adminKey, &models.Role{ + Name: String(testRoleName), + Permissions: []*models.Permission{ + {Action: String(authorization.ReadRoles), Backups: &models.PermissionBackups{Collection: String("IDoNotExist")}}, + }, + }) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + }) + + t.Run("viewer cannot create a backup", func(t *testing.T) { + _, err := helper.CreateBackupWithAuthz(t, helper.DefaultBackupConfig(), clsA.Class, backend, backupID, helper.CreateAuth(viewerKey)) + require.NotNil(t, err) + var parsed *backups.BackupsCreateForbidden + require.True(t, errors.As(err, &parsed)) + require.Contains(t, parsed.Payload.Error[0].Message, "forbidden") + }) + + t.Run("fail to create a backup due to missing manage_backups action", func(t *testing.T) { + _, err := helper.CreateBackupWithAuthz(t, helper.DefaultBackupConfig(), clsA.Class, backend, backupID, helper.CreateAuth(customKey)) + require.NotNil(t, err) + var parsed 
*backups.BackupsCreateForbidden + require.True(t, errors.As(err, &parsed)) + require.Contains(t, parsed.Payload.Error[0].Message, "forbidden") + }) + + t.Run("fail to cancel a backup due to missing manage_backups action", func(t *testing.T) { + err := helper.CancelBackupWithAuthz(t, backend, backupID, helper.CreateAuth(customKey)) + require.NotNil(t, err) + var parsed *backups.BackupsCancelForbidden + require.True(t, errors.As(err, &parsed)) + require.Contains(t, parsed.Payload.Error[0].Message, "forbidden") + }) + + t.Run("manage backups of clsA.Class collection", func(t *testing.T) { + helper.AddPermissions(t, adminKey, testRoleName, helper.NewBackupPermission().WithAction(authorization.ManageBackups).WithCollection(clsA.Class).Permission()) + }) + + t.Run("successfully create a backup with sufficient permissions", func(t *testing.T) { + resp, err := helper.CreateBackupWithAuthz(t, helper.DefaultBackupConfig(), clsA.Class, backend, backupID, helper.CreateAuth(customKey)) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.Equal(t, "", resp.Payload.Error) + + for { + resp, err := helper.CreateBackupStatusWithAuthz(t, backend, backupID, "", "", helper.CreateAuth(customKey)) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + if *resp.Payload.Status == "SUCCESS" { + break + } + if *resp.Payload.Status == "FAILED" { + t.Fatalf("backup failed: %s", resp.Payload.Error) + } + time.Sleep(time.Second / 10) + } + }) + + t.Run("delete clsA", func(t *testing.T) { + helper.DeleteClassWithAuthz(t, clsA.Class, helper.CreateAuth(adminKey)) + }) + + t.Run("viewer cannot restore a backup", func(t *testing.T) { + _, err := helper.RestoreBackupWithAuthz(t, helper.DefaultRestoreConfig(), clsA.Class, backend, backupID, map[string]string{}, helper.CreateAuth(viewerKey)) + require.Error(t, err) + + var parsed *backups.BackupsRestoreForbidden + forbidden := errors.As(err, &parsed) + require.True(t, forbidden) + require.Contains(t, parsed.Payload.Error[0].Message, 
"forbidden") + }) + + t.Run("successfully restore a backup with sufficient permissions", func(t *testing.T) { + resp, err := helper.RestoreBackupWithAuthz(t, helper.DefaultRestoreConfig(), clsA.Class, backend, backupID, map[string]string{}, helper.CreateAuth(customKey)) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.Equal(t, "", resp.Payload.Error) + + helper.ExpectBackupEventuallyRestored(t, backupID, backend, helper.CreateAuth(adminKey)) + }) + + t.Run("successfully cancel an in-progress backup", func(t *testing.T) { + backupID = "backup-2" + resp, err := helper.CreateBackupWithAuthz(t, helper.DefaultBackupConfig(), clsA.Class, backend, backupID, helper.CreateAuth(customKey)) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.Equal(t, "", resp.Payload.Error) + + err = helper.CancelBackupWithAuthz(t, backend, backupID, helper.CreateAuth(customKey)) + require.Nil(t, err) + + for { + resp, err := helper.CreateBackupStatusWithAuthz(t, backend, backupID, "", "", helper.CreateAuth(customKey)) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + // handle success also in case of the backup was fast + if *resp.Payload.Status == string(backup.Cancelled) || *resp.Payload.Status == string(backup.Success) { + break + } + if *resp.Payload.Status == "FAILED" { + t.Fatalf("backup failed: %s", resp.Payload.Error) + } + time.Sleep(time.Second / 10) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/batch_delete_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/batch_delete_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a0010c1affd9250714432d25e7af952db1fda116 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/batch_delete_test.go @@ -0,0 +1,383 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// 
Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "errors" + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" + + "github.com/weaviate/weaviate/client/authz" + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/client/objects" + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +const ( + UUIDTo = strfmt.UUID("00000000-0000-0000-0000-000000000001") + UUIDFrom = strfmt.UUID("00000000-0000-0000-0000-000000000002") + beaconStart = "weaviate://localhost/" +) + +func TestAuthZBatchDelete(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + adminAuth := helper.CreateAuth(adminKey) + customUser := "custom-user" + customKey := "custom-key" + customAuth := helper.CreateAuth(customKey) + testRoleName := "test-role" + deleteDataAction := authorization.DeleteData + readCollectionsAction := authorization.ReadCollections + readDataAction := authorization.ReadData + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + // add classes with object + classNameTarget := "AuthZBatchDeleteTestTarget" + c := &models.Class{ + Class: classNameTarget, + Properties: []*models.Property{ + { + Name: "prop", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + deleteObjectClass(t, classNameTarget, adminAuth) + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + resp, err := 
helper.Client(t).Schema.SchemaObjectsCreate(params, adminAuth) + assert.NoError(t, err) + assert.NotEmpty(t, resp) + + paramsObj := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + ID: UUIDTo, + Class: classNameTarget, + Properties: map[string]interface{}{ + "prop": "test", + }, + }) + + respObj, err := helper.Client(t).Objects.ObjectsCreate(paramsObj, adminAuth) + assert.NoError(t, err) + assert.NotEmpty(t, respObj) + + classNameSource := "AuthZBatchDeleteTestSource" + c2 := &models.Class{ + Class: classNameSource, + Properties: []*models.Property{ + { + Name: "someProperty", + DataType: schema.DataTypeText.PropString(), + }, + { + Name: "ref", + DataType: []string{classNameTarget}, + }, + }, + } + + deleteObjectClass(t, classNameSource, adminAuth) + params2 := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c2) + resp2, err2 := helper.Client(t).Schema.SchemaObjectsCreate(params2, adminAuth) + assert.NoError(t, err2) + assert.NotEmpty(t, resp2) + + paramsObj = objects.NewObjectsCreateParams().WithBody( + &models.Object{ + ID: UUIDFrom, + Class: classNameSource, + Properties: map[string]interface{}{ + "someProperty": "test", + }, + }) + + respObj, err = helper.Client(t).Objects.ObjectsCreate(paramsObj, adminAuth) + assert.NoError(t, err) + assert.NotEmpty(t, respObj) + + // add refs + from := beaconStart + classNameSource + "/" + UUIDFrom.String() + "/ref" + to := beaconStart + UUIDTo + batchRefs := []*models.BatchReference{ + {From: strfmt.URI(from), To: strfmt.URI(to)}, + } + paramsRef := batch.NewBatchReferencesCreateParams().WithBody(batchRefs) + _, err = helper.Client(t).Batch.BatchReferencesCreate(paramsRef, adminAuth) + require.Nil(t, err) + + allNonRefPermissions := []*models.Permission{ + { + Action: &deleteDataAction, + Data: &models.PermissionData{Collection: &classNameSource}, + }, + { + Action: &readCollectionsAction, + Collections: &models.PermissionCollections{Collection: &classNameSource}, + }, + { + Action: &readDataAction, + 
Data: &models.PermissionData{Collection: &classNameSource}, + }, + } + t.Run("all rights without reference", func(t *testing.T) { + deleteRole := &models.Role{ + Name: &testRoleName, + Permissions: allNonRefPermissions, + } + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + params := getBatchDelete(classNameSource, []string{"someProperty"}, "something", true, nil) + resp, err := helper.Client(t).Batch.BatchObjectsDelete(params, customAuth) + require.Nil(t, err) + require.NotNil(t, resp) + require.Equal(t, resp.Payload.Results.Matches, int64(1)) + + _, err = helper.Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID(customUser).WithBody(authz.RevokeRoleFromUserBody{Roles: []string{testRoleName}}), + adminAuth, + ) + require.Nil(t, err) + helper.DeleteRole(t, adminKey, testRoleName) + }) + + t.Run("Single class without permissions", func(t *testing.T) { + for _, permissions := range generateMissingLists(allNonRefPermissions) { + role := &models.Role{ + Name: &testRoleName, + Permissions: permissions, + } + helper.DeleteRole(t, adminKey, testRoleName) + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + params := getBatchDelete(classNameSource, []string{"someProperty"}, "something", true, nil) + _, err := helper.Client(t).Batch.BatchObjectsDelete(params, customAuth) + require.NotNil(t, err) + var batchObjectsDeleteUnauthorized *batch.BatchObjectsDeleteForbidden + require.True(t, errors.As(err, &batchObjectsDeleteUnauthorized)) + + _, err = helper.Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID(customUser).WithBody(authz.RevokeRoleFromUserBody{Roles: []string{testRoleName}}), + adminAuth, + ) + require.Nil(t, err) + helper.DeleteRole(t, adminKey, testRoleName) + } + }) + + allRefPermissions := []*models.Permission{ + { + Action: 
&deleteDataAction, + Data: &models.PermissionData{Collection: &classNameSource}, + }, + { + Action: &readCollectionsAction, + Collections: &models.PermissionCollections{Collection: &classNameSource}, + }, + { + Action: &readCollectionsAction, + Collections: &models.PermissionCollections{Collection: &classNameTarget}, + }, + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: &classNameTarget}, + }, + } + t.Run("all rights with reference", func(t *testing.T) { + deleteRole := &models.Role{ + Name: &testRoleName, + Permissions: allRefPermissions, + } + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + params := getBatchDelete(classNameSource, []string{"ref", classNameTarget, "prop"}, "something", true, nil) + resp, err := helper.Client(t).Batch.BatchObjectsDelete(params, customAuth) + require.Nil(t, err) + require.NotNil(t, resp) + require.Equal(t, resp.Payload.Results.Matches, int64(1)) + + _, err = helper.Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID(customUser).WithBody(authz.RevokeRoleFromUserBody{Roles: []string{testRoleName}}), + adminAuth, + ) + require.Nil(t, err) + helper.DeleteRole(t, adminKey, testRoleName) + }) + + t.Run("No delete rights for class ref class", func(t *testing.T) { + for _, permissions := range generateMissingLists(allRefPermissions) { + deleteRole := &models.Role{ + Name: &testRoleName, + Permissions: permissions, + } + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + params := getBatchDelete(classNameSource, []string{"ref", classNameTarget, "prop"}, "something", true, nil) + _, err := helper.Client(t).Batch.BatchObjectsDelete(params, customAuth) + require.NotNil(t, err) + var batchObjectsDeleteUnauthorized *batch.BatchObjectsDeleteForbidden + require.True(t, 
errors.As(err, &batchObjectsDeleteUnauthorized)) + + _, err = helper.Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID(customUser).WithBody(authz.RevokeRoleFromUserBody{Roles: []string{testRoleName}}), + adminAuth, + ) + require.Nil(t, err) + helper.DeleteRole(t, adminKey, testRoleName) + } + }) +} + +func getBatchDelete(className string, path []string, valueText string, dryRun bool, tenant *string) *batch.BatchObjectsDeleteParams { + output := "verbose" + params := batch.NewBatchObjectsDeleteParams().WithBody(&models.BatchDelete{ + Match: &models.BatchDeleteMatch{ + Class: className, + Where: &models.WhereFilter{ + Operator: "NotEqual", + Path: path, + ValueText: &valueText, + }, + }, + DryRun: &dryRun, + Output: &output, + }).WithTenant(tenant) + return params +} + +func TestAuthZBatchDeleteWithMT(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + adminAuth := helper.CreateAuth(adminKey) + customUser := "custom-user" + customKey := "custom-key" + customAuth := helper.CreateAuth(customKey) + testRoleName := "test-role" + deleteDataAction := authorization.DeleteData + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + // add classes with object + className := "AuthZBatchDeleteTestMT" + tenant1 := "tenant1" + tenant2 := "tenant2" + c := &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "prop", + DataType: schema.DataTypeText.PropString(), + }, + }, + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + } + err := createClass(t, c, adminAuth) + require.Nil(t, err) + err = createTenant(t, className, []*models.Tenant{{Name: tenant1}, {Name: tenant2}}, adminKey) + require.Nil(t, err) + defer deleteObjectClass(t, className, adminAuth) + + for _, tenant := range []string{tenant1, tenant2} { + _, err = createObject(t, &models.Object{ + Class: className, + Properties: map[string]any{ + "prop": "test", 
+ }, + Tenant: tenant, + }, adminKey) + require.Nil(t, err) + } + + helper.CreateRole(t, adminKey, &models.Role{ + Name: &testRoleName, + Permissions: []*models.Permission{ + { + Action: &deleteDataAction, + Data: &models.PermissionData{Collection: &className, Tenant: &tenant1}, + }, + }, + }) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + t.Run(fmt.Sprintf("fail to delete object in %s using rest", tenant2), func(t *testing.T) { + params := getBatchDelete(className, []string{"prop"}, "something", true, &tenant2) + _, err := helper.Client(t).Batch.BatchObjectsDelete(params, customAuth) + require.NotNil(t, err) + var forbidden *batch.BatchObjectsDeleteForbidden + require.True(t, errors.As(err, &forbidden)) + }) + + t.Run(fmt.Sprintf("fail to delete object in %s using grpc", tenant2), func(t *testing.T) { + _, err := helper.ClientGRPC(t).BatchDelete( + metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)), + &pb.BatchDeleteRequest{ + Collection: className, + Tenant: &tenant2, + Verbose: true, + Filters: &pb.Filters{ + Operator: pb.Filters_OPERATOR_NOT_EQUAL, + On: []string{"prop"}, + TestValue: &pb.Filters_ValueText{ValueText: "something"}, + }, + }) + require.NotNil(t, err) + require.Equal(t, status.Code(err), codes.PermissionDenied) + }) + + t.Run(fmt.Sprintf("succeed to delete object in %s using rest", tenant1), func(t *testing.T) { + params := getBatchDelete(className, []string{"prop"}, "something", true, &tenant1) + _, err := helper.Client(t).Batch.BatchObjectsDelete(params, customAuth) + require.NotNil(t, err) + var forbidden *batch.BatchObjectsDeleteForbidden + require.True(t, errors.As(err, &forbidden)) + }) + + t.Run(fmt.Sprintf("succeed to delete object in %s using grpc", tenant1), func(t *testing.T) { + _, err := helper.ClientGRPC(t).BatchDelete( + metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)), + &pb.BatchDeleteRequest{ 
+ Collection: className, + Tenant: &tenant1, + Verbose: true, + Filters: &pb.Filters{ + Operator: pb.Filters_OPERATOR_NOT_EQUAL, + On: []string{"prop"}, + TestValue: &pb.Filters_ValueText{ValueText: "something"}, + }, + }) + require.NotNil(t, err) + require.Equal(t, status.Code(err), codes.PermissionDenied) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/batch_objs_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/batch_objs_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e30c374d74f35b1b74b014ea91463308bf152f5c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/batch_objs_test.go @@ -0,0 +1,302 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "errors" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "google.golang.org/grpc/metadata" + "google.golang.org/protobuf/types/known/structpb" + + "github.com/weaviate/weaviate/client/authz" + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthZBatchObjs(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + adminAuth := helper.CreateAuth(adminKey) + customUser := "custom-user" + customKey := "custom-key" + customAuth := helper.CreateAuth(customKey) + testRoleName := "test-role" + + updateDataAction := authorization.UpdateData + 
createDataAction := authorization.CreateData + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + tests := []struct { + name string + mtEnabled bool + tenantName string + tenantPermission *string + }{ + { + name: "with multi-tenancy", + mtEnabled: true, + tenantName: "tenant1", + tenantPermission: String("tenant1"), + }, + { + name: "without multi-tenancy", + mtEnabled: false, + tenantName: "", + tenantPermission: nil, + }, + } + + for _, tt := range tests { + // add classes with object + className1 := "AuthZBatchObjs1" + className2 := "AuthZBatchObjs2" + deleteObjectClass(t, className1, adminAuth) + deleteObjectClass(t, className2, adminAuth) + defer deleteObjectClass(t, className1, adminAuth) + defer deleteObjectClass(t, className2, adminAuth) + c1 := &models.Class{ + Class: className1, + Properties: []*models.Property{ + { + Name: "prop1", + DataType: schema.DataTypeText.PropString(), + }, + }, + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: tt.mtEnabled}, + } + c2 := &models.Class{ + Class: className2, + Properties: []*models.Property{ + { + Name: "prop2", + DataType: schema.DataTypeText.PropString(), + }, + }, + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: tt.mtEnabled}, + } + require.Nil(t, createClass(t, c1, adminAuth)) + require.Nil(t, createClass(t, c2, adminAuth)) + if tt.mtEnabled { + require.Nil(t, createTenant(t, c1.Class, []*models.Tenant{{Name: tt.tenantName}}, adminKey)) + require.Nil(t, createTenant(t, c2.Class, []*models.Tenant{{Name: tt.tenantName}}, adminKey)) + } + + allPermissions := []*models.Permission{ + { + Action: &createDataAction, + Data: &models.PermissionData{Collection: &className1, Tenant: tt.tenantPermission}, + }, + { + Action: &updateDataAction, + Data: &models.PermissionData{Collection: &className1, Tenant: tt.tenantPermission}, + }, + { + Action: &createDataAction, + Data: &models.PermissionData{Collection: &className2, Tenant: 
tt.tenantPermission}, + }, + { + Action: &updateDataAction, + Data: &models.PermissionData{Collection: &className2, Tenant: tt.tenantPermission}, + }, + } + restObjs := []*models.Object{ + {Class: className1, Properties: map[string]interface{}{"prop1": "test"}, Tenant: tt.tenantName}, + {Class: className2, Properties: map[string]interface{}{"prop2": "test"}, Tenant: tt.tenantName}, + } + grpcObjs := []*pb.BatchObject{ + { + Collection: className1, + Properties: &pb.BatchObject_Properties{ + NonRefProperties: &structpb.Struct{ + Fields: map[string]*structpb.Value{"prop1": {Kind: &structpb.Value_StringValue{StringValue: "test"}}}, + }, + }, + Tenant: tt.tenantName, + Uuid: string(UUID1), + }, + { + Collection: className2, + Properties: &pb.BatchObject_Properties{ + NonRefProperties: &structpb.Struct{ + Fields: map[string]*structpb.Value{"prop2": {Kind: &structpb.Value_StringValue{StringValue: "test"}}}, + }, + }, + Tenant: tt.tenantName, + Uuid: string(UUID2), + }, + } + t.Run(fmt.Sprintf("all rights for both classes %s", tt.name), func(t *testing.T) { + deleteRole := &models.Role{ + Name: &testRoleName, + Permissions: allPermissions, + } + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + params := batch.NewBatchObjectsCreateParams().WithBody(batch.BatchObjectsCreateBody{Objects: restObjs}) + rest, err := helper.Client(t).Batch.BatchObjectsCreate(params, customAuth) + require.Nil(t, err) + for _, elem := range rest.Payload { + assert.Nil(t, elem.Result.Errors) + } + + grpc, err := helper.ClientGRPC(t).BatchObjects( + metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)), + &pb.BatchObjectsRequest{Objects: grpcObjs}, + ) + require.Nil(t, err) + require.Len(t, grpc.Errors, 0) + + _, err = helper.Client(t).Authz.RevokeRoleFromUser( + 
authz.NewRevokeRoleFromUserParams().WithID(customUser).WithBody(authz.RevokeRoleFromUserBody{Roles: []string{testRoleName}}), + adminAuth, + ) + require.Nil(t, err) + helper.DeleteRole(t, adminKey, testRoleName) + }) + + for _, permissions := range generateMissingLists(allPermissions) { + t.Run(fmt.Sprintf("single permission missing %s", tt.name), func(t *testing.T) { + deleteRole := &models.Role{ + Name: &testRoleName, + Permissions: permissions, + } + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + _, err := helper.Client(t).Authz.AssignRoleToUser( + authz.NewAssignRoleToUserParams().WithID(customUser).WithBody(authz.AssignRoleToUserBody{Roles: []string{testRoleName}}), + adminAuth, + ) + require.Nil(t, err) + + params := batch.NewBatchObjectsCreateParams().WithBody(batch.BatchObjectsCreateBody{Objects: restObjs}) + _, err = helper.Client(t).Batch.BatchObjectsCreate(params, customAuth) + var batchObjectsCreateForbidden *batch.BatchObjectsCreateForbidden + require.True(t, errors.As(err, &batchObjectsCreateForbidden)) + + res, err := helper.ClientGRPC(t).BatchObjects( + metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)), + &pb.BatchObjectsRequest{Objects: grpcObjs}, + ) + require.Nil(t, err) + require.Len(t, res.Errors, 1) // the other object is in another class so is covered by one of the permissions + for _, err := range res.Errors { + require.Contains(t, err.Error, "rbac: authorization, forbidden action: user 'custom-user' has insufficient permissions") + } + + _, err = helper.Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID(customUser).WithBody(authz.RevokeRoleFromUserBody{Roles: []string{testRoleName}}), + adminAuth, + ) + require.Nil(t, err) + helper.DeleteRole(t, adminKey, testRoleName) + }) + } + } +} + +func TestAuthZBatchObjsTenantFiltering(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + 
adminAuth := helper.CreateAuth(adminKey) + customUser := "custom-user" + customKey := "custom-key" + // customAuth := helper.CreateAuth(customKey) + testRoleName := "test-role" + + updateDataAction := authorization.UpdateData + createDataAction := authorization.CreateData + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + cls := articles.ParagraphsClass() + cls.MultiTenancyConfig = &models.MultiTenancyConfig{Enabled: true} + helper.DeleteClassWithAuthz(t, cls.Class, adminAuth) + helper.CreateClassAuth(t, cls, adminKey) + defer helper.DeleteClassWithAuthz(t, cls.Class, adminAuth) + + helper.CreateTenantsAuth(t, cls.Class, []*models.Tenant{{Name: "tenant1"}, {Name: "tenant2"}}, adminKey) + + objs := []*pb.BatchObject{ + { + Collection: cls.Class, + Properties: &pb.BatchObject_Properties{ + NonRefProperties: &structpb.Struct{ + Fields: map[string]*structpb.Value{"contents": {Kind: &structpb.Value_StringValue{StringValue: "test"}}}, + }, + }, + Tenant: "tenant1", + Uuid: string(UUID1), + }, + { + Collection: cls.Class, + Properties: &pb.BatchObject_Properties{ + NonRefProperties: &structpb.Struct{ + Fields: map[string]*structpb.Value{"contents": {Kind: &structpb.Value_StringValue{StringValue: "test"}}}, + }, + }, + Tenant: "tenant2", + Uuid: string(UUID2), + }, + } + + t.Run("cannot insert into either tenant without permissions", func(t *testing.T) { + res, err := helper.ClientGRPC(t).BatchObjects( + metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)), + &pb.BatchObjectsRequest{Objects: objs}, + ) + require.Nil(t, err) + require.Len(t, res.Errors, 2) + for _, err := range res.Errors { + require.Contains(t, err.Error, "rbac: authorization, forbidden action: user 'custom-user' has insufficient permissions") + } + }) + + t.Run("assign permissions to insert data into tenant1", func(t *testing.T) { + helper.CreateRole(t, adminKey, 
&models.Role{ + Name: &testRoleName, + Permissions: []*models.Permission{{ + Action: &createDataAction, + Data: &models.PermissionData{Collection: &cls.Class, Tenant: String("tenant1")}, + }, { + Action: &updateDataAction, + Data: &models.PermissionData{Collection: &cls.Class, Tenant: String("tenant1")}, + }}, + }) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + }) + + t.Run("can insert into tenant1 but not into tenant2", func(t *testing.T) { + res, err := helper.ClientGRPC(t).BatchObjects( + metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)), + &pb.BatchObjectsRequest{Objects: objs}, + ) + require.Nil(t, err) + require.Len(t, res.Errors, 1) + require.Contains(t, res.Errors[0].Error, "rbac: authorization, forbidden action: user 'custom-user' has insufficient permissions") + require.Equal(t, res.Errors[0].Index, int32(1)) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/batch_refs_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/batch_refs_test.go new file mode 100644 index 0000000000000000000000000000000000000000..bb69f2a84dda8789a8a6af94b1eea49ee8e36f36 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/batch_refs_test.go @@ -0,0 +1,209 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "errors" + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthZBatchRefAuthZCalls(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + adminAuth := helper.CreateAuth(adminKey) + + compose, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{}, nil) + defer down() + + containers := compose.Containers() + require.Len(t, containers, 1) // started only one node + + // add classes with object + className1 := "AuthZBatchObjREST1" + className2 := "AuthZBatchObjREST2" + deleteObjectClass(t, className1, adminAuth) + deleteObjectClass(t, className2, adminAuth) + defer deleteObjectClass(t, className1, adminAuth) + defer deleteObjectClass(t, className2, adminAuth) + + c1 := &models.Class{ + Class: className1, + Properties: []*models.Property{ + { + Name: "ref", + DataType: []string{className2}, + }, + }, + } + c2 := &models.Class{ + Class: className2, + Properties: []*models.Property{ + { + Name: "prop2", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + require.Nil(t, createClass(t, c2, adminAuth)) + require.Nil(t, createClass(t, c1, adminAuth)) + + // add an object to each class + helper.CreateObjectAuth(t, &models.Object{ID: UUID1, Class: className1}, adminKey) + helper.CreateObjectAuth(t, &models.Object{ID: UUID1, Class: className2}, adminKey) + + ls := newLogScanner(containers[0].Container()) + ls.GetAuthzLogs(t) // startup and object class creation logs that are irrelevant + + from := beaconStart + className1 + "/" + UUID1.String() + "/ref" + to := beaconStart + UUID1 + + var refs []*models.BatchReference + for i := 0; i < 30; i++ { + refs 
= append(refs, &models.BatchReference{ + From: strfmt.URI(from), To: strfmt.URI(to), + }) + } + + params := batch.NewBatchReferencesCreateParams().WithBody(refs) + res, err := helper.Client(t).Batch.BatchReferencesCreate(params, adminAuth) + require.NoError(t, err) + require.NotNil(t, res.Payload) + + authZlogs := ls.GetAuthzLogs(t) + require.LessOrEqual(t, len(authZlogs), 4) +} + +func TestAuthZBatchRefAuthZTenantFiltering(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + adminAuth := helper.CreateAuth(adminKey) + customUser := "custom-user" + customKey := "custom-key" + customAuth := helper.CreateAuth(customKey) + + compose, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + containers := compose.Containers() + require.Len(t, containers, 1) // started only one node + + // add classes with object + className1 := "AuthZBatchRefREST1" + className2 := "AuthZBatchRefREST2" + defer deleteObjectClass(t, className1, adminAuth) + defer deleteObjectClass(t, className2, adminAuth) + + c1 := &models.Class{ + Class: className1, + Properties: []*models.Property{ + { + Name: "ref", + DataType: []string{className2}, + }, + }, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + } + c2 := &models.Class{ + Class: className2, + Properties: []*models.Property{ + { + Name: "prop2", + DataType: schema.DataTypeText.PropString(), + }, + }, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + } + require.Nil(t, createClass(t, c2, adminAuth)) + require.Nil(t, createClass(t, c1, adminAuth)) + + tenants := []*models.Tenant{{Name: "tenant1"}, {Name: "tenant2"}} + require.Nil(t, createTenant(t, className1, tenants, adminKey)) + require.Nil(t, createTenant(t, className2, tenants, adminKey)) + + // add an object to each class + helper.CreateObjectAuth(t, &models.Object{ID: UUID1, Class: className1}, adminKey) + helper.CreateObjectAuth(t, &models.Object{ID: 
UUID1, Class: className2}, adminKey) + + addReferences := func(class, tenant string) (*batch.BatchReferencesCreateOK, error) { + from := beaconStart + class + "/" + UUID1.String() + "/ref" + to := beaconStart + UUID1 + + var refs []*models.BatchReference + for i := 0; i < 30; i++ { + refs = append(refs, &models.BatchReference{ + From: strfmt.URI(from), To: strfmt.URI(to), Tenant: tenant, + }) + } + + params := batch.NewBatchReferencesCreateParams().WithBody(refs) + return helper.Client(t).Batch.BatchReferencesCreate(params, customAuth) + } + + assertError := func(err error, expected *batch.BatchReferencesCreateForbidden) { + require.Error(t, err) + if !errors.As(err, &expected) { + t.Fatalf("expected error of type %T, got %T: %v", expected, err, err) + } + require.ErrorAs(t, err, &expected) + } + var errForbidden *batch.BatchReferencesCreateForbidden + + t.Run("Fail to batch references without any permissions", func(t *testing.T) { + _, err := addReferences(className1, tenants[0].Name) + assertError(err, errForbidden) + }) + + permissions := batchReferencesPermissions(className1, className2, tenants[0].Name) + role := &models.Role{ + Name: String("test-role"), + Permissions: permissions, + } + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, *role.Name, customUser) + + t.Run(fmt.Sprintf("Succeed to batch references from %s to %s within %s", className1, className2, tenants[0].Name), func(t *testing.T) { + _, err := addReferences(className1, tenants[0].Name) + require.NoError(t, err) + }) + + t.Run(fmt.Sprintf("Fail to batch from %s to %s references within %s", className1, className2, tenants[1].Name), func(t *testing.T) { + _, err := addReferences(className1, tenants[1].Name) + assertError(err, errForbidden) + }) + + helper.RemovePermissions(t, adminKey, *role.Name, permissions[2]) + t.Run(fmt.Sprintf("Fail to batch from %s to %s references within %s due to missing %s", className1, className2, tenants[0].Name, authorization.ReadData), 
func(t *testing.T) { + _, err := addReferences(className1, tenants[0].Name) + assertError(err, errForbidden) + }) + helper.AddPermissions(t, adminKey, *role.Name, permissions[2]) + + helper.RemovePermissions(t, adminKey, *role.Name, permissions[1]) + t.Run(fmt.Sprintf("Fail to batch from %s to %s references within %s due to missing %s", className1, className2, tenants[0].Name, authorization.UpdateData), func(t *testing.T) { + _, err := addReferences(className1, tenants[0].Name) + assertError(err, errForbidden) + }) + helper.AddPermissions(t, adminKey, *role.Name, permissions[1]) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/classifications_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/classifications_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ef13bffdee81505da96a0c3b98eb9b771a8d435d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/classifications_test.go @@ -0,0 +1,219 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/authz" + "github.com/weaviate/weaviate/client/classifications" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthzClassification(t *testing.T) { + adminUser := "existing-user" + adminKey := "existing-key" + + customUser := "custom-user" + customKey := "custom-key" + + postRole := "post" + getRole := "get" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + clsA := articles.ArticlesClass() + clsB := articles.ParagraphsClass() + + classify := func() (*classifications.ClassificationsPostCreated, error) { + return helper.Client(t).Classifications.ClassificationsPost( + classifications.NewClassificationsPostParams().WithParams(&models.Classification{ + Class: clsA.Class, + ClassifyProperties: []string{"hasParagraphs"}, + BasedOnProperties: []string{"title"}, + Type: "knn", + Settings: map[string]interface{}{ + "k": 5, + }, + }), helper.CreateAuth(customKey), + ) + } + + get := func(id string) (*classifications.ClassificationsGetOK, error) { + return helper.Client(t).Classifications.ClassificationsGet( + classifications.NewClassificationsGetParams().WithID(id), helper.CreateAuth(customKey), + ) + } + + var id string + + t.Run("setup", func(t *testing.T) { + helper.CreateClassAuth(t, clsB, adminKey) + helper.CreateClassAuth(t, clsA, adminKey) + helper.CreateObjectAuth(t, articles.NewParagraph().WithID(UUID1).WithContents("Hello, World!").Object(), adminKey) + helper.CreateObjectAuth(t, articles.NewArticle().WithID(UUID1).WithTitle("Classifications").WithReferences(&models.SingleRef{ + Beacon: 
strfmt.URI("weaviate://localhost/" + clsB.Class + "/" + UUID1.String()), + Class: strfmt.URI(clsB.Class), + }).Object(), adminKey) + }) + + t.Run("fail to start a classification without update_collections:Article", func(t *testing.T) { + _, err := classify() + require.Error(t, err) + parsed, ok := err.(*classifications.ClassificationsPostForbidden) //nolint:errorlint + require.True(t, ok) + require.Contains(t, parsed.Payload.Error[0].Message, authorization.UpdateCollections) + require.Contains(t, parsed.Payload.Error[0].Message, clsA.Class) + }) + + t.Run("add the permission to update the collection", func(t *testing.T) { + _, err := helper.Client(t).Authz.CreateRole(authz.NewCreateRoleParams().WithBody(&models.Role{ + Name: &postRole, + Permissions: []*models.Permission{ + helper.NewCollectionsPermission().WithAction(authorization.UpdateCollections).WithCollection(clsA.Class).Permission(), + }, + }), helper.CreateAuth(adminKey)) + require.NoError(t, err) + helper.AssignRoleToUser(t, adminKey, postRole, customUser) + }) + + t.Run("fail to start a classification without read_data:Article", func(t *testing.T) { + _, err := classify() + require.Error(t, err) + parsed, ok := err.(*classifications.ClassificationsPostForbidden) //nolint:errorlint + require.True(t, ok) + require.Contains(t, parsed.Payload.Error[0].Message, authorization.ReadData) + require.Contains(t, parsed.Payload.Error[0].Message, clsA.Class) + }) + + t.Run("add the permission to read data in the collection", func(t *testing.T) { + _, err := helper.Client(t).Authz.AddPermissions( + authz.NewAddPermissionsParams().WithID(postRole).WithBody(authz.AddPermissionsBody{ + Permissions: []*models.Permission{ + helper.NewDataPermission().WithAction(authorization.ReadData).WithCollection(clsA.Class).Permission(), + }, + }), + helper.CreateAuth(adminKey), + ) + require.NoError(t, err) + }) + + t.Run("fail to start a classification without read_collections:Article", func(t *testing.T) { + _, err := classify() + 
require.Error(t, err) + parsed, ok := err.(*classifications.ClassificationsPostForbidden) //nolint:errorlint + require.True(t, ok) + require.Contains(t, parsed.Payload.Error[0].Message, authorization.ReadCollections) + require.Contains(t, parsed.Payload.Error[0].Message, clsA.Class) + }) + + t.Run("add the permission to read the collection", func(t *testing.T) { + _, err := helper.Client(t).Authz.AddPermissions( + authz.NewAddPermissionsParams().WithID(postRole).WithBody(authz.AddPermissionsBody{ + Permissions: []*models.Permission{ + helper.NewCollectionsPermission().WithAction(authorization.ReadCollections).WithCollection(clsA.Class).Permission(), + }, + }), + helper.CreateAuth(adminKey), + ) + require.NoError(t, err) + }) + + t.Run("fail to start a classification without read_data:Paragraph", func(t *testing.T) { + _, err := classify() + require.Error(t, err) + parsed, ok := err.(*classifications.ClassificationsPostForbidden) //nolint:errorlint + require.True(t, ok) + require.Contains(t, parsed.Payload.Error[0].Message, authorization.ReadData) + require.Contains(t, parsed.Payload.Error[0].Message, clsB.Class) + }) + + t.Run("add the permission to read data in the reference collection", func(t *testing.T) { + _, err := helper.Client(t).Authz.AddPermissions( + authz.NewAddPermissionsParams().WithID(postRole).WithBody(authz.AddPermissionsBody{ + Permissions: []*models.Permission{ + helper.NewDataPermission().WithAction(authorization.ReadData).WithCollection(clsB.Class).Permission(), + }, + }), + helper.CreateAuth(adminKey), + ) + require.NoError(t, err) + }) + + t.Run("fail to start a classification without read_collections:Paragraph", func(t *testing.T) { + _, err := classify() + require.Error(t, err) + parsed, ok := err.(*classifications.ClassificationsPostForbidden) //nolint:errorlint + require.True(t, ok) + require.Contains(t, parsed.Payload.Error[0].Message, authorization.ReadCollections) + require.Contains(t, parsed.Payload.Error[0].Message, clsB.Class) + }) 
+ + t.Run("add the permission to read the reference collection", func(t *testing.T) { + _, err := helper.Client(t).Authz.AddPermissions( + authz.NewAddPermissionsParams().WithID(postRole).WithBody(authz.AddPermissionsBody{ + Permissions: []*models.Permission{ + helper.NewCollectionsPermission().WithAction(authorization.ReadCollections).WithCollection(clsB.Class).Permission(), + }, + }), + helper.CreateAuth(adminKey), + ) + require.NoError(t, err) + }) + + t.Run("start a classification with the correct permissions", func(t *testing.T) { + res, err := classify() + require.NoError(t, err) + id = res.Payload.ID.String() + }) + + t.Run("revoke postRole from user", func(t *testing.T) { + _, err := helper.Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID(customUser).WithBody(authz.RevokeRoleFromUserBody{Roles: []string{postRole}}), + helper.CreateAuth(adminKey), + ) + require.NoError(t, err) + }) + + t.Run("fail to get a classification without the correct permissions", func(t *testing.T) { + _, err := get(id) + require.Error(t, err) + parsed, ok := err.(*classifications.ClassificationsGetForbidden) //nolint:errorlint + require.True(t, ok) + require.NotNil(t, parsed.Payload) + }) + + t.Run("add the permission to read the collection", func(t *testing.T) { + _, err := helper.Client(t).Authz.CreateRole( + authz.NewCreateRoleParams().WithBody(&models.Role{ + Name: &getRole, + Permissions: []*models.Permission{ + helper.NewCollectionsPermission().WithAction(authorization.ReadCollections).WithCollection(clsA.Class).Permission(), + }, + }), + helper.CreateAuth(adminKey), + ) + require.NoError(t, err) + helper.AssignRoleToUser(t, adminKey, getRole, customUser) + }) + + t.Run("get a classification with the correct permissions", func(t *testing.T) { + _, err := get(id) + require.NoError(t, err) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/gql_batch_test.go 
b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/gql_batch_test.go new file mode 100644 index 0000000000000000000000000000000000000000..7fb2001be6a1b6dced0503da93ee1d4c161324f0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/gql_batch_test.go @@ -0,0 +1,111 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/authz" + "github.com/weaviate/weaviate/client/graphql" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthZGQLBatchValidate(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + adminAuth := helper.CreateAuth(adminKey) + customUser := "custom-user" + customKey := "custom-key" + customAuth := helper.CreateAuth(customKey) + + readDataAction := authorization.ReadData + readCollectionsAction := authorization.ReadCollections + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + roleName := "AuthZGQLBatchTestRole" + className := "AuthZGQLBatchTestClass" + deleteObjectClass(t, className, adminAuth) + require.NoError(t, createClass(t, &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "prop", + DataType: schema.DataTypeText.PropString(), + }, + }, + }, adminAuth)) + + all := "*" + + t.Run("All rights", func(t *testing.T) { + role := &models.Role{ + Name: &roleName, + Permissions: []*models.Permission{ + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: 
&all}, + }, + { + Action: &readCollectionsAction, + Collections: &models.PermissionCollections{Collection: &all}, + }, + }, + } + helper.DeleteRole(t, adminKey, *role.Name) + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + + paramsObj := graphql.NewGraphqlBatchParams().WithBody( + models.GraphQLQueries{{Query: "mutation assign role $role: AssignRoleToUserInput!", OperationName: "POST"}}) + _, err := helper.Client(t).Graphql.GraphqlBatch(paramsObj, customAuth) + require.Nil(t, err) + + _, err = helper.Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID(customUser).WithBody(authz.RevokeRoleFromUserBody{Roles: []string{roleName}}), + adminAuth, + ) + require.Nil(t, err) + helper.DeleteRole(t, adminKey, roleName) + }) + + permissionsAll := [][]*models.Permission{ + {{Action: &readDataAction, Data: &models.PermissionData{Collection: &all}}, {Action: &readCollectionsAction, Collections: &models.PermissionCollections{Collection: &className}}}, + {{Action: &readDataAction, Data: &models.PermissionData{Collection: &className}}, {Action: &readCollectionsAction, Collections: &models.PermissionCollections{Collection: &all}}}, + } + for _, permissions := range permissionsAll { + t.Run("Only read data action for a single class", func(t *testing.T) { + role := &models.Role{ + Name: &roleName, + Permissions: permissions, + } + helper.DeleteRole(t, adminKey, roleName) + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + + paramsObj := graphql.NewGraphqlBatchParams().WithBody( + models.GraphQLQueries{{Query: "mutation assign role $role: AssignRoleToUserInput!", OperationName: "POST"}}) + resp, err := helper.Client(t).Graphql.GraphqlBatch(paramsObj, customAuth) + require.NotNil(t, err) + require.Nil(t, resp) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + + helper.DeleteRole(t, adminKey, roleName) + }) + } +} diff --git 
a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/gql_refs_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/gql_refs_test.go new file mode 100644 index 0000000000000000000000000000000000000000..703abe356620a718ceb26dc44159cbb1db026339 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/gql_refs_test.go @@ -0,0 +1,258 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/client/graphql" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/authz" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthZGraphQLRefs(t *testing.T) { + adminUser := "existing-user" + adminKey := "existing-key" + + customUser := "custom-user" + customKey := "custom-key" + + _, teardown := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer teardown() + + articlesCls := articles.ArticlesClass() + paragraphsCls := articles.ParagraphsClass() + + roleName := "can-query-articles" + + t.Run("create classes", func(t *testing.T) { + helper.CreateClassAuth(t, paragraphsCls, adminKey) + helper.CreateClassAuth(t, articlesCls, adminKey) + }) + + t.Run("import objects", func(t *testing.T) { + paragraphObjs := make([]*models.Object, 0) + paragraphObjs = append(paragraphObjs, articles.NewParagraph().WithID(UUID1).Object()) + paragraphObjs = append(paragraphObjs, 
articles.NewParagraph().WithID(UUID2).Object()) + helper.CreateObjectsBatchAuth(t, paragraphObjs, adminKey) + + articleObjs := make([]*models.Object, 0) + articleObjs = append(articleObjs, articles.NewArticle().WithTitle("Article 1").WithReferences( + &models.SingleRef{Beacon: strfmt.URI("weaviate://localhost/" + UUID1.String())}, + &models.SingleRef{Beacon: strfmt.URI("weaviate://localhost/" + UUID2.String())}, + ).Object()) + articleObjs = append(articleObjs, articles.NewArticle().WithTitle("Article 2").Object()) + helper.CreateObjectsBatchAuth(t, articleObjs, adminKey) + }) + + t.Run("create and assign a role that can query for articles", func(t *testing.T) { + helper.CreateRole(t, adminKey, &models.Role{Name: String(roleName), Permissions: []*models.Permission{ + {Action: String(authorization.ReadCollections), Collections: &models.PermissionCollections{Collection: authorization.All}}, + {Action: String(authorization.ReadData), Data: &models.PermissionData{Collection: String(articlesCls.Class)}}, + }}) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + }) + + t.Run("successfully query with Get for just the articles", func(t *testing.T) { + res := assertGQL(t, "{ Get { Article { title } } }", customKey) + data, ok := res.Data["Get"].(map[string]any) + require.True(t, ok) + require.Len(t, data["Article"], 2) + }) + + t.Run("successfully query with Aggregate for just the articles data", func(t *testing.T) { + res := assertGQL(t, "{ Aggregate { Article { meta { count } } } }", customKey) + data, ok := res.Data["Aggregate"].(map[string]any) + require.True(t, ok) + dataL, ok := data["Article"].([]any) + require.True(t, ok) + require.Equal(t, json.Number("2"), dataL[0].(map[string]any)["meta"].(map[string]any)["count"]) + }) + + t.Run("fail to query with Get for articles when filtering on paragraphs", func(t *testing.T) { + query := fmt.Sprintf(`{ Get { Article(where: {operator: Equal, path: ["hasParagraphs", "Paragraph", "_id"], valueText: "%s"}) { title } 
} }`, UUID1) + resp, err := queryGQL(t, query, customKey) + require.Nil(t, err) + require.Equal(t, 1, len(resp.Payload.Errors)) + require.Contains(t, resp.Payload.Errors[0].Message, "forbidden") + }) + + t.Run("fail to query with Aggregate for articles when filtering on paragraphs", func(t *testing.T) { + query := fmt.Sprintf(`{ Aggregate { Article(where: {operator: Equal, path: ["hasParagraphs", "Paragraph", "_id"], valueText: "%s"}) { meta { count } } } }`, UUID1) + resp, err := queryGQL(t, query, customKey) + require.Nil(t, err) + require.Equal(t, 1, len(resp.Payload.Errors)) + require.Contains(t, resp.Payload.Errors[0].Message, "forbidden") + }) + + t.Run("fail to query for articles returning paragraphs", func(t *testing.T) { + query := "{ Get { Article { title hasParagraphs { ... on Paragraph { _additional { id } } } } } }" + resp, err := queryGQL(t, query, customKey) + require.Nil(t, err) + require.Equal(t, 1, len(resp.Payload.Errors)) + require.Contains(t, resp.Payload.Errors[0].Message, "forbidden") + }) + + t.Run("add permission to read data in paragraphs class", func(t *testing.T) { + _, err := helper.Client(t).Authz.AddPermissions(authz.NewAddPermissionsParams().WithID(roleName).WithBody(authz.AddPermissionsBody{ + Permissions: []*models.Permission{{ + Action: String(authorization.ReadData), + Data: &models.PermissionData{Collection: String(paragraphsCls.Class)}, + }}, + }), helper.CreateAuth(adminKey)) + require.Nil(t, err) + }) + + t.Run("successfully query for articles with Get when filtering on paragraphs returning paragraphs", func(t *testing.T) { + query := fmt.Sprintf(`{ Get { Article(where: {operator: Equal, path: ["hasParagraphs", "Paragraph", "_id"], valueText: "%s"}) { title hasParagraphs { ... 
on Paragraph { _additional { id } } } } } }`, UUID1) + res := assertGQL(t, query, customKey) + data, ok := res.Data["Get"].(map[string]any) + require.True(t, ok) + require.Len(t, data["Article"], 1) + art, ok := data["Article"].([]any)[0].(map[string]any) + require.True(t, ok) + require.Len(t, art["hasParagraphs"], 2) + }) + + t.Run("successfully query for articles with Aggregate when filtering on paragraphs", func(t *testing.T) { + query := fmt.Sprintf(`{ Aggregate { Article(where: {operator: Equal, path: ["hasParagraphs", "Paragraph", "_id"], valueText: "%s"}) { meta { count } } } }`, UUID1) + res := assertGQL(t, query, customKey) + data, ok := res.Data["Aggregate"].(map[string]any) + require.True(t, ok) + dataL, ok := data["Article"].([]any) + require.True(t, ok) + require.Equal(t, json.Number("1"), dataL[0].(map[string]any)["meta"].(map[string]any)["count"]) + }) +} + +func TestAuthZGraphQLRefsGroupBy(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + adminAuth := helper.CreateAuth(adminKey) + + customUser := "custom-user" + customKey := "custom-key" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + articlesCls := articles.ArticlesClass() + paragraphsCls := articles.ParagraphsClass() + + helper.DeleteClassWithAuthz(t, paragraphsCls.Class, adminAuth) + helper.DeleteClassWithAuthz(t, articlesCls.Class, adminAuth) + helper.CreateClassAuth(t, paragraphsCls, adminKey) + helper.CreateClassAuth(t, articlesCls, adminKey) + defer helper.DeleteClassWithAuthz(t, paragraphsCls.Class, adminAuth) + defer helper.DeleteClassWithAuthz(t, articlesCls.Class, adminAuth) + + paragraphObjs := []*models.Object{articles.NewParagraph().WithID(UUID1).Object(), articles.NewParagraph().WithID(UUID2).Object()} + helper.CreateObjectsBatchAuth(t, paragraphObjs, adminKey) + + articleObjs := make([]*models.Object, 0) + articleObjs = append(articleObjs, 
articles.NewArticle().WithTitle("Article 1").WithReferences( + &models.SingleRef{Beacon: strfmt.URI("weaviate://localhost/" + UUID1.String())}, + &models.SingleRef{Beacon: strfmt.URI("weaviate://localhost/" + UUID2.String())}, + ).WithVector([]float32{1, 0}).Object()) + articleObjs = append(articleObjs, articles.NewArticle().WithTitle("Article 2").WithVector([]float32{1, 0}).Object()) + helper.CreateObjectsBatchAuth(t, articleObjs, adminKey) + + roleName := t.Name() + "_role" + + readCollectionsAction := authorization.ReadCollections + readDataAction := authorization.ReadData + + all := "*" + requiredPermissions := []*models.Permission{ + { + Action: &readCollectionsAction, + Collections: &models.PermissionCollections{Collection: &all}, + }, + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: ¶graphsCls.Class}, + }, + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: &articlesCls.Class}, + }, + } + + groupByQuery := `{ + Get { + Article( + groupBy: {groups: 10, objectsPerGroup: 10, path: "hasParagraphs"} + nearVector: {vector: [1, 0]} + ) { + _additional { + group { + hits { + _additional { + id + } + hasParagraphs { + ... 
on Paragraph { + contents + } + } + } + } + } + } + } + } + ` + + t.Run("create and assign a role that can query for articles", func(t *testing.T) { + role := &models.Role{Name: &roleName, Permissions: requiredPermissions} + helper.DeleteRole(t, adminKey, *role.Name) + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer helper.DeleteRole(t, adminKey, *role.Name) + + res, err := queryGQL(t, groupByQuery, customKey) + require.NoError(t, err) + require.NotNil(t, res) + require.Nil(t, res.Payload.Errors) + data, ok := res.Payload.Data["Get"].(map[string]any) + require.True(t, ok) + require.Contains(t, data, "Article") + }) + + t.Run("One permission is missing", func(t *testing.T) { + for _, permissions := range generateMissingLists(requiredPermissions) { + role := &models.Role{Name: &roleName, Permissions: permissions} + helper.DeleteRole(t, adminKey, *role.Name) + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer helper.DeleteRole(t, adminKey, *role.Name) + res, err := queryGQL(t, groupByQuery, customKey) + if err != nil { + require.Nil(t, res) + var errForbidden *graphql.GraphqlPostForbidden + require.True(t, errors.As(err, &errForbidden)) + } else { + require.NotNil(t, res.Payload.Errors) + require.Contains(t, res.Payload.Errors[0].Message, "forbidden") + } + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/gql_simple_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/gql_simple_test.go new file mode 100644 index 0000000000000000000000000000000000000000..f144add3ec3197fcd3cfaf511147e240d3784692 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/gql_simple_test.go @@ -0,0 +1,347 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 
2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "errors" + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/authz" + gql "github.com/weaviate/weaviate/client/graphql" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/books" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthZGraphQLSingleTenancy(t *testing.T) { + adminUser := "existing-user" + adminKey := "existing-key" + + customUser := "custom-user" + customKey := "custom-key" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + compose, err := docker. + New(). + WithWeaviate(). + WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(customUser, customKey). + WithRBAC().WithRbacRoots(adminUser). + WithText2VecContextionary(). + WithWeaviateEnv("API_BASED_MODULES_DISABLED", "true"). 
+ Start(ctx) + + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + class := books.ClassContextionaryVectorizer() + readBooksRole := "read-books" + + t.Run("create books class", func(t *testing.T) { + helper.CreateClassAuth(t, class, adminKey) + }) + + t.Run("import books objects", func(t *testing.T) { + objects := books.Objects() + helper.CreateObjectsBatchAuth(t, objects, adminKey) + }) + + t.Run("create and assign a role that can only read objects in books class", func(t *testing.T) { + role := &models.Role{ + Name: String(readBooksRole), + Permissions: []*models.Permission{{ + Action: String(authorization.ReadData), + Data: &models.PermissionData{Collection: String(class.Class)}, + }}, + } + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, *role.Name, customUser) + }) + + t.Run("fail with 403 to query with Get due to lack of read all collections permission", func(t *testing.T) { + _, err := queryGQL(t, "{ Get { Books { title } } }", customKey) + require.NotNil(t, err) + var cErr *gql.GraphqlPostForbidden + require.True(t, errors.As(err, &cErr)) + }) + + t.Run("fail with 403 to query with Aggregate due to lack of read all collections permission", func(t *testing.T) { + _, err := queryGQL(t, "{ Aggregate { Books { meta { count } } } }", customKey) + require.NotNil(t, err) + var cErr *gql.GraphqlPostForbidden + require.True(t, errors.As(err, &cErr)) + }) + + t.Run("fail with 403 to query with Explore due to lack of read all collections permission", func(t *testing.T) { + query := fmt.Sprintf("{ Explore(nearObject:{id:%s}) { className }}", books.Objects()[0].ID) + _, err = queryGQL(t, query, customKey) + require.NotNil(t, err) + var cErr *gql.GraphqlPostForbidden + require.True(t, errors.As(err, &cErr)) + }) + + t.Run("add the read all collections 
permission to the role", func(t *testing.T) { + _, err := helper.Client(t).Authz.AddPermissions(authz.NewAddPermissionsParams().WithID(readBooksRole).WithBody(authz.AddPermissionsBody{ + Permissions: []*models.Permission{{ + Action: String(authorization.ReadCollections), + Collections: &models.PermissionCollections{Collection: String("*")}, + }}, + }), helper.CreateAuth(adminKey)) + require.Nil(t, err) + }) + + t.Run("successfully query with Get and the sufficient permissions", func(t *testing.T) { + assertGQL(t, "{ Get { Books { title } } }", customKey) + }) + + t.Run("successfully query with Aggregate and the sufficient permissions", func(t *testing.T) { + assertGQL(t, "{ Aggregate { Books { meta { count } } } }", customKey) + }) + + t.Run("fail with 200 to query with Explore due to lack of read all objects permission", func(t *testing.T) { + query := fmt.Sprintf(`{ Explore(nearObject:{id:"%s"}) { className }}`, books.Objects()[0].ID) + resp, err := queryGQL(t, query, customKey) + require.Nil(t, err) + require.NotNil(t, resp.Payload.Errors) + require.Len(t, resp.Payload.Errors, 1) + require.Contains(t, resp.Payload.Errors[0].Message, "forbidden") + }) + + t.Run("remove the read objects in book class permission", func(t *testing.T) { + _, err := helper.Client(t).Authz.RemovePermissions(authz.NewRemovePermissionsParams().WithID(readBooksRole).WithBody(authz.RemovePermissionsBody{ + Permissions: []*models.Permission{{ + Action: String(authorization.ReadData), + Data: &models.PermissionData{Collection: String(class.Class)}, + }}, + }), helper.CreateAuth(adminKey)) + require.Nil(t, err) + }) + + t.Run("fail with 200 to query with Get due to lack of read objects permission", func(t *testing.T) { + resp, err := queryGQL(t, "{ Get { Books { title } } }", customKey) + require.Nil(t, err) + require.NotNil(t, resp.Payload.Errors) + require.Len(t, resp.Payload.Errors, 1) + require.Contains(t, resp.Payload.Errors[0].Message, "forbidden") + }) + + t.Run("fail with 200 to query 
with Aggregate due to lack of read objects permission", func(t *testing.T) { + resp, err := queryGQL(t, "{ Aggregate { Books { meta { count } } } }", customKey) + require.Nil(t, err) + require.NotNil(t, resp.Payload.Errors) + require.Len(t, resp.Payload.Errors, 1) + require.Contains(t, resp.Payload.Errors[0].Message, "forbidden") + }) + + t.Run("add the read all objects in all classes permission", func(t *testing.T) { + _, err := helper.Client(t).Authz.AddPermissions(authz.NewAddPermissionsParams().WithID(readBooksRole).WithBody(authz.AddPermissionsBody{ + Permissions: []*models.Permission{{ + Action: String(authorization.ReadData), + Data: &models.PermissionData{Collection: String("*")}, + }}, + }), helper.CreateAuth(adminKey)) + require.Nil(t, err) + }) + + t.Run("successfully query with Explore and the sufficient permissions", func(t *testing.T) { + query := fmt.Sprintf(`{ Explore(nearObject:{id:"%s"}) { className }}`, books.Objects()[0].ID) + assertGQL(t, query, customKey) + }) +} + +func TestAuthZGraphQLMultiTenancy(t *testing.T) { + adminUser := "existing-user" + adminKey := "existing-key" + + customUser := "custom-user" + customKey := "custom-key" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + compose, err := docker. + New(). + WithWeaviate(). + WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(customUser, customKey). + WithRBAC().WithRbacRoots(adminUser). + WithText2VecContextionary(). 
+ Start(ctx) + + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + class := books.ClassContextionaryVectorizer() + readBooksRole := "read-books" + + t.Run("create books class", func(t *testing.T) { + class.MultiTenancyConfig = &models.MultiTenancyConfig{Enabled: true, AutoTenantCreation: true} + helper.CreateClassAuth(t, class, adminKey) + }) + + t.Run("import books objects", func(t *testing.T) { + objects := books.Objects() + for i := range objects { + objects[i].Tenant = customUser + } + helper.CreateObjectsBatchAuth(t, objects, adminKey) + }) + + t.Run("create and assign a role that can only read objects in books class and customUser tenant", func(t *testing.T) { + role := &models.Role{ + Name: String(readBooksRole), + Permissions: []*models.Permission{{ + Action: String(authorization.ReadData), + Data: &models.PermissionData{ + Collection: String(class.Class), + Tenant: String(customUser), + }, + }}, + } + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, *role.Name, customUser) + }) + + t.Run("fail with 403 to query with Get due to lack of read all collections permission", func(t *testing.T) { + query := fmt.Sprintf(`{ Get { %s(tenant:"%s") { title } } }`, class.Class, customUser) + _, err := queryGQL(t, query, customKey) + require.NotNil(t, err) + var cErr *gql.GraphqlPostForbidden + require.True(t, errors.As(err, &cErr)) + }) + + t.Run("fail with 403 to query with Aggregate due to lack of read all collections permission", func(t *testing.T) { + query := fmt.Sprintf(`{ Aggregate { %s(tenant:"%s") { meta { count } } } }`, class.Class, customUser) + _, err = queryGQL(t, query, customKey) + require.NotNil(t, err) + var cErr *gql.GraphqlPostForbidden + require.True(t, errors.As(err, &cErr)) + }) + + t.Run("add the read all collections permission to 
the role", func(t *testing.T) { + _, err := helper.Client(t).Authz.AddPermissions(authz.NewAddPermissionsParams().WithID(readBooksRole).WithBody(authz.AddPermissionsBody{ + Permissions: []*models.Permission{{ + Action: String(authorization.ReadCollections), + Collections: &models.PermissionCollections{Collection: String("*")}, + }}, + }), helper.CreateAuth(adminKey)) + require.Nil(t, err) + }) + + t.Run("successfully query with Get and sufficient permissions", func(t *testing.T) { + query := fmt.Sprintf(`{ Get { %s(tenant:"%s") { title } } }`, class.Class, customUser) + assertGQL(t, query, customKey) + }) + + t.Run("successfully query with Aggregate and sufficient permissions", func(t *testing.T) { + query := fmt.Sprintf(`{ Aggregate { %s(tenant:"%s") { meta { count } } } }`, class.Class, customUser) + assertGQL(t, query, customKey) + }) + + t.Run("remove the read objects in books class and customUser tenant permission", func(t *testing.T) { + _, err := helper.Client(t).Authz.RemovePermissions(authz.NewRemovePermissionsParams().WithID(readBooksRole).WithBody(authz.RemovePermissionsBody{ + Permissions: []*models.Permission{{ + Action: String(authorization.ReadData), + Data: &models.PermissionData{ + Collection: String(class.Class), + Tenant: String(customUser), + }, + }}, + }), helper.CreateAuth(adminKey)) + require.Nil(t, err) + }) + + t.Run("add the read objects in books class and non-existent tenant permission", func(t *testing.T) { + _, err := helper.Client(t).Authz.AddPermissions(authz.NewAddPermissionsParams().WithID(readBooksRole).WithBody(authz.AddPermissionsBody{ + Permissions: []*models.Permission{{ + Action: String(authorization.ReadData), + Data: &models.PermissionData{ + Collection: String(class.Class), + Tenant: String("non-existent-tenant"), + }, + }}, + }), helper.CreateAuth(adminKey)) + require.Nil(t, err) + }) + + t.Run("fail with 200 to query with Get due to lack of read objects and customUser tenant permission", func(t *testing.T) { + query := 
fmt.Sprintf(`{ Get { %s(tenant:"%s") { title } } }`, class.Class, customUser) + resp, err := queryGQL(t, query, customKey) + require.Nil(t, err) + require.NotNil(t, resp.Payload.Errors) + require.Len(t, resp.Payload.Errors, 1) + require.Contains(t, resp.Payload.Errors[0].Message, "forbidden") + }) + + t.Run("fail with 200 to query with Aggregate due to lack of read objects and non-existent tenant permission", func(t *testing.T) { + query := fmt.Sprintf(`{ Aggregate { %s(tenant:"%s") { meta { count } } } }`, class.Class, customUser) + resp, err := queryGQL(t, query, customKey) + require.Nil(t, err) + require.NotNil(t, resp.Payload.Errors) + require.Len(t, resp.Payload.Errors, 1) + require.Contains(t, resp.Payload.Errors[0].Message, "forbidden") + }) + + t.Run("remove the read objects in books class and non-existent tenant permission", func(t *testing.T) { + _, err := helper.Client(t).Authz.RemovePermissions(authz.NewRemovePermissionsParams().WithID(readBooksRole).WithBody(authz.RemovePermissionsBody{ + Permissions: []*models.Permission{{ + Action: String(authorization.ReadData), + Data: &models.PermissionData{ + Collection: String(class.Class), + Tenant: String("non-existent-tenant"), + }, + }}, + }), helper.CreateAuth(adminKey)) + require.Nil(t, err) + }) + + t.Run("add the read objects in books class permission", func(t *testing.T) { + _, err := helper.Client(t).Authz.AddPermissions(authz.NewAddPermissionsParams().WithID(readBooksRole).WithBody(authz.AddPermissionsBody{ + Permissions: []*models.Permission{{ + Action: String(authorization.ReadData), + Data: &models.PermissionData{Collection: String(class.Class)}, + }}, + }), helper.CreateAuth(adminKey)) + require.Nil(t, err) + }) + + t.Run("successfully query with Get and sufficient permissions", func(t *testing.T) { + query := fmt.Sprintf(`{ Get { %s(tenant:"%s") { title } } }`, class.Class, customUser) + assertGQL(t, query, customKey) + }) + + t.Run("successfully query with Aggregate and sufficient permissions", 
func(t *testing.T) { + query := fmt.Sprintf(`{ Aggregate { %s(tenant:"%s") { meta { count } } } }`, class.Class, customUser) + assertGQL(t, query, customKey) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/groups_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/groups_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c95f8341706c88da64e8f14d0c59cadfec553dea --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/groups_test.go @@ -0,0 +1,237 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "errors" + "testing" + + "github.com/weaviate/weaviate/client/authz" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthzRolesForGroups(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + customUser := "custom-user" + customKey := "custom-key" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + all := "*" + groupsStart := "Prefix*" + + groupReadName := "groupRead" + groupReadFilterName := "groupReadFilter" + groupAssignName := "groupAssign" + roleReadName := "roleRead" + groupRead := &models.Role{ + Name: &groupReadName, + Permissions: []*models.Permission{{ + Action: &authorization.ReadGroups, + Groups: &models.PermissionGroups{Group: &all, GroupType: models.GroupTypeOidc}, + }}, + } + groupReadFilter := &models.Role{ + Name: &groupReadFilterName, + Permissions: []*models.Permission{{ + Action: &authorization.ReadGroups, + Groups: 
&models.PermissionGroups{Group: &groupsStart, GroupType: models.GroupTypeOidc}, + }}, + } + groupAssign := &models.Role{ + Name: &groupAssignName, + Permissions: []*models.Permission{{ + Action: &authorization.AssignAndRevokeGroups, + Groups: &models.PermissionGroups{Group: &all, GroupType: models.GroupTypeOidc}, + }}, + } + roleRead := &models.Role{ + Name: &roleReadName, + Permissions: []*models.Permission{{ + Action: &authorization.ReadRoles, + Roles: &models.PermissionRoles{Role: &all}, + }}, + } + + helper.DeleteRole(t, adminKey, groupReadName) + helper.DeleteRole(t, adminKey, groupReadFilterName) + helper.DeleteRole(t, adminKey, groupAssignName) + helper.DeleteRole(t, adminKey, roleReadName) + helper.CreateRole(t, adminKey, groupRead) + helper.CreateRole(t, adminKey, groupReadFilter) + helper.CreateRole(t, adminKey, groupAssign) + helper.CreateRole(t, adminKey, roleRead) + defer helper.DeleteRole(t, adminKey, groupReadName) + defer helper.DeleteRole(t, adminKey, groupReadFilterName) + defer helper.DeleteRole(t, adminKey, groupAssignName) + defer helper.DeleteRole(t, adminKey, roleReadName) + + t.Run("test returns", func(t *testing.T) { + readRole := helper.GetRoleByName(t, adminKey, groupReadName) + require.NotNil(t, readRole) + require.Equal(t, groupReadName, *readRole.Name) + require.Len(t, readRole.Permissions, 1) + require.Equal(t, all, *readRole.Permissions[0].Groups.Group) + require.Equal(t, authorization.ReadGroups, *readRole.Permissions[0].Action) + + assignRole := helper.GetRoleByName(t, adminKey, groupAssignName) + require.NotNil(t, assignRole) + require.Equal(t, groupAssignName, *assignRole.Name) + require.Len(t, assignRole.Permissions, 1) + require.Equal(t, all, *assignRole.Permissions[0].Groups.Group) + require.Equal(t, authorization.AssignAndRevokeGroups, *assignRole.Permissions[0].Action) + }) + + t.Run("assign group", func(t *testing.T) { + group := "some-group" + _, err := helper.Client(t).Authz.AssignRoleToGroup( + 
authz.NewAssignRoleToGroupParams().WithID(group).WithBody(authz.AssignRoleToGroupBody{GroupType: models.GroupTypeOidc, Roles: []string{groupReadName}}), + helper.CreateAuth(customKey), + ) + require.Error(t, err) + var errType *authz.AssignRoleToGroupForbidden + require.True(t, errors.As(err, &errType)) + + helper.AssignRoleToUser(t, adminKey, groupAssignName, customUser) + + // assigning works after user has appropriate rights + helper.AssignRoleToGroup(t, customKey, groupReadName, group) + defer helper.RevokeRoleFromGroup(t, adminKey, groupReadName, group) + + groupRoles := helper.GetRolesForGroup(t, adminKey, group, false) + require.Len(t, groupRoles, 1) + require.Equal(t, groupReadName, *groupRoles[0].Name) + + helper.RevokeRoleFromUser(t, adminKey, groupReadName, customUser) + helper.RevokeRoleFromUser(t, adminKey, groupAssignName, customUser) + }) + + t.Run("revoke group", func(t *testing.T) { + group := "revoke-group" + + helper.AssignRoleToGroup(t, adminKey, groupReadName, group) + // one deferred cleanup is enough; the revoke helper is idempotent for this test's purposes + defer helper.RevokeRoleFromGroup(t, adminKey, groupReadName, group) + + _, err := helper.Client(t).Authz.RevokeRoleFromGroup( + authz.NewRevokeRoleFromGroupParams().WithID(group).WithBody(authz.RevokeRoleFromGroupBody{GroupType: models.GroupTypeOidc, Roles: []string{groupReadName}}), + helper.CreateAuth(customKey), + ) + require.Error(t, err) + var errType *authz.RevokeRoleFromGroupForbidden + require.True(t, errors.As(err, &errType)) + + helper.AssignRoleToUser(t, adminKey, groupAssignName, customUser) + + // revoking works after user has appropriate rights + helper.RevokeRoleFromGroup(t, customKey, groupReadName, group) + groupRoles := helper.GetRolesForGroup(t, adminKey, group, false) + require.Len(t, groupRoles, 0) + + helper.RevokeRoleFromUser(t, adminKey, groupAssignName, customUser) + }) + + t.Run("get role for group", func(t *testing.T) { + group := "revoke-group" + + helper.AssignRoleToGroup(t, 
adminKey, groupReadName, group) + helper.AssignRoleToGroup(t, adminKey, groupAssignName, group) + defer helper.RevokeRoleFromGroup(t, adminKey, groupReadName, group) + defer helper.RevokeRoleFromGroup(t, adminKey, groupAssignName, group) + + _, err := helper.Client(t).Authz.GetRolesForGroup( + authz.NewGetRolesForGroupParams().WithID(group).WithGroupType(string(models.GroupTypeOidc)), + helper.CreateAuth(customKey), + ) + require.Error(t, err) + var errType *authz.GetRolesForGroupForbidden + require.True(t, errors.As(err, &errType)) + + helper.AssignRoleToUser(t, adminKey, groupReadName, customUser) + roles := helper.GetRolesForGroup(t, adminKey, group, false) + require.Len(t, roles, 2) + + // get roles for groups + truep := true + _, err = helper.Client(t).Authz.GetRolesForGroup( + authz.NewGetRolesForGroupParams().WithID(group).WithGroupType(string(models.GroupTypeOidc)).WithIncludeFullRoles(&truep), + helper.CreateAuth(customKey), + ) + require.Error(t, err) + require.True(t, errors.As(err, &errType)) + + helper.AssignRoleToUser(t, adminKey, roleReadName, customUser) + roles = helper.GetRolesForGroup(t, adminKey, group, true) + require.Len(t, roles, 2) + require.NotNil(t, roles[0].Permissions) + + helper.RevokeRoleFromUser(t, adminKey, groupReadName, customUser) + helper.RevokeRoleFromUser(t, adminKey, roleReadName, customUser) + }) + + t.Run("list all known groups and groups for roles", func(t *testing.T) { + group1 := "list-group1" + group2 := "list-group2" + + helper.AssignRoleToGroup(t, adminKey, groupReadName, group1) + helper.AssignRoleToGroup(t, adminKey, groupAssignName, group2) + + groups := helper.GetKnownGroups(t, adminKey) + require.Len(t, groups, 2) + require.Contains(t, groups, group1) + require.Contains(t, groups, group2) + + groupsForRead := helper.GetGroupsForRole(t, adminKey, groupReadName) + require.Len(t, groupsForRead, 1) + require.Contains(t, groupsForRead, group1) + + groupsForAssign := helper.GetGroupsForRole(t, adminKey, groupAssignName) 
+ require.Len(t, groupsForAssign, 1) + require.Contains(t, groupsForAssign, group2) + + helper.RevokeRoleFromGroup(t, adminKey, groupReadName, group1) + helper.RevokeRoleFromGroup(t, adminKey, groupAssignName, group2) + }) + + t.Run("list all known groups with filter", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, groupReadFilterName, customUser) + + group1 := "list-group1" + group2 := "list-group2" + group3 := groupsStart + "3" + group4 := groupsStart + "4" + + helper.AssignRoleToGroup(t, adminKey, groupReadName, group1) + helper.AssignRoleToGroup(t, adminKey, groupAssignName, group2) + helper.AssignRoleToGroup(t, adminKey, groupReadName, group3) + helper.AssignRoleToGroup(t, adminKey, groupAssignName, group4) + + groups := helper.GetKnownGroups(t, adminKey) + require.Len(t, groups, 4) + + groups = helper.GetKnownGroups(t, customKey) + require.Len(t, groups, 2) + + helper.RevokeRoleFromGroup(t, adminKey, groupReadName, group1) + helper.RevokeRoleFromGroup(t, adminKey, groupAssignName, group2) + helper.RevokeRoleFromGroup(t, adminKey, groupReadName, group3) + helper.RevokeRoleFromGroup(t, adminKey, groupAssignName, group4) + helper.RevokeRoleFromUser(t, adminKey, groupReadFilterName, customUser) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/grpc_aggregate_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/grpc_aggregate_test.go new file mode 100644 index 0000000000000000000000000000000000000000..73c702cbe6d4b3a2613b0ddc3d20d3e71e3f3199 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/grpc_aggregate_test.go @@ -0,0 +1,160 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + "github.com/weaviate/weaviate/usecases/auth/authorization" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" +) + +func TestAuthzAggregateWithGRPC(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + customUser := "custom-user" + customKey := "custom-key" + + customUser2 := "custom-user2" + customKey2 := "custom-key2" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey, customUser2: customKey2}, nil) + defer down() + + grpcClient := helper.ClientGRPC(t) + + tests := []struct { + name string + mtEnabled bool + tenantName string + tenantPermission string + }{ + { + name: "with multi-tenancy", + mtEnabled: true, + tenantName: "tenant1", + tenantPermission: "tenant1", + }, + { + name: "without multi-tenancy", + mtEnabled: false, + tenantName: "", + tenantPermission: "*", + }, + } + + helper.CreateClassAuth(t, articles.ParagraphsClass(), adminKey) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + defer helper.DeleteClassAuth(t, articles.ArticlesClass().Class, adminKey) + + cls := articles.ArticlesClass() + if tt.mtEnabled { + cls.MultiTenancyConfig = &models.MultiTenancyConfig{Enabled: true} + } + helper.CreateClassAuth(t, cls, adminKey) + if tt.mtEnabled { + helper.CreateTenantsAuth(t, cls.Class, []*models.Tenant{{Name: tt.tenantName}}, adminKey) + } + helper.CreateObjectsBatchAuth(t, []*models.Object{articles.NewArticle().WithTitle("How to git gud").WithTenant(tt.tenantName).Object()}, adminKey) + + roleName := fmt.Sprintf("role-%v", tt.mtEnabled) + 
helper.DeleteRole(t, adminKey, roleName) + defer helper.DeleteRole(t, adminKey, roleName) + helper.CreateRole(t, adminKey, &models.Role{Name: &roleName, Permissions: []*models.Permission{ + helper.NewDataPermission(). + WithAction(authorization.ReadData). + WithCollection(articles.ArticlesClass().Class). + WithTenant(tt.tenantPermission). + Permission(), + }}) + + t.Run("correctly fail to perform a gRPC Search call without permissions", func(t *testing.T) { + ctx := metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)) + _, err := grpcClient.Aggregate(ctx, &protocol.AggregateRequest{ + Collection: articles.ArticlesClass().Class, + Tenant: tt.tenantName, + ObjectsCount: true, + }) + require.NotNil(t, err) + require.Equal(t, status.Code(err), codes.PermissionDenied) + }) + + t.Run("correctly succeed to perform a gRPC Search call with permissions", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + ctx := metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)) + resp, err := grpcClient.Aggregate(ctx, &protocol.AggregateRequest{ + Collection: articles.ArticlesClass().Class, + Tenant: tt.tenantName, + ObjectsCount: true, + }) + if err != nil { + t.Logf("Error: %+v", err) + } + require.Nil(t, err) + require.NotNil(t, resp) + require.Equal(t, *resp.GetSingleResult().ObjectsCount, int64(1)) + }) + }) + } + + t.Run("with multi-tenancy and only partial permissions", func(t *testing.T) { + cls := articles.ArticlesClass() + cls.MultiTenancyConfig = &models.MultiTenancyConfig{Enabled: true} + helper.CreateClassAuth(t, cls, adminKey) + helper.CreateTenantsAuth(t, cls.Class, []*models.Tenant{{Name: "tenant1"}, {Name: "tenant2"}}, adminKey) + helper.CreateObjectsBatchAuth(t, []*models.Object{articles.NewArticle().WithTitle("How to git gud").WithTenant("tenant1").Object()}, adminKey) + helper.CreateObjectsBatchAuth(t, 
[]*models.Object{articles.NewArticle().WithTitle("How to git gud").WithTenant("tenant2").Object()}, adminKey) + + // role with permission for tenant1 but not 2 + roleName := "role-tenant-1" + helper.DeleteRole(t, adminKey, roleName) + defer helper.DeleteRole(t, adminKey, roleName) + + helper.CreateRole(t, adminKey, &models.Role{Name: &roleName, Permissions: []*models.Permission{ + helper.NewDataPermission(). + WithAction(authorization.ReadData). + WithCollection(articles.ArticlesClass().Class). + WithTenant("tenant1"). + Permission(), + }}) + helper.AssignRoleToUser(t, adminKey, roleName, customUser2) + + ctx := metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey2)) + resp, err := grpcClient.Aggregate(ctx, &protocol.AggregateRequest{ + Collection: articles.ArticlesClass().Class, + Tenant: "tenant1", + ObjectsCount: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.Equal(t, *resp.GetSingleResult().ObjectsCount, int64(1)) + + _, err = grpcClient.Aggregate(ctx, &protocol.AggregateRequest{ + Collection: articles.ArticlesClass().Class, + Tenant: "tenant2", + ObjectsCount: true, + }) + require.Error(t, err) + require.Equal(t, status.Code(err), codes.PermissionDenied) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/grpc_search_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/grpc_search_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d1c7c9fd02eacd0baabfafae92ef457d804134b2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/grpc_search_test.go @@ -0,0 +1,175 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + "github.com/weaviate/weaviate/usecases/auth/authorization" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" +) + +func TestAuthzGRPCSearch(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + customUser := "custom-user" + customKey := "custom-key" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + grpcClient := helper.ClientGRPC(t) + + tests := []struct { + name string + mtEnabled bool + tenantName string + tenantPermission string + }{ + { + name: "with multi-tenancy", + mtEnabled: true, + tenantName: "tenant1", + tenantPermission: "tenant1", + }, + { + name: "without multi-tenancy", + mtEnabled: false, + tenantName: "", + tenantPermission: "*", + }, + } + + helper.CreateClassAuth(t, articles.ParagraphsClass(), adminKey) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + defer helper.DeleteClassAuth(t, articles.ArticlesClass().Class, adminKey) + + cls := articles.ArticlesClass() + if tt.mtEnabled { + cls.MultiTenancyConfig = &models.MultiTenancyConfig{Enabled: true} + } + helper.CreateClassAuth(t, cls, adminKey) + if tt.mtEnabled { + helper.CreateTenantsAuth(t, cls.Class, []*models.Tenant{{Name: tt.tenantName}}, adminKey) + } + helper.CreateObjectsBatchAuth(t, []*models.Object{articles.NewArticle().WithTitle("How to git gud").WithTenant(tt.tenantName).Object()}, adminKey) + + t.Run("correctly fail to perform a gRPC Search call without permissions", func(t *testing.T) { + ctx := metadata.AppendToOutgoingContext(context.Background(), 
"authorization", fmt.Sprintf("Bearer %s", customKey)) + _, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: articles.ArticlesClass().Class, + Tenant: tt.tenantName, + }) + require.NotNil(t, err) + require.Equal(t, status.Code(err), codes.PermissionDenied) + }) + + t.Run("create role with necessary permissions on articles and assign to custom user", func(t *testing.T) { + roleName := fmt.Sprintf("role-%v", tt.mtEnabled) + helper.CreateRole(t, adminKey, &models.Role{Name: &roleName, Permissions: []*models.Permission{ + helper.NewDataPermission(). + WithAction(authorization.ReadData). + WithCollection(articles.ArticlesClass().Class). + WithTenant(tt.tenantPermission). + Permission(), + }}) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + }) + + t.Run("correctly succeed to perform a gRPC Search call with permissions", func(t *testing.T) { + ctx := metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)) + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: articles.ArticlesClass().Class, + Tenant: tt.tenantName, + }) + if err != nil { + t.Logf("Error: %+v", err) + } + require.Nil(t, err) + require.NotNil(t, resp) + require.Len(t, resp.Results, 1) + }) + }) + } +} + +func TestAuthzGRPCSearchWithMT(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + customUser := "custom-user" + customKey := "custom-key" + roleName := "role" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + grpcClient := helper.ClientGRPC(t) + + defer helper.DeleteClassAuth(t, articles.ArticlesClass().Class, adminKey) + + cls := articles.ArticlesClass() + cls.MultiTenancyConfig = &models.MultiTenancyConfig{Enabled: true} + tenant1 := "tenant1" + tenant2 := "tenant2" + + helper.CreateClassAuth(t, articles.ParagraphsClass(), adminKey) + helper.CreateClassAuth(t, cls, adminKey) + for _, tenant := 
range []string{tenant1, tenant2} { + helper.CreateTenantsAuth(t, cls.Class, []*models.Tenant{{Name: tenant}}, adminKey) + helper.CreateObjectsBatchAuth(t, []*models.Object{articles.NewArticle().WithTitle("A Treatise on the Astrolabe").WithTenant(tenant).Object()}, adminKey) + } + + helper.CreateRole(t, adminKey, &models.Role{Name: &roleName, Permissions: []*models.Permission{ + helper.NewDataPermission(). + WithAction(authorization.ReadData). + WithCollection(articles.ArticlesClass().Class). + WithTenant(tenant1). + Permission(), + }}) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + + t.Run(fmt.Sprintf("correctly fail to perform a gRPC Search call on %s", tenant2), func(t *testing.T) { + ctx := metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)) + _, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: articles.ArticlesClass().Class, + Tenant: tenant2, + }) + require.NotNil(t, err) + require.Equal(t, status.Code(err), codes.PermissionDenied) + }) + + t.Run(fmt.Sprintf("correctly succeed to perform a gRPC Search call on %s", tenant1), func(t *testing.T) { + ctx := metadata.AppendToOutgoingContext(context.Background(), "authorization", fmt.Sprintf("Bearer %s", customKey)) + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: articles.ArticlesClass().Class, + Tenant: tenant1, + }) + if err != nil { + t.Logf("Error: %+v", err) + } + require.Nil(t, err) + require.NotNil(t, resp) + require.Len(t, resp.Results, 1) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/helper.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/helper.go new file mode 100644 index 0000000000000000000000000000000000000000..05bf9d242ecea23550b9bbbce2a0feff33e26dc1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/helper.go @@ -0,0 +1,258 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ 
_` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "bufio" + "context" + "fmt" + "strings" + "testing" + + "github.com/testcontainers/testcontainers-go" + "google.golang.org/grpc/metadata" + + "github.com/stretchr/testify/require" + gql "github.com/weaviate/weaviate/client/graphql" + "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/usecases/auth/authorization" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/client/objects" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/go-openapi/runtime" + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/test/helper" +) + +const ( + UUID1 = strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168241") + UUID2 = strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168242") + UUID3 = strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168243") + UUID4 = strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168244") + UUID5 = strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168245") + UUID6 = strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168246") +) + +const NumBuildInRoles = 4 + +func deleteObjectClass(t *testing.T, class string, auth runtime.ClientAuthInfoWriter) { + delParams := clschema.NewSchemaObjectsDeleteParams().WithClassName(class) + delRes, err := helper.Client(t).Schema.SchemaObjectsDelete(delParams, auth) + helper.AssertRequestOk(t, delRes, err, nil) +} + +func createClass(t *testing.T, class *models.Class, auth runtime.ClientAuthInfoWriter) error { + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(class) + _, err := helper.Client(t).Schema.SchemaObjectsCreate(params, auth) + return err +} + +func generateMissingLists(permissions []*models.Permission) [][]*models.Permission { + var result [][]*models.Permission + + for i := range permissions { + 
missingList := make([]*models.Permission, 0, len(permissions)-1) + missingList = append(missingList, permissions[:i]...) + missingList = append(missingList, permissions[i+1:]...) + result = append(result, missingList) + } + + return result +} + +func createObject(t *testing.T, object *models.Object, key string) (*objects.ObjectsCreateOK, error) { + params := objects.NewObjectsCreateParams().WithBody(object) + return helper.Client(t).Objects.ObjectsCreate(params, helper.CreateAuth(key)) +} + +func getObject(t *testing.T, class string, id strfmt.UUID, tenant *string, key string) (*objects.ObjectsClassGetOK, error) { + params := objects.NewObjectsClassGetParams().WithClassName(class).WithID(id).WithTenant(tenant) + return helper.Client(t).Objects.ObjectsClassGet(params, helper.CreateAuth(key)) +} + +func getObjectDeprecated(t *testing.T, id strfmt.UUID, key string) (*objects.ObjectsGetOK, error) { + params := objects.NewObjectsGetParams().WithID(id) + return helper.Client(t).Objects.ObjectsGet(params, helper.CreateAuth(key)) +} + +func deleteObject(t *testing.T, class string, id strfmt.UUID, tenant *string, key string) (*objects.ObjectsClassDeleteNoContent, error) { + params := objects.NewObjectsClassDeleteParams().WithClassName(class).WithID(id).WithTenant(tenant) + return helper.Client(t).Objects.ObjectsClassDelete(params, helper.CreateAuth(key)) +} + +func deleteObjectDeprecated(t *testing.T, id strfmt.UUID, key string) (*objects.ObjectsDeleteNoContent, error) { + params := objects.NewObjectsDeleteParams().WithID(id) + return helper.Client(t).Objects.ObjectsDelete(params, helper.CreateAuth(key)) +} + +func updateObject(t *testing.T, object *models.Object, key string) (*objects.ObjectsClassPatchNoContent, error) { + params := objects.NewObjectsClassPatchParams().WithBody(object).WithID(object.ID).WithClassName(object.Class) + return helper.Client(t).Objects.ObjectsClassPatch(params, helper.CreateAuth(key)) +} + +func replaceObject(t *testing.T, object 
*models.Object, key string) (*objects.ObjectsClassPutOK, error) { + params := objects.NewObjectsClassPutParams().WithBody(object).WithID(object.ID).WithClassName(object.Class) + return helper.Client(t).Objects.ObjectsClassPut(params, helper.CreateAuth(key)) +} + +func addRef(t *testing.T, fromId strfmt.UUID, fromProp string, ref *models.SingleRef, key string) (*objects.ObjectsReferencesCreateOK, error) { + params := objects.NewObjectsReferencesCreateParams().WithBody(ref).WithID(fromId).WithPropertyName(fromProp) + return helper.Client(t).Objects.ObjectsReferencesCreate(params, helper.CreateAuth(key)) +} + +func updateRef(t *testing.T, fromId strfmt.UUID, fromProp string, ref *models.SingleRef, key string) (*objects.ObjectsReferencesUpdateOK, error) { + params := objects.NewObjectsReferencesUpdateParams().WithBody(models.MultipleRef{ref}).WithID(fromId).WithPropertyName(fromProp) + return helper.Client(t).Objects.ObjectsReferencesUpdate(params, helper.CreateAuth(key)) +} + +func deleteRef(t *testing.T, fromId strfmt.UUID, fromProp string, ref *models.SingleRef, key string) (*objects.ObjectsReferencesDeleteNoContent, error) { + params := objects.NewObjectsReferencesDeleteParams().WithBody(ref).WithID(fromId).WithPropertyName(fromProp) + return helper.Client(t).Objects.ObjectsReferencesDelete(params, helper.CreateAuth(key)) +} + +func String(s string) *string { + return &s +} + +func queryGQL(t *testing.T, query, key string) (*gql.GraphqlPostOK, error) { + params := gql.NewGraphqlPostParams().WithBody(&models.GraphQLQuery{OperationName: "", Query: query, Variables: nil}) + return helper.Client(t).Graphql.GraphqlPost(params, helper.CreateAuth(key)) +} + +func assertGQL(t *testing.T, query, key string) *models.GraphQLResponse { + params := gql.NewGraphqlPostParams().WithBody(&models.GraphQLQuery{OperationName: "", Query: query, Variables: nil}) + resp, err := helper.Client(t).Graphql.GraphqlPost(params, helper.CreateAuth(key)) + require.Nil(t, err) + if 
len(resp.Payload.Errors) > 0 { + t.Logf("Error: %s", resp.Payload.Errors[0].Message) + } + require.Equal(t, len(resp.Payload.Errors), 0) + return resp.Payload +} + +func readTenant(t *testing.T, class string, tenant string, key string) error { + params := clschema.NewTenantsGetOneParams().WithClassName(class).WithTenantName(tenant) + _, err := helper.Client(t).Schema.TenantsGetOne(params, helper.CreateAuth(key)) + return err +} + +func readTenantGRPC(t *testing.T, ctx context.Context, class, tenant, key string) (*protocol.TenantsGetReply, error) { + ctx = metadata.AppendToOutgoingContext(ctx, "authorization", fmt.Sprintf("Bearer %s", key)) + return helper.ClientGRPC(t).TenantsGet(ctx, &protocol.TenantsGetRequest{ + Collection: class, + Params: &protocol.TenantsGetRequest_Names{ + Names: &protocol.TenantNames{Values: []string{tenant}}, + }, + }) +} + +func readTenants(t *testing.T, class string, key string) (*clschema.TenantsGetOK, error) { + params := clschema.NewTenantsGetParams().WithClassName(class) + return helper.Client(t).Schema.TenantsGet(params, helper.CreateAuth(key)) +} + +func readTenantsGRPC(t *testing.T, ctx context.Context, class string, key string) (*protocol.TenantsGetReply, error) { + ctx = metadata.AppendToOutgoingContext(ctx, "authorization", fmt.Sprintf("Bearer %s", key)) + return helper.ClientGRPC(t).TenantsGet(ctx, &protocol.TenantsGetRequest{ + Collection: class, + }) +} + +func existsTenant(t *testing.T, class string, tenant string, key string) error { + params := clschema.NewTenantExistsParams().WithClassName(class).WithTenantName(tenant) + _, err := helper.Client(t).Schema.TenantExists(params, helper.CreateAuth(key)) + return err +} + +func createTenant(t *testing.T, class string, tenants []*models.Tenant, key string) error { + params := clschema.NewTenantsCreateParams().WithClassName(class).WithBody(tenants) + _, err := helper.Client(t).Schema.TenantsCreate(params, helper.CreateAuth(key)) + return err +} + +func deleteTenant(t *testing.T, 
class string, tenants []string, key string) error {
	params := clschema.NewTenantsDeleteParams().WithClassName(class).WithTenants(tenants)
	_, err := helper.Client(t).Schema.TenantsDelete(params, helper.CreateAuth(key))
	return err
}

// updateTenantStatus updates the given tenants of class (e.g. their activity
// status) via the REST schema API and returns only the call's error.
func updateTenantStatus(t *testing.T, class string, tenants []*models.Tenant, key string) error {
	req := clschema.NewTenantsUpdateParams().WithClassName(class).WithBody(tenants)
	auth := helper.CreateAuth(key)
	_, err := helper.Client(t).Schema.TenantsUpdate(req, auth)
	return err
}

// batchReferencesPermissions builds the permission set this suite uses for
// batch reference writes: read the source collection's schema, update data in
// the source, and read data in the target (data permissions scoped to tenant).
func batchReferencesPermissions(from, to, tenant string) []*models.Permission {
	readFromSchema := helper.NewCollectionsPermission().WithAction(authorization.ReadCollections).WithCollection(from).Permission()
	updateFromData := helper.NewDataPermission().WithAction(authorization.UpdateData).WithCollection(from).WithTenant(tenant).Permission()
	readToData := helper.NewDataPermission().WithAction(authorization.ReadData).WithCollection(to).WithTenant(tenant).Permission()
	return []*models.Permission{readFromSchema, updateFromData, readToData}
}

// addReferencePermissions builds the permission set this suite uses for
// single-reference creation: read both collections' schemas, update data in
// the source, and read data in the target (data permissions scoped to tenant).
func addReferencePermissions(from, to, tenant string) []*models.Permission {
	readFromSchema := helper.NewCollectionsPermission().WithAction(authorization.ReadCollections).WithCollection(from).Permission()
	readToSchema := helper.NewCollectionsPermission().WithAction(authorization.ReadCollections).WithCollection(to).Permission()
	updateFromData := helper.NewDataPermission().WithAction(authorization.UpdateData).WithCollection(from).WithTenant(tenant).Permission()
	readToData := helper.NewDataPermission().WithAction(authorization.ReadData).WithCollection(to).WithTenant(tenant).Permission()
	return []*models.Permission{readFromSchema, readToSchema, updateFromData, readToData}
}

// deleteReferencePermissions builds the permission set this suite uses for
// reference deletion (continued below).
func deleteReferencePermissions(from, to, tenant string) []*models.Permission {
	return []*models.Permission{
		helper.NewCollectionsPermission().WithAction(authorization.ReadCollections).WithCollection(from).Permission(),
		helper.NewCollectionsPermission().WithAction(authorization.ReadCollections).WithCollection(to).Permission(),
helper.NewDataPermission().WithAction(authorization.UpdateData).WithCollection(from).WithTenant(tenant).Permission(), + helper.NewDataPermission().WithAction(authorization.ReadData).WithCollection(from).WithTenant(tenant).Permission(), + helper.NewDataPermission().WithAction(authorization.ReadData).WithCollection(to).WithTenant(tenant).Permission(), + } +} + +type logScanner struct { + container testcontainers.Container + pos int +} + +func newLogScanner(c testcontainers.Container) *logScanner { + return &logScanner{container: c} +} + +func (s *logScanner) GetAuthzLogs(t *testing.T) []string { + t.Helper() // produces more accurate error tracebacks + + logs, err := s.container.Logs(context.Background()) + require.Nil(t, err) + defer logs.Close() + + scanner := bufio.NewScanner(logs) + currentPosition := 0 + + var newLines []string + for scanner.Scan() { + line := scanner.Text() + if line == "" { + continue + } + if currentPosition >= s.pos && strings.Contains(line, `"action":"authorize"`) { + newLines = append(newLines, line) + } + currentPosition++ + } + + s.pos = currentPosition + + return newLines +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/no_collection_name_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/no_collection_name_test.go new file mode 100644 index 0000000000000000000000000000000000000000..bb771ffdef8f3979c9d24b08e19b458fa95c3133 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/no_collection_name_test.go @@ -0,0 +1,425 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "fmt" + "testing" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/client/objects" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +// Test create + delete - update does not seem to work without classname and we should not fix it +func TestWithoutCollectionName(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + adminAuth := helper.CreateAuth(adminKey) + + customUser := "custom-user" + customKey := "custom-key" + + readDataAction := authorization.ReadData + deleteDataAction := authorization.DeleteData + readTenantAction := authorization.ReadTenants + testRoleName := t.Name() + "role" + all := "*" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + // add classes with object + className := t.Name() + "class" + + deleteObjectClass(t, className, adminAuth) + c := &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "prop", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + helper.CreateClassAuth(t, c, adminKey) + + obj := &models.Object{ + ID: UUID2, + Class: className, + Properties: map[string]interface{}{ + "prop": "test", + }, + } + err := helper.CreateObjectAuth(t, obj, adminKey) + require.NoError(t, err) + + getPermissionsClass := []*models.Permission{ + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: &className}, + }, + { + Action: &readTenantAction, + Tenants: &models.PermissionTenants{Collection: &className}, + }, + } + t.Run("Test get object - fail", func(t *testing.T) { + deleteRole := &models.Role{Name: 
&testRoleName, Permissions: getPermissionsClass} + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + res, err := getObjectDeprecated(t, UUID2, customKey) + require.Error(t, err) + var unauthorized *objects.ObjectsGetForbidden + require.True(t, errors.As(err, &unauthorized)) + + require.Nil(t, res) + }) + + getPermissionsAll := []*models.Permission{ + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: &all}, + }, + { + Action: &readTenantAction, + Tenants: &models.PermissionTenants{Collection: &all}, + }, + } + t.Run("Test get object - succeed", func(t *testing.T) { + deleteRole := &models.Role{Name: &testRoleName, Permissions: getPermissionsAll} + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + res, err := getObjectDeprecated(t, UUID2, customKey) + require.NoError(t, err) + require.NotNil(t, res) + }) + + deletePermissionsClass := []*models.Permission{ + { + Action: &deleteDataAction, + Data: &models.PermissionData{Collection: &className}, + }, + { + Action: &readTenantAction, + Tenants: &models.PermissionTenants{Collection: &className}, + }, + } + t.Run("delete object without collection name fail", func(t *testing.T) { + deleteRole := &models.Role{Name: &testRoleName, Permissions: deletePermissionsClass} + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + res, err := deleteObjectDeprecated(t, UUID2, customKey) + require.Error(t, err) + var unauthorized *objects.ObjectsDeleteForbidden + require.True(t, errors.As(err, &unauthorized)) + + require.Nil(t, res) + }) + + deletePermissionsAll := []*models.Permission{ + { + Action: &deleteDataAction, + Data: &models.PermissionData{Collection: &all}, + }, + { + Action: 
&readTenantAction, + Tenants: &models.PermissionTenants{Collection: &all}, + }, + } + t.Run("delete object without collection name succeed", func(t *testing.T) { + deleteRole := &models.Role{Name: &testRoleName, Permissions: deletePermissionsAll} + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + res, err := deleteObjectDeprecated(t, UUID2, customKey) + require.NoError(t, err) + require.NotNil(t, res) + }) +} + +func TestRefsWithoutCollectionNames(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + adminAuth := helper.CreateAuth(adminKey) + + customUser := "custom-user" + customKey := "custom-key" + + testRoleName := t.Name() + "role" + + readDataAction := authorization.ReadData + updateDataAction := authorization.UpdateData + readCollectionAction := authorization.ReadCollections + all := "*" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + articlesCls := articles.ArticlesClass() + paragraphsCls := articles.ParagraphsClass() + + helper.DeleteClassWithAuthz(t, paragraphsCls.Class, adminAuth) + helper.DeleteClassWithAuthz(t, articlesCls.Class, adminAuth) + helper.CreateClassAuth(t, paragraphsCls, adminKey) + helper.CreateClassAuth(t, articlesCls, adminKey) + defer helper.DeleteClassWithAuthz(t, paragraphsCls.Class, adminAuth) + defer helper.DeleteClassWithAuthz(t, articlesCls.Class, adminAuth) + + objs := []*models.Object{articles.NewParagraph().WithID(UUID1).Object(), articles.NewParagraph().WithID(UUID2).Object()} + objs = append(objs, articles.NewArticle().WithTitle("Article 1").WithID(UUID3).Object()) + objs = append(objs, articles.NewArticle().WithTitle("Article 2").WithID(UUID4).Object()) + helper.CreateObjectsBatchAuth(t, objs, adminKey) + + addrefPermissionsClass := []*models.Permission{ + { + Action: &readDataAction, + Data: 
&models.PermissionData{Collection: &articlesCls.Class}, + }, + { + Action: &updateDataAction, + Data: &models.PermissionData{Collection: &articlesCls.Class}, + }, + { + Action: &readCollectionAction, + Collections: &models.PermissionCollections{Collection: &articlesCls.Class}, + }, + { + Action: &readCollectionAction, + Collections: &models.PermissionCollections{Collection: ¶graphsCls.Class}, + }, + } + t.Run("Test add ref only class permissions - fail", func(t *testing.T) { + role := &models.Role{Name: &testRoleName, Permissions: addrefPermissionsClass} + helper.DeleteRole(t, adminKey, *role.Name) + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + ref := &models.SingleRef{Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", UUID1.String()))} + res, err := addRef(t, UUID3, "hasParagraphs", ref, customKey) + require.Error(t, err) + var unauthorized *objects.ObjectsReferencesCreateForbidden + require.True(t, errors.As(err, &unauthorized)) + + require.Nil(t, res) + }) + + addrefPermissionsAll := []*models.Permission{ + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: &all}, + }, + { + Action: &updateDataAction, + Data: &models.PermissionData{Collection: &all}, + }, + { + Action: &readCollectionAction, + Collections: &models.PermissionCollections{Collection: &all}, + }, + } + t.Run("Test add ref all permissions - succeed", func(t *testing.T) { + deleteRole := &models.Role{Name: &testRoleName, Permissions: addrefPermissionsAll} + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + ref := &models.SingleRef{Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", UUID1.String()))} + res, err := addRef(t, UUID3, "hasParagraphs", ref, customKey) + require.NoError(t, err) + require.NotNil(t, res) + }) + + for _, permissions := range generateMissingLists(addrefPermissionsAll) { + t.Run("Test add ref 
- missing permissions", func(t *testing.T) { + deleteRole := &models.Role{Name: &testRoleName, Permissions: permissions} + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + ref := &models.SingleRef{Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", UUID1.String()))} + res, err := addRef(t, UUID3, "hasParagraphs", ref, customKey) + require.Error(t, err) + var unauthorized *objects.ObjectsReferencesCreateForbidden + require.True(t, errors.As(err, &unauthorized)) + + require.Nil(t, res) + }) + } + + updaterefPermissionsClass := []*models.Permission{ + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: &articlesCls.Class}, + }, + { + Action: &updateDataAction, + Data: &models.PermissionData{Collection: &articlesCls.Class}, + }, + { + Action: &readCollectionAction, + Collections: &models.PermissionCollections{Collection: &articlesCls.Class}, + }, + { + Action: &readCollectionAction, + Collections: &models.PermissionCollections{Collection: ¶graphsCls.Class}, + }, + } + t.Run("Test add ref only class permissions - fail", func(t *testing.T) { + role := &models.Role{Name: &testRoleName, Permissions: updaterefPermissionsClass} + helper.DeleteRole(t, adminKey, *role.Name) + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + ref := &models.SingleRef{Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", UUID1.String()))} + res, err := updateRef(t, UUID3, "hasParagraphs", ref, customKey) + var unauthorized *objects.ObjectsReferencesUpdateForbidden + require.True(t, errors.As(err, &unauthorized)) + + require.Error(t, err) + require.Nil(t, res) + }) + + updaterefPermissionsAll := []*models.Permission{ + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: &all}, + }, + { + Action: &updateDataAction, + Data: &models.PermissionData{Collection: &all}, + }, + { + Action: 
&readCollectionAction, + Collections: &models.PermissionCollections{Collection: &all}, + }, + } + t.Run("Test update ref all permissions - succeed", func(t *testing.T) { + deleteRole := &models.Role{Name: &testRoleName, Permissions: updaterefPermissionsAll} + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + ref := &models.SingleRef{Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", UUID1.String()))} + res, err := updateRef(t, UUID3, "hasParagraphs", ref, customKey) + require.NoError(t, err) + require.NotNil(t, res) + }) + + t.Run("Test update ref - missing permissions", func(t *testing.T) { + for _, permissions := range generateMissingLists(updaterefPermissionsAll) { + deleteRole := &models.Role{Name: &testRoleName, Permissions: permissions} + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + ref := &models.SingleRef{Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", UUID1.String()))} + res, err := updateRef(t, UUID3, "hasParagraphs", ref, customKey) + require.Error(t, err) + var unauthorized *objects.ObjectsReferencesUpdateForbidden + require.True(t, errors.As(err, &unauthorized)) + + require.Nil(t, res) + } + }) + + deleterefPermissionsClass := []*models.Permission{ + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: &articlesCls.Class}, + }, + { + Action: &updateDataAction, + Data: &models.PermissionData{Collection: &articlesCls.Class}, + }, + { + Action: &readCollectionAction, + Collections: &models.PermissionCollections{Collection: &articlesCls.Class}, + }, + { + Action: &readCollectionAction, + Collections: &models.PermissionCollections{Collection: ¶graphsCls.Class}, + }, + } + t.Run("Test delete ref only class permissions - fail", func(t *testing.T) { + role := &models.Role{Name: &testRoleName, Permissions: 
deleterefPermissionsClass} + helper.DeleteRole(t, adminKey, *role.Name) + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + ref := &models.SingleRef{Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", UUID1.String()))} + res, err := deleteRef(t, UUID3, "hasParagraphs", ref, customKey) + require.Error(t, err) + var unauthorized *objects.ObjectsReferencesDeleteForbidden + require.True(t, errors.As(err, &unauthorized)) + + require.Nil(t, res) + }) + + deleterefPermissionsAll := []*models.Permission{ + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: &all}, + }, + { + Action: &updateDataAction, + Data: &models.PermissionData{Collection: &all}, + }, + { + Action: &readCollectionAction, + Collections: &models.PermissionCollections{Collection: &all}, + }, + } + t.Run("Test delete ref all permissions - succeed", func(t *testing.T) { + deleteRole := &models.Role{Name: &testRoleName, Permissions: deleterefPermissionsAll} + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + ref := &models.SingleRef{Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", UUID1.String()))} + res, err := deleteRef(t, UUID3, "hasParagraphs", ref, customKey) + require.NoError(t, err) + require.NotNil(t, res) + }) + + t.Run("Test delete ref - missing permissions", func(t *testing.T) { + for _, permissions := range generateMissingLists(deleterefPermissionsAll) { + deleteRole := &models.Role{Name: &testRoleName, Permissions: permissions} + helper.DeleteRole(t, adminKey, *deleteRole.Name) + helper.CreateRole(t, adminKey, deleteRole) + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + + ref := &models.SingleRef{Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", UUID1.String()))} + res, err := deleteRef(t, UUID3, "hasParagraphs", ref, customKey) + require.Error(t, err) + var unauthorized 
*objects.ObjectsReferencesDeleteForbidden + require.True(t, errors.As(err, &unauthorized)) + + require.Nil(t, res) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/nodes_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/nodes_test.go new file mode 100644 index 0000000000000000000000000000000000000000..6968270e6fafa4dbd9ef0cda2d6ee90109587ec3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/nodes_test.go @@ -0,0 +1,153 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/cluster" + "github.com/weaviate/weaviate/client/nodes" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/verbosity" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthzNodesFilter(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + customUser := "custom-user" + customKey := "custom-key" + roleName := "role" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + clsA := articles.ArticlesClass() + clsP := articles.ParagraphsClass() + + helper.DeleteClassWithAuthz(t, clsP.Class, helper.CreateAuth(adminKey)) + helper.DeleteClassWithAuthz(t, clsA.Class, helper.CreateAuth(adminKey)) + + helper.CreateClassAuth(t, clsP, adminKey) + helper.CreateClassAuth(t, clsA, adminKey) + helper.CreateObjectsBatchAuth(t, []*models.Object{articles.NewArticle().WithTitle("article1").Object()}, 
adminKey) + + helper.DeleteRole(t, adminKey, roleName) + defer helper.DeleteRole(t, adminKey, roleName) + helper.CreateRole(t, adminKey, &models.Role{Name: &roleName, Permissions: []*models.Permission{ + helper.NewNodesPermission().WithAction(authorization.ReadNodes).WithVerbosity(verbosity.OutputVerbose).WithCollection(clsA.Class).Permission(), + }}) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + + // only permissions for one of the classes + resp, err := helper.Client(t).Nodes.NodesGetClass(nodes.NewNodesGetClassParams().WithOutput(String(verbosity.OutputVerbose)), helper.CreateAuth(customKey)) + require.NoError(t, err) + require.Len(t, resp.Payload.Nodes[0].Shards, 1) + require.Equal(t, resp.Payload.Nodes[0].Shards[0].Class, clsA.Class) + + // admin gets back shards for two classes + resp, err = helper.Client(t).Nodes.NodesGetClass(nodes.NewNodesGetClassParams().WithOutput(String(verbosity.OutputVerbose)), helper.CreateAuth(adminKey)) + require.NoError(t, err) + require.Len(t, resp.Payload.Nodes[0].Shards, 2) +} + +func TestAuthzNodes(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + customUser := "custom-user" + customKey := "custom-key" + customRole := "custom" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + clsA := articles.ArticlesClass() + + helper.CreateClassAuth(t, articles.ParagraphsClass(), adminKey) + helper.CreateClassAuth(t, clsA, adminKey) + helper.CreateObjectsBatchAuth(t, []*models.Object{articles.NewArticle().WithTitle("article1").Object()}, adminKey) + + // make custom role with read_nodes and minimal nodes resource + helper.CreateRole(t, adminKey, &models.Role{Name: &customRole, Permissions: []*models.Permission{ + helper.NewNodesPermission().WithAction(authorization.ReadNodes).WithVerbosity(verbosity.OutputMinimal).Permission(), + }}) + + t.Run("fail to get nodes without minimal read_nodes", func(t *testing.T) { + _, 
err := helper.Client(t).Nodes.NodesGet(nodes.NewNodesGetParams(), helper.CreateAuth(customKey)) + require.NotNil(t, err) + var parsed *nodes.NodesGetForbidden + require.True(t, errors.As(err, &parsed)) + require.Contains(t, parsed.Payload.Error[0].Message, "forbidden") + }) + + t.Run("fail to get cluster stats without read_cluster", func(t *testing.T) { + _, err := helper.Client(t).Cluster.ClusterGetStatistics(cluster.NewClusterGetStatisticsParams(), helper.CreateAuth(customKey)) + require.NotNil(t, err) + var parsed *cluster.ClusterGetStatisticsForbidden + require.True(t, errors.As(err, &parsed)) + require.Contains(t, parsed.Payload.Error[0].Message, "forbidden") + }) + + t.Run("assign custom role to custom user", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, customRole, customUser) + }) + + t.Run("get minimal nodes with read_nodes", func(t *testing.T) { + resp, err := helper.Client(t).Nodes.NodesGet(nodes.NewNodesGetParams(), helper.CreateAuth(customKey)) + require.Nil(t, err) + require.Len(t, resp.Payload.Nodes, 1) + }) + + t.Run("add read_cluster to custom role", func(t *testing.T) { + helper.AddPermissions(t, adminKey, customRole, &models.Permission{Action: &authorization.ReadCluster}) + }) + + t.Run("get cluster stats with read_cluster", func(t *testing.T) { + resp, err := helper.Client(t).Cluster.ClusterGetStatistics(cluster.NewClusterGetStatisticsParams(), helper.CreateAuth(customKey)) + require.Nil(t, err) + require.Len(t, resp.Payload.Statistics, 1) + }) + + t.Run("fail to get verbose nodes without verbose read_nodes on all collections", func(t *testing.T) { + _, err := helper.Client(t).Nodes.NodesGetClass(nodes.NewNodesGetClassParams().WithClassName(clsA.Class).WithOutput(String("verbose")), helper.CreateAuth(customKey)) + require.NotNil(t, err) + var parsed *nodes.NodesGetClassForbidden + require.True(t, errors.As(err, &parsed)) + require.Contains(t, parsed.Payload.Error[0].Message, "forbidden") + }) + + t.Run("add verbose read_nodes on 
class to custom role", func(t *testing.T) { + helper.AddPermissions(t, adminKey, customRole, helper.NewNodesPermission().WithAction(authorization.ReadNodes).WithVerbosity(verbosity.OutputVerbose).WithCollection(clsA.Class).Permission()) + }) + + t.Run("get verbose nodes by class with verbose read_nodes on class", func(t *testing.T) { + resp, err := helper.Client(t).Nodes.NodesGetClass(nodes.NewNodesGetClassParams().WithClassName(clsA.Class).WithOutput(String("verbose")), helper.CreateAuth(customKey)) + require.Nil(t, err) + require.Len(t, resp.Payload.Nodes, 1) + }) + + t.Run("add read_data on * to custom role", func(t *testing.T) { + helper.AddPermissions(t, adminKey, customRole, helper.NewNodesPermission().WithAction(authorization.ReadNodes).WithVerbosity(verbosity.OutputVerbose).WithCollection("*").Permission()) + }) + + t.Run("get verbose nodes on all classes with read_data on *", func(t *testing.T) { + resp, err := helper.Client(t).Nodes.NodesGet(nodes.NewNodesGetParams().WithOutput(String("verbose")), helper.CreateAuth(customKey)) + require.Nil(t, err) + require.Len(t, resp.Payload.Nodes, 1) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/objects_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/objects_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ce63386ba19dee8a1c222d822b3aac3fefc186ff --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/objects_test.go @@ -0,0 +1,446 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "errors" + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthZObjectsEndpoints(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + adminAuth := helper.CreateAuth(adminKey) + customUser := "custom-user" + customKey := "custom-key" + customAuth := helper.CreateAuth(customKey) + + createDataAction := authorization.CreateData + readDataAction := authorization.ReadData + updateDataAction := authorization.UpdateData + deleteDataAction := authorization.DeleteData + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + tests := []struct { + name string + mtEnabled bool + tenantName string + tenantPermission *string + }{ + { + name: "with multi-tenancy", + mtEnabled: true, + tenantName: "tenant-1", + tenantPermission: String("tenant-1"), + }, + { + name: "without multi-tenancy", + mtEnabled: false, + tenantName: "", + tenantPermission: nil, + }, + } + + roleName := "AuthZObjectsTestRole" + className := "AuthZObjectsTest" + tenantNames := []string{"tenant-1", "tenant-2"} + tenants := []*models.Tenant{ + {Name: tenantNames[0]}, + {Name: tenantNames[1]}, + } + + for _, tt := range tests { + obj := &models.Object{ + ID: strfmt.UUID(uuid.New().String()), + Class: className, + Properties: map[string]interface{}{ + "prop": "test", + }, + Tenant: tt.tenantName, + } + + deleteObjectClass(t, className, adminAuth) + require.NoError(t, createClass(t, &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "prop", + DataType: schema.DataTypeText.PropString(), + 
}, + }, + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: tt.mtEnabled}, + }, adminAuth)) + if tt.mtEnabled { + helper.CreateTenantsAuth(t, className, tenants, adminKey) + } + + t.Run("Objects create (POST)", func(t *testing.T) { + t.Run(fmt.Sprintf("No rights %s", tt.name), func(t *testing.T) { + _, err := createObject(t, obj, customKey) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsCreateForbidden + if !errors.As(err, &errNoAuth) { + t.Fatalf("Expected error of type %T, got %T", errNoAuth, err) + } + require.True(t, errors.As(err, &errNoAuth)) + }) + + role := &models.Role{ + Name: &roleName, + Permissions: []*models.Permission{ + { + Action: &createDataAction, + Data: &models.PermissionData{Collection: &className, Tenant: tt.tenantPermission}, + }, + }, + } + helper.CreateRole(t, adminKey, role) + defer helper.DeleteRole(t, adminKey, *role.Name) + + t.Run(fmt.Sprintf("All rights %s", tt.name), func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + + _, err := createObject(t, obj, customKey) + if err != nil { + t.Fatalf("Expected no error, got %v", err) + } + require.Nil(t, err) + + if tt.mtEnabled { + t.Run("Fail to create with different tenant", func(t *testing.T) { + objNew := *obj + objNew.Tenant = tenantNames[1] + _, err := createObject(t, &objNew, customKey) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsCreateForbidden + if !errors.As(err, &errNoAuth) { + t.Fatalf("Expected error of type %T, got %T: %v", errNoAuth, err, err) + } + require.True(t, errors.As(err, &errNoAuth)) + }) + } + }) + }) + + t.Run("Objects get (GET)", func(t *testing.T) { + var tenant *string + if tt.mtEnabled { + tenant = &tt.tenantName + } + t.Run(fmt.Sprintf("No rights %s", tt.name), func(t *testing.T) { + _, err := getObject(t, obj.Class, obj.ID, tenant, customKey) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsClassGetForbidden + if 
!errors.As(err, &errNoAuth) { + t.Fatalf("Expected error of type %T, got %T", errNoAuth, err) + } + require.True(t, errors.As(err, &errNoAuth)) + }) + + role := &models.Role{ + Name: &roleName, + Permissions: []*models.Permission{ + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: &className, Tenant: tt.tenantPermission}, + }, + }, + } + helper.CreateRole(t, adminKey, role) + defer helper.DeleteRole(t, adminKey, *role.Name) + + t.Run(fmt.Sprintf("All rights %s", tt.name), func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + + _, err := getObject(t, obj.Class, obj.ID, tenant, customKey) + if err != nil { + t.Fatalf("Expected no error, got %v", err) + } + require.Nil(t, err) + + if tt.mtEnabled { + t.Run("Fail to get with different tenant", func(t *testing.T) { + _, err := getObject(t, obj.Class, obj.ID, &tenantNames[1], customKey) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsClassGetForbidden + if !errors.As(err, &errNoAuth) { + t.Fatalf("Expected error of type %T, got %T: %v", errNoAuth, err, err) + } + require.True(t, errors.As(err, &errNoAuth)) + }) + } + }) + }) + + t.Run("Objects class update (PATCH)", func(t *testing.T) { + t.Run(fmt.Sprintf("No rights %s", tt.name), func(t *testing.T) { + _, err := updateObject(t, obj, customKey) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsClassPatchForbidden + if !errors.As(err, &errNoAuth) { + t.Fatalf("Expected error of type %T, got %T: %v", errNoAuth, err, err) + } + require.True(t, errors.As(err, &errNoAuth)) + }) + + role := &models.Role{ + Name: &roleName, + Permissions: []*models.Permission{ + { + Action: &updateDataAction, + Data: &models.PermissionData{Collection: &className, Tenant: tt.tenantPermission}, + }, + }, + } + helper.CreateRole(t, adminKey, role) + defer helper.DeleteRole(t, adminKey, *role.Name) + + t.Run(fmt.Sprintf("All rights %s", tt.name), func(t 
*testing.T) { + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + + _, err := updateObject(t, obj, customKey) + if err != nil { + t.Fatalf("Expected no error, got %v", err) + } + require.Nil(t, err) + + if tt.mtEnabled { + t.Run("Fail to update with different tenant", func(t *testing.T) { + objNew := *obj + objNew.Tenant = tenantNames[1] + _, err := updateObject(t, &objNew, customKey) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsClassPatchForbidden + if !errors.As(err, &errNoAuth) { + t.Fatalf("Expected error of type %T, got %T: %v", errNoAuth, err, err) + } + require.True(t, errors.As(err, &errNoAuth)) + }) + } + }) + }) + + t.Run("Objects class replace (PUT)", func(t *testing.T) { + t.Run(fmt.Sprintf("No rights %s", tt.name), func(t *testing.T) { + _, err := replaceObject(t, obj, customKey) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsClassPutForbidden + if !errors.As(err, &errNoAuth) { + t.Fatalf("Expected error of type %T, got %T: %v", errNoAuth, err, err) + } + require.True(t, errors.As(err, &errNoAuth)) + }) + + role := &models.Role{ + Name: &roleName, + Permissions: []*models.Permission{ + { + Action: &updateDataAction, + Data: &models.PermissionData{Collection: &className, Tenant: tt.tenantPermission}, + }, + }, + } + helper.CreateRole(t, adminKey, role) + defer helper.DeleteRole(t, adminKey, *role.Name) + + t.Run(fmt.Sprintf("All rights %s", tt.name), func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + + _, err := replaceObject(t, obj, customKey) + if err != nil { + t.Fatalf("Expected no error, got %v", err) + } + require.Nil(t, err) + + if tt.mtEnabled { + t.Run("Fail to update with different tenant", func(t *testing.T) { + objNew := *obj + objNew.Tenant = tenantNames[1] + _, err := replaceObject(t, &objNew, customKey) + require.NotNil(t, err) + var 
errNoAuth *objects.ObjectsClassPutForbidden + if !errors.As(err, &errNoAuth) { + t.Fatalf("Expected error of type %T, got %T: %v", errNoAuth, err, err) + } + require.True(t, errors.As(err, &errNoAuth)) + }) + } + }) + }) + + t.Run("Objects exists (HEAD)", func(t *testing.T) { + t.Run(fmt.Sprintf("No rights %s", tt.name), func(t *testing.T) { + paramsObj := objects.NewObjectsClassHeadParams().WithClassName(obj.Class).WithID(obj.ID) + if tt.mtEnabled { + paramsObj = paramsObj.WithTenant(&tt.tenantName) + } + _, err := helper.Client(t).Objects.ObjectsClassHead(paramsObj, customAuth) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsClassHeadForbidden + require.True(t, errors.As(err, &errNoAuth)) + }) + + t.Run(fmt.Sprintf("All rights %s", tt.name), func(t *testing.T) { + role := &models.Role{ + Name: &roleName, + Permissions: []*models.Permission{ + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: &className, Tenant: tt.tenantPermission}, + }, + }, + } + + helper.CreateRole(t, adminKey, role) + defer helper.DeleteRole(t, adminKey, *role.Name) + + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + + paramsObj := objects.NewObjectsClassHeadParams().WithClassName(obj.Class).WithID(obj.ID) + if tt.mtEnabled { + paramsObj = paramsObj.WithTenant(&tt.tenantName) + } + _, err := helper.Client(t).Objects.ObjectsClassHead(paramsObj, customAuth) + require.Nil(t, err) + + if tt.mtEnabled { + t.Run("Fail to check existence with different tenant", func(t *testing.T) { + paramsObj := objects.NewObjectsClassHeadParams().WithClassName(obj.Class).WithTenant(&tenantNames[1]).WithID(obj.ID) + _, err := helper.Client(t).Objects.ObjectsClassHead(paramsObj, customAuth) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsClassHeadForbidden + if !errors.As(err, &errNoAuth) { + t.Fatalf("Expected error of type %T, got %T: %v", errNoAuth, err, err) + } + require.True(t, 
errors.As(err, &errNoAuth)) + }) + } + }) + }) + + t.Run("Objects validate (POST /validate)", func(t *testing.T) { + t.Run(fmt.Sprintf("No rights %s", tt.name), func(t *testing.T) { + paramsObj := objects.NewObjectsValidateParams().WithBody(obj) + _, err := helper.Client(t).Objects.ObjectsValidate(paramsObj, customAuth) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsValidateForbidden + require.True(t, errors.As(err, &errNoAuth)) + }) + + t.Run(fmt.Sprintf("All rights %s", tt.name), func(t *testing.T) { + role := &models.Role{ + Name: &roleName, + Permissions: []*models.Permission{ + { + Action: &readDataAction, + Data: &models.PermissionData{Collection: &className, Tenant: tt.tenantPermission}, + }, + }, + } + + helper.CreateRole(t, adminKey, role) + defer helper.DeleteRole(t, adminKey, *role.Name) + + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + + paramsObj := objects.NewObjectsValidateParams().WithBody(obj) + _, err := helper.Client(t).Objects.ObjectsValidate(paramsObj, customAuth) + require.Nil(t, err) + + if tt.mtEnabled { + t.Run("Fail to validate with different tenant", func(t *testing.T) { + objNew := *obj + objNew.Tenant = tenantNames[1] + paramsObj := objects.NewObjectsValidateParams().WithBody(&objNew) + _, err := helper.Client(t).Objects.ObjectsValidate(paramsObj, customAuth) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsValidateForbidden + if !errors.As(err, &errNoAuth) { + t.Fatalf("Expected error of type %T, got %T: %v", errNoAuth, err, err) + } + require.True(t, errors.As(err, &errNoAuth)) + }) + } + }) + }) + + t.Run("Objects class delete (DELETE)", func(t *testing.T) { + t.Run(fmt.Sprintf("No rights %s", tt.name), func(t *testing.T) { + paramsObj := objects.NewObjectsClassDeleteParams().WithClassName(obj.Class).WithID(obj.ID) + if tt.mtEnabled { + paramsObj = paramsObj.WithTenant(&tt.tenantName) + } + _, err := 
helper.Client(t).Objects.ObjectsClassDelete(paramsObj, customAuth) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsClassDeleteForbidden + require.True(t, errors.As(err, &errNoAuth)) + }) + + t.Run(fmt.Sprintf("All rights %s", tt.name), func(t *testing.T) { + role := &models.Role{ + Name: &roleName, + Permissions: []*models.Permission{ + { + Action: &deleteDataAction, + Data: &models.PermissionData{Collection: &className, Tenant: tt.tenantPermission}, + }, + }, + } + + helper.CreateRole(t, adminKey, role) + defer helper.DeleteRole(t, adminKey, *role.Name) + + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + + paramsObj := objects.NewObjectsClassDeleteParams().WithClassName(obj.Class).WithID(obj.ID) + if tt.mtEnabled { + paramsObj = paramsObj.WithTenant(&tt.tenantName) + } + _, err := helper.Client(t).Objects.ObjectsClassDelete(paramsObj, customAuth) + require.Nil(t, err) + + if tt.mtEnabled { + t.Run("Fail to delete with different tenant", func(t *testing.T) { + paramsObj := objects.NewObjectsClassDeleteParams().WithClassName(obj.Class).WithID(obj.ID).WithTenant(&tenantNames[1]) + _, err := helper.Client(t).Objects.ObjectsClassDelete(paramsObj, customAuth) + require.NotNil(t, err) + var errNoAuth *objects.ObjectsClassDeleteForbidden + if !errors.As(err, &errNoAuth) { + t.Fatalf("Expected error of type %T, got %T: %v", errNoAuth, err, err) + } + require.True(t, errors.As(err, &errNoAuth)) + }) + } + }) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/oidc_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/oidc_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1c427ad6c7d4ed8bc29c58f4d86063af14946d46 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/oidc_test.go @@ -0,0 +1,480 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ 
+// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "crypto/rand" + "crypto/rsa" + "errors" + "fmt" + "net" + "strings" + "testing" + "time" + + "github.com/weaviate/mockoidc" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/authz" + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestRbacWithOIDC(t *testing.T) { + customKey := "custom-key" + customUser := "custom-user" + tests := []struct { + name string + image *docker.Compose + nameCollision bool // same username for DB and OIDC + onlyOIDC bool + }{ + { + name: "RBAC with OIDC", + image: docker.New(). + WithWeaviate().WithMockOIDC().WithRBAC().WithRbacRoots("admin-user"), + nameCollision: false, + onlyOIDC: true, + }, + { + name: "RBAC with OIDC and API key", + image: docker.New(). + WithWeaviate().WithMockOIDC().WithRBAC().WithRbacRoots("admin-user"). + WithApiKey().WithUserApiKey("other", "random-key"), + nameCollision: false, + }, + { + name: "RBAC with OIDC and API key overlapping user names", + image: docker.New(). + WithWeaviate().WithMockOIDC(). + WithRBAC().WithRbacRoots("admin-user"). + WithApiKey().WithUserApiKey("other", "random-key"). + WithApiKey().WithUserApiKey("custom-user", customKey), + nameCollision: true, + }, + { + name: "RBAC with OIDC with certificate", + image: docker.New(). + WithWeaviate().WithMockOIDCWithCertificate().WithRBAC().WithRbacRoots("admin-user"), + nameCollision: false, + onlyOIDC: true, + }, + { + name: "RBAC with OIDC with certificate and API key", + image: docker.New(). 
+ WithWeaviate().WithMockOIDCWithCertificate().WithRBAC().WithRbacRoots("admin-user"). + WithApiKey().WithUserApiKey("other", "random-key"), + nameCollision: false, + }, + { + name: "RBAC with OIDC with certificate and API key overlapping user names", + image: docker.New(). + WithWeaviate().WithMockOIDCWithCertificate(). + WithRBAC().WithRbacRoots("admin-user"). + WithApiKey().WithUserApiKey("other", "random-key"). + WithApiKey().WithUserApiKey("custom-user", customKey), + nameCollision: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + ctx := context.Background() + compose, err := test.image.Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + // the oidc mock server returns first the token for the admin user and then for the custom-user. See its + // description for details + tokenAdmin, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + tokenCustom, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + + // prepare roles to assign later + all := "*" + readSchemaAction := authorization.ReadCollections + createSchemaAction := authorization.CreateCollections + createSchemaRoleName := "createSchema" + createSchemaRole := &models.Role{ + Name: &createSchemaRoleName, + Permissions: []*models.Permission{ + {Action: &readSchemaAction, Collections: &models.PermissionCollections{Collection: &all}}, + {Action: &createSchemaAction, Collections: &models.PermissionCollections{Collection: &all}}, + }, + } + helper.DeleteRole(t, tokenAdmin, createSchemaRoleName) + helper.CreateRole(t, tokenAdmin, createSchemaRole) + defer helper.DeleteRole(t, tokenAdmin, createSchemaRoleName) + + // custom-user does not have any roles/permissions + err = createClass(t, &models.Class{Class: "testingOidc"}, helper.CreateAuth(tokenCustom)) + require.Error(t, 
err) + var forbidden *clschema.SchemaObjectsCreateForbidden + require.True(t, errors.As(err, &forbidden)) + + // assigning to OIDC user + helper.AssignRoleToUserOIDC(t, tokenAdmin, createSchemaRoleName, customUser) + err = createClass(t, &models.Class{Class: "testingOidc"}, helper.CreateAuth(tokenCustom)) + require.NoError(t, err) + + // only OIDC user has role assigned + rolesOIDC := helper.GetRolesForUserOIDC(t, customUser, tokenAdmin) + require.Len(t, rolesOIDC, 1) + + if test.onlyOIDC || !test.nameCollision { + // validation check for existence will fail + _, err := helper.Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID(customUser).WithUserType(string(models.UserTypeInputDb)), helper.CreateAuth(tokenAdmin)) + require.Error(t, err) + var notFound *authz.GetRolesForUserNotFound + require.True(t, errors.As(err, ¬Found)) + } else { + rolesDB := helper.GetRolesForUser(t, customUser, tokenAdmin, true) + require.Len(t, rolesDB, 0) + } + + usersOidc := helper.GetUserForRolesBoth(t, createSchemaRoleName, tokenAdmin) + require.Len(t, usersOidc, 1) + if test.onlyOIDC || !test.nameCollision { + _, err := helper.Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID(customUser).WithUserType(string(models.UserTypeInputDb)), helper.CreateAuth(tokenAdmin)) + require.Error(t, err) + var notFound *authz.GetRolesForUserNotFound + require.True(t, errors.As(err, ¬Found)) + } else { + usersDB := helper.GetUserForRoles(t, createSchemaRoleName, tokenAdmin) + require.Len(t, usersDB, 0) + } + + // assign role to non-existing user => no error (if OIDC is enabled) + helper.AssignRoleToUserOIDC(t, tokenAdmin, createSchemaRoleName, "i-dont-exist") + + // only oidc root user, as api-keys are either not enabled or do not have a root user + users := helper.GetUserForRolesBoth(t, "root", tokenAdmin) + for _, user := range users { + require.Equal(t, *user.UserType, models.UserTypeOutputOidc) + } + + if test.nameCollision { + // api key user does NOT have 
the rights, even though it has the same name + err = createClass(t, &models.Class{Class: "testingApiKey"}, helper.CreateAuth(customKey)) + require.Error(t, err) + var forbidden *clschema.SchemaObjectsCreateForbidden + require.True(t, errors.As(err, &forbidden)) + + helper.AssignRoleToUser(t, tokenAdmin, createSchemaRoleName, "custom-user") + err = createClass(t, &models.Class{Class: "testingApiKey"}, helper.CreateAuth(customKey)) + require.NoError(t, err) + } + + if test.onlyOIDC { + // cannot assign/revoke to/from db users + resp, err := helper.Client(t).Authz.AssignRoleToUser( + authz.NewAssignRoleToUserParams().WithID("random-user").WithBody(authz.AssignRoleToUserBody{Roles: []string{createSchemaRoleName}, UserType: models.UserTypeInputDb}), + helper.CreateAuth(tokenAdmin), + ) + require.Nil(t, resp) + require.Error(t, err) + + resp2, err := helper.Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID("random-user").WithBody(authz.RevokeRoleFromUserBody{Roles: []string{createSchemaRoleName}, UserType: models.UserTypeInputDb}), + helper.CreateAuth(tokenAdmin), + ) + require.Nil(t, resp2) + require.Error(t, err) + + // no validation for deprecated path when OIDC is enabled: + _, err = helper.Client(t).Authz.AssignRoleToUser( + authz.NewAssignRoleToUserParams().WithID("random-user").WithBody(authz.AssignRoleToUserBody{Roles: []string{createSchemaRoleName}}), + helper.CreateAuth(tokenAdmin), + ) + require.NoError(t, err) + + _, err = helper.Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID("random-user").WithBody(authz.RevokeRoleFromUserBody{Roles: []string{createSchemaRoleName}}), + helper.CreateAuth(tokenAdmin), + ) + require.NoError(t, err) + } + }) + } +} + +func TestRbacWithOIDCGroups(t *testing.T) { + ctx := context.Background() + tests := []struct { + name string + image *docker.Compose + }{ + { + name: "without certificate", + image: 
docker.New().WithWeaviate().WithMockOIDC().WithRBAC().WithRbacRoots("admin-user"), + }, + { + name: "with certificate", + image: docker.New().WithWeaviate().WithMockOIDCWithCertificate().WithRBAC().WithRbacRoots("admin-user"), + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + compose, err := test.image.Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + // the oidc mock server returns first the token for the admin user and then for the custom-user. See its + // description for details + tokenAdmin, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + tokenCustom, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + + // prepare roles to assign later + className := strings.Replace(t.Name(), "/", "", 1) + "Class" + readSchemaAction := authorization.ReadCollections + createSchemaAction := authorization.CreateCollections + createSchemaRoleName := "createSchema" + createSchemaRole := &models.Role{ + Name: &createSchemaRoleName, + Permissions: []*models.Permission{ + {Action: &readSchemaAction, Collections: &models.PermissionCollections{Collection: &className}}, + {Action: &createSchemaAction, Collections: &models.PermissionCollections{Collection: &className}}, + }, + } + helper.DeleteRole(t, tokenAdmin, createSchemaRoleName) + helper.CreateRole(t, tokenAdmin, createSchemaRole) + defer helper.DeleteRole(t, tokenAdmin, createSchemaRoleName) + helper.DeleteClassWithAuthz(t, className, helper.CreateAuth(tokenAdmin)) + + roles := helper.GetRolesForGroup(t, tokenAdmin, "custom-group", false) + require.Len(t, roles, 0) + + // custom-user does not have any roles/permissions + err = createClass(t, &models.Class{Class: className}, helper.CreateAuth(tokenCustom)) + require.Error(t, err) + var forbidden *clschema.SchemaObjectsCreateForbidden + 
require.True(t, errors.As(err, &forbidden)) + + ownInfo := helper.GetInfoForOwnUser(t, tokenCustom) + require.Contains(t, ownInfo.Groups, "custom-group") + require.Len(t, ownInfo.Roles, 0) + + // assigning role to group and now user has permission + helper.AssignRoleToGroup(t, tokenAdmin, createSchemaRoleName, "custom-group") + err = createClass(t, &models.Class{Class: className}, helper.CreateAuth(tokenCustom)) + require.NoError(t, err) + + ownInfo = helper.GetInfoForOwnUser(t, tokenCustom) + require.Contains(t, ownInfo.Groups, "custom-group") + require.Len(t, ownInfo.Roles, 1) + require.Equal(t, *ownInfo.Roles[0].Name, createSchemaRoleName) + + rolesWithRoles := helper.GetRolesForGroup(t, tokenAdmin, "custom-group", true) + require.Len(t, rolesWithRoles, 1) + require.Equal(t, *rolesWithRoles[0].Name, createSchemaRoleName) + require.Len(t, rolesWithRoles[0].Permissions, 2) + + // delete class to test again after revocation + helper.DeleteClassWithAuthz(t, className, helper.CreateAuth(tokenAdmin)) + helper.RevokeRoleFromGroup(t, tokenAdmin, createSchemaRoleName, "custom-group") + err = createClass(t, &models.Class{Class: className}, helper.CreateAuth(tokenCustom)) + require.Error(t, err) + }) + } +} + +func TestRbacWithOIDCRootGroups(t *testing.T) { + ctx := context.Background() + tests := []struct { + name string + image *docker.Compose + }{ + { + name: "without certificate", + image: docker.New().WithWeaviate().WithMockOIDC().WithRBAC().WithRbacRoots("admin-user"), + }, + { + name: "with certificate", + image: docker.New().WithWeaviate().WithMockOIDCWithCertificate().WithRBAC().WithRbacRoots("admin-user"), + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + compose, err := test.image.WithRbacRootGroups("custom-group").Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + // the oidc mock server returns 
first the token for the admin user and then for the custom-user. See its + // description for details + tokenAdmin, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + tokenCustom, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + + className := strings.Replace(t.Name(), "/", "", 1) + "Class" + helper.DeleteClassWithAuthz(t, className, helper.CreateAuth(tokenAdmin)) + + // custom user can create collection without any extra roles, because of membership in root group + err = createClass(t, &models.Class{Class: className}, helper.CreateAuth(tokenCustom)) + require.NoError(t, err) + }) + } +} + +func TestRbacWithOIDCViewerGroups(t *testing.T) { + ctx := context.Background() + image := docker.New().WithWeaviate().WithMockOIDC().WithRBAC().WithRbacRoots("admin-user") + + compose, err := image.WithRbacViewerGroups("custom-group").Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + // the oidc mock server returns first the token for the admin user and then for the custom-user. 
See its + // description for details + tokenAdmin, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + tokenCustom, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + + className := strings.Replace(t.Name(), "/", "", 1) + "Class" + helper.DeleteClassWithAuthz(t, className, helper.CreateAuth(tokenAdmin)) + + // only viewer rights => custom user can NOT create collection + err = createClass(t, &models.Class{Class: className}, helper.CreateAuth(tokenCustom)) + require.Error(t, err) + var forbidden *clschema.SchemaObjectsCreateForbidden + require.True(t, errors.As(err, &forbidden)) + + require.NoError(t, createClass(t, &models.Class{Class: className}, helper.CreateAuth(tokenAdmin))) + + // can list collection + classes := helper.GetClassAuth(t, className, tokenCustom) + require.Equal(t, classes.Class, className) + + // cannot modify assignment + _, err = helper.Client(t).Authz.RevokeRoleFromGroup( + authz.NewRevokeRoleFromGroupParams().WithID("custom-group").WithBody(authz.RevokeRoleFromGroupBody{Roles: []string{"read-only"}}), + helper.CreateAuth(tokenAdmin), + ) + require.Error(t, err) +} + +const AuthCode = "auth" + +// This test starts an oidc mock server with the same settings as the containerized one. 
Helpful if you want to know +// why a OIDC request fails +// use docker.GetTokensFromMockOIDCWithHelperManualTest(t, "127.0.0.1:48001") to get the tokens +func TestRbacWithOIDCManual(t *testing.T) { + t.Skip("This is for testing/debugging only") + rsaKey, _ := rsa.GenerateKey(rand.Reader, 2048) + m, _ := mockoidc.NewServer(rsaKey) + ln, _ := net.Listen("tcp", "127.0.0.1:48001") + m.Start(ln, nil) + defer m.Shutdown() + m.ClientSecret = "Secret" + m.ClientID = "mock-oidc-test" + + // allow many runs without restart + for i := 0; i < 1000; i++ { + admin := &mockoidc.MockUser{Subject: "admin-user"} + m.QueueUser(admin) + m.QueueCode(AuthCode) + + custom := &mockoidc.MockUser{Subject: "custom-user", Groups: []string{"custom-group"}} + m.QueueUser(custom) + m.QueueCode(AuthCode) + } + + // this should just run until we are done with testing + for { + fmt.Println(m.Issuer()) + fmt.Println(m.TokenEndpoint()) + time.Sleep(time.Second) + } +} + +func TestOidcRootAndDynamicUsers(t *testing.T) { + ctx := context.Background() + tests := []struct { + name string + image *docker.Compose + }{ + { + name: "without certificate", + image: docker.New().WithWeaviate().WithMockOIDC().WithDbUsers(), + }, + { + name: "with certificate", + image: docker.New().WithWeaviate().WithMockOIDCWithCertificate().WithDbUsers(), + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + compose, err := test.image.Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + // the oidc mock server returns first the token for the admin user and then for the custom-user. 
See its + // description for details + tokenAdmin, _ := docker.GetTokensFromMockOIDCWithHelper(t, compose.GetMockOIDCHelper().URI()) + + helper.DeleteUser(t, "dynamic1", tokenAdmin) + apiKey := helper.CreateUser(t, "dynamic1", tokenAdmin) + + info := helper.GetInfoForOwnUser(t, apiKey) + require.Equal(t, *info.Username, "dynamic1") + }) + } +} + +func TestOidcWrongCertificate(t *testing.T) { + ctx := context.Background() + t.Run("wrong certificates", func(t *testing.T) { + // MockOIDC server has been created with it's own certifcates but we pass here some other certifcate, this situation should + // lead to Weaviate not being able to connect OIDC server thus not being able to start + wrongCertificate, _, err := docker.GenerateCertificateAndKey(docker.MockOIDC) + require.NoError(t, err) + compose, err := docker.New(). + WithWeaviate().WithDbUsers(). + WithMockOIDCWithCertificate(). + // pass some other certificate which is not used by MockOIDC + WithWeaviateEnv("AUTHENTICATION_OIDC_CERTIFICATE", wrongCertificate). + Start(ctx) + // Weaviate should not start in this configuration + require.Error(t, err) + require.NoError(t, compose.Terminate(ctx)) + }) + t.Run("proper certificates", func(t *testing.T) { + compose, err := docker.New(). + WithWeaviate().WithDbUsers(). + WithMockOIDCWithCertificate(). 
+ Start(ctx) + require.NoError(t, err) + require.NoError(t, compose.Terminate(ctx)) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/permissions_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/permissions_test.go new file mode 100644 index 0000000000000000000000000000000000000000..69e4cc69afb494ed4986df24f9f6052432feb4c9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/permissions_test.go @@ -0,0 +1,122 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthzRolesWithPermissions(t *testing.T) { + adminUser := "existing-user" + adminKey := "existing-key" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, nil, nil) + defer down() + + testClass := &models.Class{ + Class: "Foo", + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: false}, + } + + t.Run("create test collection before permissions", func(t *testing.T) { + helper.CreateClassAuth(t, testClass, adminKey) + }) + + t.Run("create and get a role to create all collections", func(t *testing.T) { + name := "create-all-collections" + helper.CreateRole(t, adminKey, &models.Role{ + Name: String(name), + Permissions: []*models.Permission{ + {Action: String(authorization.CreateCollections), Collections: &models.PermissionCollections{Collection: String("*")}}, + }, + }) + role := helper.GetRoleByName(t, adminKey, name) + require.NotNil(t, role) + require.Equal(t, name, *role.Name) + require.Len(t, role.Permissions, 1) + 
require.Equal(t, authorization.CreateCollections, *role.Permissions[0].Action) + require.Equal(t, "*", *role.Permissions[0].Collections.Collection) + }) + + t.Run("create and get a role to create all tenants in a collection", func(t *testing.T) { + name := "create-all-tenants-in-foo" + helper.CreateRole(t, adminKey, &models.Role{ + Name: String(name), + Permissions: []*models.Permission{ + {Action: String(authorization.CreateCollections), Collections: &models.PermissionCollections{Collection: String(testClass.Class)}}, + }, + }) + role := helper.GetRoleByName(t, adminKey, name) + require.NotNil(t, role) + require.Equal(t, name, *role.Name) + require.Len(t, role.Permissions, 1) + require.Equal(t, authorization.CreateCollections, *role.Permissions[0].Action) + require.Equal(t, testClass.Class, *role.Permissions[0].Collections.Collection) + }) + + t.Run("create and get a role to create all roles", func(t *testing.T) { + name := "manage-all-roles" + helper.CreateRole(t, adminKey, &models.Role{ + Name: String(name), + Permissions: []*models.Permission{ + {Action: String(authorization.CreateRoles), Roles: &models.PermissionRoles{Role: String("*"), Scope: String(models.PermissionRolesScopeAll)}}, + }, + }) + role := helper.GetRoleByName(t, adminKey, name) + require.NotNil(t, role) + require.Equal(t, name, *role.Name) + require.Len(t, role.Permissions, 1) + require.Equal(t, authorization.CreateRoles, *role.Permissions[0].Action) + require.Equal(t, "*", *role.Permissions[0].Roles.Role) + }) + + t.Run("create and get a role to create one role", func(t *testing.T) { + name := "manage-one-role" + helper.CreateRole(t, adminKey, &models.Role{ + Name: String(name), + Permissions: []*models.Permission{ + {Action: String(authorization.CreateRoles), Roles: &models.PermissionRoles{Role: String("foo"), Scope: String(models.PermissionRolesScopeAll)}}, + }, + }) + role := helper.GetRoleByName(t, adminKey, name) + require.NotNil(t, role) + require.Equal(t, name, *role.Name) + 
require.Len(t, role.Permissions, 1) + require.Equal(t, authorization.CreateRoles, *role.Permissions[0].Action) + require.Equal(t, "foo", *role.Permissions[0].Roles.Role) + }) + + t.Run("create and get a role to read two roles", func(t *testing.T) { + name := "read-one-role" + helper.CreateRole(t, adminKey, &models.Role{ + Name: String(name), + Permissions: []*models.Permission{ + {Action: String(authorization.ReadRoles), Roles: &models.PermissionRoles{Role: String("foo"), Scope: String(models.PermissionRolesScopeAll)}}, + {Action: String(authorization.ReadRoles), Roles: &models.PermissionRoles{Role: String("bar"), Scope: String(models.PermissionRolesScopeAll)}}, + }, + }) + role := helper.GetRoleByName(t, adminKey, name) + require.NotNil(t, role) + require.Equal(t, name, *role.Name) + require.Len(t, role.Permissions, 2) + require.Equal(t, authorization.ReadRoles, *role.Permissions[0].Action) + require.Equal(t, "foo", *role.Permissions[0].Roles.Role) + require.Equal(t, authorization.ReadRoles, *role.Permissions[1].Action) + require.Equal(t, "bar", *role.Permissions[1].Roles.Role) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/rbac_auto_admin_permissions_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/rbac_auto_admin_permissions_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8a065c85242e99011aab6920aa049b108246f523 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/rbac_auto_admin_permissions_test.go @@ -0,0 +1,141 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "bytes" + "fmt" + "net/http" + "sort" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func TestAuthzAllEndpointsAdminDynamically(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + + compose, down := composeUp(t, map[string]string{adminUser: adminKey}, nil, nil) + defer down() + + containers := compose.Containers() + require.Len(t, containers, 1) // started only one node + + var endpointStats endpointStatsSlice + + className := "ABC" + tenantNames := []string{ + "Tenant1", "Tenant2", "Tenant3", + } + helper.CreateClassAuth(t, &models.Class{Class: className, MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }}, adminKey) + tenants := make([]*models.Tenant, len(tenantNames)) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenantNames[i], ActivityStatus: "HOT"} + } + helper.CreateTenantsAuth(t, className, tenants, adminKey) + + col, err := newCollector() + require.Nil(t, err) + + endpoints := col.allEndpoints() + ls := newLogScanner(containers[0].Container()) + ls.GetAuthzLogs(t) // startup logs that are irrelevant + + for _, endpoint := range endpoints { + url := fmt.Sprintf("http://%s/v1%s", compose.GetWeaviate().URI(), endpoint.path) + url = strings.ReplaceAll(url, "/objects/{className}/{id}", fmt.Sprintf("/objects/%s/%s", className, UUID1.String())) + url = strings.ReplaceAll(url, "/objects/{id}", fmt.Sprintf("/objects/%s", UUID1.String())) + url = strings.ReplaceAll(url, "{className}", className) + url = strings.ReplaceAll(url, "{tenantName}", "Tenant1") + url = strings.ReplaceAll(url, "{shardName}", "Shard1") + url = strings.ReplaceAll(url, "{id}", "someId") + url = strings.ReplaceAll(url, "{backend}", "filesystem") + url = strings.ReplaceAll(url, "{propertyName}", "someProperty") + url = strings.ReplaceAll(url, "{user_id}", 
"random-user") + url = strings.ReplaceAll(url, "{userType}", "db") + url = strings.ReplaceAll(url, "{groupType}", "oidc") + url = strings.ReplaceAll(url, "{aliasName}", "aliasName") + + t.Run(url+"("+strings.ToUpper(endpoint.method)+")", func(t *testing.T) { + require.NotContains(t, url, "{") + require.NotContains(t, url, "}") + + var req *http.Request + var err error + + endpoint.method = strings.ToUpper(endpoint.method) + + if endpoint.method == "POST" || endpoint.method == "PUT" || endpoint.method == "PATCH" || endpoint.method == "DELETE" { + req, err = http.NewRequest(endpoint.method, url, bytes.NewBuffer(endpoint.validGeneratedBodyData)) + require.Nil(t, err) + req.Header.Set("Content-Type", "application/json") + + } else { + req, err = http.NewRequest(endpoint.method, url, nil) + require.Nil(t, err) + } + + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", adminKey)) + client := &http.Client{} + resp, err := client.Do(req) + require.Nil(t, err) + defer resp.Body.Close() + + require.NotEqual(t, http.StatusForbidden, resp.StatusCode) + + authZlogs := ls.GetAuthzLogs(t) + endpointStats = append(endpointStats, endpointStat{ + Count: len(authZlogs), + Method: endpoint.method, + Logs: authZlogs, + Endpoint: url, + }) + }) + } + + // sort by number of authZ calls and append to log + sort.Sort(endpointStats) + t.Log("EndpointStats:", endpointStats) +} + +type endpointStat struct { + Count int + Endpoint string + Method string + Logs []string +} + +func (e endpointStat) String() string { + return fmt.Sprintf("%s %s (count: %d), Logs: %v", e.Method, e.Endpoint, e.Count, e.Logs) +} + +type endpointStatsSlice []endpointStat + +// Implement sort.Interface +func (e endpointStatsSlice) Len() int { return len(e) } +func (e endpointStatsSlice) Less(i, j int) bool { return e[i].Count < e[j].Count } +func (e endpointStatsSlice) Swap(i, j int) { e[i], e[j] = e[j], e[i] } + +func (e endpointStatsSlice) String() string { + var str string + for _, e := range e { + str += 
e.String() + "\n" + } + return str +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/rbac_auto_no_permissions_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/rbac_auto_no_permissions_test.go new file mode 100644 index 0000000000000000000000000000000000000000..273a8a8d675a7499054b92181f7882bd22a174a4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/rbac_auto_no_permissions_test.go @@ -0,0 +1,142 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "bytes" + "fmt" + "net/http" + "slices" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func TestCollectEndpoints(t *testing.T) { + // print all endpoints grouped by method + col, err := newCollector() + require.Nil(t, err) + col.prettyPrint() +} + +func TestAuthzAllEndpointsNoPermissionDynamically(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + customKey := "custom-key" + customUser := "custom-user" + + compose, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + // create class via admin + className := "ABC" + tenantNames := []string{ + "Tenant1", "Tenant2", "Tenant3", + } + helper.DeleteClassWithAuthz(t, className, helper.CreateAuth(adminKey)) + helper.CreateClassAuth(t, &models.Class{Class: className, MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }}, adminKey) + tenants := make([]*models.Tenant, len(tenantNames)) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenantNames[i], ActivityStatus: "HOT"} + } + helper.CreateTenantsAuth(t, 
className, tenants, adminKey) + + col, err := newCollector() + require.Nil(t, err) + + endpoints := col.allEndpoints() + + ignoreEndpoints := []string{ + "/", + "/.well-known/live", + "/.well-known/openid-configuration", + "/.well-known/ready", + "/meta", + "/users/own-info", // will return info for own user + "/backups/{backend}", // we ignore backup because there is multiple endpoints doesn't need authZ and many validations + "/backups/{backend}/{id}", + "/backups/{backend}/{id}/restore", + "/replication/replicate/{id}", // for the same reason as backups above + "/replication/replicate/{id}/cancel", + "/replication/sharding-state", + "/tasks", // tasks is internal endpoint + "/classifications/{id}", // requires to get classification by id first before checking of authz permissions + } + + ignoreGetAll := []string{ + "/authz/roles", + "/objects", + "/schema", + "/schema/{className}/tenants", + "/schema/{className}/tenants/{tenantName}", + "/users/db", + "/aliases", + } + + for _, endpoint := range endpoints { + url := fmt.Sprintf("http://%s/v1%s", compose.GetWeaviate().URI(), endpoint.path) + url = strings.ReplaceAll(url, "/objects/{className}/{id}", fmt.Sprintf("/objects/%s/%s", className, UUID1.String())) + url = strings.ReplaceAll(url, "/objects/{id}", fmt.Sprintf("/objects/%s", UUID1.String())) + url = strings.ReplaceAll(url, "/replication/replicate/{id}", fmt.Sprintf("/replication/replicate/%s", UUID1.String())) + url = strings.ReplaceAll(url, "{className}", className) + url = strings.ReplaceAll(url, "{tenantName}", "Tenant1") + url = strings.ReplaceAll(url, "{shardName}", "Shard1") + url = strings.ReplaceAll(url, "{id}", "admin-user") + url = strings.ReplaceAll(url, "{backend}", "filesystem") + url = strings.ReplaceAll(url, "{propertyName}", "someProperty") + url = strings.ReplaceAll(url, "{user_id}", "admin-user") + url = strings.ReplaceAll(url, "{userType}", "db") + url = strings.ReplaceAll(url, "{aliasName}", "alias") + url = strings.ReplaceAll(url, 
"{groupType}", "oidc") + + t.Run(url+"("+strings.ToUpper(endpoint.method)+")", func(t *testing.T) { + require.NotContains(t, url, "{") + require.NotContains(t, url, "}") + + shallIgnore := slices.Contains(ignoreEndpoints, endpoint.path) || + (endpoint.method == http.MethodGet && slices.Contains(ignoreGetAll, endpoint.path)) + if shallIgnore { + t.Skip("Endpoint is in ignore list") + return + } + + var req *http.Request + var err error + + endpoint.method = strings.ToUpper(endpoint.method) + + if endpoint.method == http.MethodPost || endpoint.method == http.MethodPut || endpoint.method == http.MethodPatch || endpoint.method == http.MethodDelete { + req, err = http.NewRequest(endpoint.method, url, bytes.NewBuffer(endpoint.validGeneratedBodyData)) + require.Nil(t, err) + req.Header.Set("Content-Type", "application/json") + + } else { + req, err = http.NewRequest(endpoint.method, url, nil) + require.Nil(t, err) + } + + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", customKey)) + client := &http.Client{} + resp, err := client.Do(req) + require.Nil(t, err) + defer resp.Body.Close() + + require.Equal(t, http.StatusForbidden, resp.StatusCode) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/rbac_auto_viewer_permissions_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/rbac_auto_viewer_permissions_test.go new file mode 100644 index 0000000000000000000000000000000000000000..032f47996c6151175eb3c0492420f1c6e5d70c48 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/rbac_auto_viewer_permissions_test.go @@ -0,0 +1,128 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "bytes" + "fmt" + "net/http" + "slices" + "strings" + "testing" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func TestAuthzAllEndpointsViewerDynamically(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + viewerKey := "viewer-key" + viewerUser := "viewer-user" + + compose, down := composeUp(t, map[string]string{adminUser: adminKey}, nil, map[string]string{viewerUser: viewerKey}) + defer down() + + // create class via admin + className := "ABC" + tenantNames := []string{ + "Tenant1", "Tenant2", "Tenant3", + } + helper.AssignRoleToUser(t, adminKey, "viewer", viewerUser) + + helper.DeleteClassWithAuthz(t, className, helper.CreateAuth(adminKey)) + helper.CreateClassAuth(t, &models.Class{Class: className, MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }}, adminKey) + tenants := make([]*models.Tenant, len(tenantNames)) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenantNames[i], ActivityStatus: "HOT"} + } + helper.CreateTenantsAuth(t, className, tenants, adminKey) + + col, err := newCollector() + require.Nil(t, err) + + endpoints := col.allEndpoints() + + viewerPOSTNotForbiddenEndpoints := []string{ + "/graphql", + "/graphql/batch", + "/objects/validate", + "/backups/{backend}", // we ignore backup because there is multiple endpoints doesn't need authZ and many validations + "/backups/{backend}/{id}", + "/backups/{backend}/{id}/restore", + "/replication/replicate/{id}", // for the same reason as backups above + "/replication/replicate/{id}/cancel", + "/authz/roles/{id}/has-permission", // must be a POST rather than GET or HEAD due to need of body. 
but viewer can access it due to its permissions + } + + for _, endpoint := range endpoints { + url := fmt.Sprintf("http://%s/v1%s", compose.GetWeaviate().URI(), endpoint.path) + url = strings.ReplaceAll(url, "/objects/{className}/{id}", fmt.Sprintf("/objects/%s/%s", className, UUID1.String())) + url = strings.ReplaceAll(url, "/objects/{id}", fmt.Sprintf("/objects/%s", UUID1.String())) + url = strings.ReplaceAll(url, "/replication/replicate/{id}", fmt.Sprintf("/replication/replicate/%s", UUID1.String())) + url = strings.ReplaceAll(url, "{className}", className) + url = strings.ReplaceAll(url, "{tenantName}", "Tenant1") + url = strings.ReplaceAll(url, "{shardName}", "Shard1") + url = strings.ReplaceAll(url, "{id}", "someId") + url = strings.ReplaceAll(url, "{backend}", "filesystem") + url = strings.ReplaceAll(url, "{propertyName}", "someProperty") + url = strings.ReplaceAll(url, "{user_id}", "admin-user") + url = strings.ReplaceAll(url, "{userType}", "db") + url = strings.ReplaceAll(url, "{groupType}", "oidc") + url = strings.ReplaceAll(url, "{aliasName}", "aliasName") + + t.Run(url+"("+strings.ToUpper(endpoint.method)+")", func(t *testing.T) { + require.NotContains(t, url, "{") + require.NotContains(t, url, "}") + + var req *http.Request + var err error + + endpoint.method = strings.ToUpper(endpoint.method) + forbidden := false + if endpoint.method == "POST" || endpoint.method == "PUT" || endpoint.method == "PATCH" || endpoint.method == "DELETE" { + req, err = http.NewRequest(endpoint.method, url, bytes.NewBuffer(endpoint.validGeneratedBodyData)) + require.Nil(t, err) + req.Header.Set("Content-Type", "application/json") + if !strings.Contains(url, "backups/filesystem/someId/restore") { + // restore endpoint do READ permissions the backend and then + // later checks if the meta file exists, therefor we ignore + // it here because it will return 404 + forbidden = true + } + } else { + req, err = http.NewRequest(endpoint.method, url, nil) + require.Nil(t, err) + } + + 
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", viewerKey)) + client := &http.Client{} + resp, err := client.Do(req) + require.Nil(t, err) + defer resp.Body.Close() + + if slices.Contains(viewerPOSTNotForbiddenEndpoints, endpoint.path) { + require.NotEqual(t, http.StatusForbidden, resp.StatusCode) + return + } + + if forbidden { + require.Equal(t, http.StatusForbidden, resp.StatusCode) + } else { + require.NotEqual(t, http.StatusForbidden, resp.StatusCode) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/rbac_viewer_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/rbac_viewer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a10cae32041b6fce157318ff3e57c49240c85757 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/rbac_viewer_test.go @@ -0,0 +1,134 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "bytes" + "fmt" + "net/http" + "strings" + "testing" + + "github.com/weaviate/weaviate/test/helper" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/stretchr/testify/require" +) + +func TestAuthzViewerEndpoints(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + viewerKey := "viewer-key" + viewerUser := "viewer-user" + + compose, down := composeUp(t, map[string]string{adminUser: adminKey}, nil, map[string]string{viewerUser: viewerKey}) + defer down() + + weaviateUrl := compose.GetWeaviate().URI() + + uri := strfmt.URI("weaviate://localhost/Class/" + uuid.New().String()) + helper.AssignRoleToUser(t, adminKey, "viewer", viewerUser) + + endpoints := []struct { + endpoint string + methods []string + success []bool + arrayReq bool + body map[string][]byte + }{ + {endpoint: "authz/roles", methods: []string{"GET", "POST"}, success: []bool{true, false}, arrayReq: false, body: map[string][]byte{"POST": []byte("{\"name\": \"n\", \"permissions\":[{\"action\": \"read_cluster\", \"cluster\": {}}]}")}}, + {endpoint: "authz/roles/id", methods: []string{"GET", "DELETE"}, success: []bool{true, false}, arrayReq: false}, + {endpoint: "authz/roles/id/users", methods: []string{"GET"}, success: []bool{true}, arrayReq: false}, + {endpoint: "authz/users/id/roles", methods: []string{"GET"}, success: []bool{true}, arrayReq: false}, + {endpoint: "authz/users/id/assign", methods: []string{"POST"}, success: []bool{false}, arrayReq: false, body: map[string][]byte{"POST": []byte("{\"roles\": [\"abc\"]}")}}, + {endpoint: "authz/users/id/revoke", methods: []string{"POST"}, success: []bool{false}, arrayReq: false, body: map[string][]byte{"POST": []byte("{\"roles\": [\"abc\"]}")}}, + {endpoint: "batch/objects", methods: []string{"POST", "DELETE"}, success: []bool{false, false}, arrayReq: false, body: map[string][]byte{"POST": []byte("{\"objects\": [{\"class\": \"c\"}]}")}}, + 
{endpoint: "batch/references", methods: []string{"POST"}, success: []bool{false}, arrayReq: true, body: map[string][]byte{"POST": []byte(fmt.Sprintf("[{\"from\": %q, \"to\": %q}]", uri+"/ref", uri))}}, + {endpoint: "classifications", methods: []string{"POST"}, success: []bool{false}, arrayReq: false}, + {endpoint: "classifications/id", methods: []string{"GET"}, success: []bool{true}, arrayReq: false}, + {endpoint: "cluster/statistics", methods: []string{"GET"}, success: []bool{true}, arrayReq: false}, + {endpoint: "graphql", methods: []string{"POST"}, success: []bool{true}, arrayReq: false}, + {endpoint: "objects", methods: []string{"GET", "POST"}, success: []bool{true, false}, arrayReq: false}, + {endpoint: "objects/" + UUID1.String(), methods: []string{"GET", "HEAD", "DELETE", "PATCH", "PUT"}, success: []bool{true, true, false, false, false}, arrayReq: false, body: map[string][]byte{"PATCH": []byte(fmt.Sprintf("{\"class\": \"c\", \"id\":%q}", UUID1.String()))}}, + {endpoint: "objects/RandomClass/" + UUID1.String(), methods: []string{"GET", "HEAD", "DELETE", "PATCH", "PUT"}, success: []bool{true, true, false, false, false}, arrayReq: false, body: map[string][]byte{"PATCH": []byte(fmt.Sprintf("{\"class\": \"c\", \"id\":%q}", UUID1.String()))}}, + {endpoint: "objects/" + UUID1.String() + "/references/prop", methods: []string{"DELETE", "POST"}, success: []bool{false, false}, arrayReq: false}, + {endpoint: "objects/" + UUID1.String() + "/references/prop", methods: []string{"PUT"}, success: []bool{false}, arrayReq: true}, + {endpoint: "objects/RandomClass/" + UUID1.String() + "/references/prop", methods: []string{"DELETE", "POST"}, success: []bool{false, false}, arrayReq: false}, + {endpoint: "objects/RandomClass/" + UUID1.String() + "/references/prop", methods: []string{"PUT"}, success: []bool{false}, arrayReq: true}, + {endpoint: "objects/validate", methods: []string{"POST"}, success: []bool{true}, arrayReq: false}, + {endpoint: "meta", methods: []string{"GET"}, 
success: []bool{true}, arrayReq: false},
+		{endpoint: "nodes", methods: []string{"GET"}, success: []bool{true}, arrayReq: false},
+		{endpoint: "schema", methods: []string{"GET", "POST"}, success: []bool{true, false}, arrayReq: false},
+		{endpoint: "schema/RandomClass", methods: []string{"GET", "PUT", "DELETE"}, success: []bool{true, false, false}, arrayReq: false},
+		{endpoint: "schema/RandomClass/properties", methods: []string{"POST"}, success: []bool{false}, arrayReq: false},
+		{endpoint: "schema/RandomClass/shards", methods: []string{"GET"}, success: []bool{true}, arrayReq: false},
+		{endpoint: "schema/RandomClass/shards/name", methods: []string{"PUT"}, success: []bool{false}, arrayReq: false},
+		{endpoint: "schema/RandomClass/tenants", methods: []string{"GET", "POST", "PUT", "DELETE"}, success: []bool{true, false, false, false}, arrayReq: true},
+		{endpoint: "schema/RandomClass/tenants/name", methods: []string{"HEAD"}, success: []bool{true}, arrayReq: false},
+	}
+
+	for _, endpoint := range endpoints {
+		// Sanity check: every method needs a matching success expectation.
+		if len(endpoint.methods) != len(endpoint.success) {
+			t.Fatalf("expected %d methods and success, got %d", len(endpoint.methods), len(endpoint.success))
+		}
+		for i, method := range endpoint.methods {
+			t.Run(endpoint.endpoint+"_"+method, func(t *testing.T) {
+				var req *http.Request
+				var err error
+				if method == "POST" || method == "PUT" || method == "PATCH" || method == "DELETE" {
+					// Body-carrying methods: use the per-method body when declared,
+					// otherwise an empty JSON array/object depending on arrayReq.
+					var body []byte
+					if bodyC, ok := endpoint.body[method]; ok {
+						body = bodyC
+					} else {
+						if endpoint.arrayReq {
+							body = []byte(`[]`)
+						} else {
+							body = []byte(`{}`)
+						}
+					}
+
+					reqBody := bytes.NewBuffer(body)
+					req, err = http.NewRequest(method, fmt.Sprintf("http://%s/v1/%s", weaviateUrl, endpoint.endpoint), reqBody)
+					require.Nil(t, err)
+					req.Header.Set("Content-Type", "application/json")
+				} else if method == "DELETE" {
+					// NOTE(review): unreachable — "DELETE" is already matched by the
+					// branch above, so this body is never sent. Removing this branch
+					// would leave the "strings" import unused; delete both together.
+					reqBody := strings.NewReader("[\n  \"\"\n]")
+					req, err = http.NewRequest(method, fmt.Sprintf("http://%s/v1/%s", weaviateUrl, endpoint.endpoint), reqBody)
+					require.Nil(t, err)
+					req.Header.Set("Content-Type", "application/json")
+
+				} else {
+					req, err = http.NewRequest(method, fmt.Sprintf("http://%s/v1/%s", weaviateUrl, endpoint.endpoint), nil)
+					require.Nil(t, err)
+				}
+
+				// Set the Authorization header with the viewer-key
+				// NOTE(review): the hardcoded "viewer-key" duplicates the viewerKey
+				// variable declared at the top of the test — keep the two in sync.
+				req.Header.Set("Authorization", "Bearer viewer-key")
+
+				// Perform the request
+				resp, err := http.DefaultClient.Do(req)
+				if err != nil {
+					t.Fatalf("request to %s failed: %v", endpoint.endpoint, err)
+				}
+				defer resp.Body.Close()
+
+				// Check if the response succeeded or failed as expected
+				// (403 Forbidden is the only status the authz layer contributes here).
+				if endpoint.success[i] && resp.StatusCode == 403 {
+					t.Errorf("expected success for %s %s, but got status %d", method, endpoint.endpoint, resp.StatusCode)
+				} else if !endpoint.success[i] && resp.StatusCode != 403 {
+					t.Errorf("expected failure for %s %s, but got status %d", method, endpoint.endpoint, resp.StatusCode)
+				}
+			})
+		}
+	}
+}
diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/references_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/references_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..70ba69fb43da0808ca9938616e83aafd92d43c8e
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/references_test.go
@@ -0,0 +1,194 @@
+//                           _       _
+//  __      _____  __ ___   ___  __ _| |_ ___
+//  \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//   \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//    \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" +) + +func TestAuthZReferencesOperations(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + // adminAuth := helper.CreateAuth(adminKey) + customUser := "custom-user" + customKey := "custom-key" + customAuth := helper.CreateAuth(customKey) + + _, teardown := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer teardown() + + tenants := []*models.Tenant{{Name: "tenant1"}, {Name: "tenant2"}} + + paragraphsCls := articles.ParagraphsClass() + paragraphsCls.MultiTenancyConfig = &models.MultiTenancyConfig{ + Enabled: true, + } + helper.CreateClassAuth(t, paragraphsCls, adminKey) + helper.CreateTenantsAuth(t, paragraphsCls.Class, tenants, adminKey) + + articlesCls := articles.ArticlesClass() + articlesCls.MultiTenancyConfig = &models.MultiTenancyConfig{ + Enabled: true, + } + helper.CreateClassAuth(t, articlesCls, adminKey) + helper.CreateTenantsAuth(t, articlesCls.Class, tenants, adminKey) + refProp := "hasParagraphs" + + roleName := "ref-ops-test" + + paragraphObjs := make([]*models.Object, 0) + paragraphObjs = append(paragraphObjs, articles.NewParagraph().WithID(UUID1).WithTenant(tenants[0].Name).Object()) + paragraphObjs = append(paragraphObjs, articles.NewParagraph().WithID(UUID2).WithTenant(tenants[1].Name).Object()) + helper.CreateObjectsBatchAuth(t, paragraphObjs, adminKey) + + ref := &models.SingleRef{Beacon: strfmt.URI("weaviate://localhost/" + UUID1.String())} + + articleObjs := make([]*models.Object, 0) + articleObjs = append(articleObjs, articles.NewArticle().WithTitle("Article 
1").WithTenant(tenants[0].Name).Object()) + articleObjs = append(articleObjs, articles.NewArticle().WithTitle("Article 2").WithTenant(tenants[1].Name).Object()) + helper.CreateObjectsBatchAuth(t, articleObjs, adminKey) + + addRefInTenant1 := func() (*objects.ObjectsClassReferencesCreateOK, error) { + return helper.AddReferenceReturn(t, ref, articleObjs[0].ID, articleObjs[0].Class, refProp, tenants[0].Name, customAuth) + } + + replaceRefInTenant1 := func() (*objects.ObjectsClassReferencesPutOK, error) { + return helper.ReplaceReferencesReturn(t, []*models.SingleRef{ref}, articleObjs[0].ID, articleObjs[0].Class, refProp, tenants[0].Name, customAuth) + } + + deleteRefInTenant1 := func() (*objects.ObjectsClassReferencesDeleteNoContent, error) { + return helper.DeleteReferenceReturn(t, ref, articleObjs[0].ID, articleObjs[0].Class, refProp, tenants[0].Name, customAuth) + } + + t.Run("Reference create (POST)", func(t *testing.T) { + var forbidden *objects.ObjectsClassReferencesCreateForbidden + t.Run("fails with no permissions", func(t *testing.T) { + _, err := addRefInTenant1() + require.ErrorAs(t, err, &forbidden) + }) + + t.Run(fmt.Sprintf("succeeds with permissions on %s only", tenants[0].Name), func(t *testing.T) { + role := &models.Role{ + Name: &roleName, + Permissions: addReferencePermissions(articlesCls.Class, paragraphsCls.Class, tenants[0].Name), + } + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer func() { + helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + helper.DeleteRole(t, adminKey, roleName) + }() + _, err := addRefInTenant1() + require.NoError(t, err) + }) + + t.Run(fmt.Sprintf("fails with permissions on %s only", tenants[1].Name), func(t *testing.T) { + role := &models.Role{ + Name: &roleName, + Permissions: addReferencePermissions(articlesCls.Class, paragraphsCls.Class, tenants[1].Name), + } + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, roleName, 
customUser) + defer func() { + helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + helper.DeleteRole(t, adminKey, roleName) + }() + _, err := addRefInTenant1() + require.ErrorAs(t, err, &forbidden) + }) + }) + + t.Run("Reference replace (PUT)", func(t *testing.T) { + var forbidden *objects.ObjectsClassReferencesPutForbidden + t.Run("fails with no permissions", func(t *testing.T) { + _, err := replaceRefInTenant1() + require.ErrorAs(t, err, &forbidden) + }) + + t.Run(fmt.Sprintf("succeeds with permissions on %s only", tenants[0].Name), func(t *testing.T) { + role := &models.Role{ + Name: &roleName, + Permissions: addReferencePermissions(articlesCls.Class, paragraphsCls.Class, tenants[0].Name), + } + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer func() { + helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + helper.DeleteRole(t, adminKey, roleName) + }() + _, err := replaceRefInTenant1() + require.NoError(t, err) + }) + + t.Run(fmt.Sprintf("fails with permissions on %s only", tenants[1].Name), func(t *testing.T) { + role := &models.Role{ + Name: &roleName, + Permissions: addReferencePermissions(articlesCls.Class, paragraphsCls.Class, tenants[1].Name), + } + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer func() { + helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + helper.DeleteRole(t, adminKey, roleName) + }() + _, err := replaceRefInTenant1() + require.ErrorAs(t, err, &forbidden) + }) + }) + + t.Run("Reference delete (DELETE)", func(t *testing.T) { + var forbidden *objects.ObjectsClassReferencesDeleteForbidden + t.Run("fails with no permissions", func(t *testing.T) { + _, err := deleteRefInTenant1() + require.ErrorAs(t, err, &forbidden) + }) + + t.Run(fmt.Sprintf("succeeds with permissions on %s only", tenants[0].Name), func(t *testing.T) { + role := &models.Role{ + Name: &roleName, + Permissions: 
deleteReferencePermissions(articlesCls.Class, paragraphsCls.Class, tenants[0].Name), + } + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer func() { + helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + helper.DeleteRole(t, adminKey, roleName) + }() + _, err := deleteRefInTenant1() + require.NoError(t, err) + }) + + t.Run(fmt.Sprintf("fails with permissions on %s only", tenants[1].Name), func(t *testing.T) { + role := &models.Role{ + Name: &roleName, + Permissions: deleteReferencePermissions(articlesCls.Class, paragraphsCls.Class, tenants[1].Name), + } + helper.CreateRole(t, adminKey, role) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + defer func() { + helper.RevokeRoleFromUser(t, adminKey, roleName, customUser) + helper.DeleteRole(t, adminKey, roleName) + }() + _, err := deleteRefInTenant1() + require.ErrorAs(t, err, &forbidden) + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/replication_replicate_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/replication_replicate_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b1e2b11e18a1b044a35f9f4a687f4b1e78052792 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/replication_replicate_test.go @@ -0,0 +1,206 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/nodes" + "github.com/weaviate/weaviate/client/replication" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/verbosity" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthzReplicationReplicate(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + testRoleName := "testRole" + customUser := "custom-user" + customKey := "custom-key" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + + compose, err := docker.New(). + WithWeaviateEnv("AUTOSCHEMA_ENABLED", "false"). + With3NodeCluster(). + WithRBAC(). + WithApiKey(). + WithUserApiKey(adminUser, adminKey). + WithRbacRoots(adminUser). + WithUserApiKey(customUser, customKey). 
+ Start(ctx) + require.Nil(t, err) + + defer func() { + helper.ResetClient() + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + cancel() + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + + paragraphClass := articles.ParagraphsClass() + + helper.CreateClassAuth(t, paragraphClass, adminKey) + defer helper.DeleteClassAuth(t, paragraphClass.Class, adminKey) + + req := getReplicateRequest(t, paragraphClass.Class, adminKey) + + helper.CreateRole(t, adminKey, &models.Role{ + Name: &testRoleName, + Permissions: []*models.Permission{}, + }) + defer helper.DeleteRole(t, adminKey, testRoleName) + + helper.AssignRoleToUser(t, adminKey, testRoleName, customUser) + defer helper.RevokeRoleFromUser(t, adminKey, testRoleName, customUser) + + createReplication := &models.Permission{ + Action: &authorization.CreateReplicate, + Replicate: &models.PermissionReplicate{ + Collection: req.Collection, + Shard: req.Shard, + }, + } + readReplication := &models.Permission{ + Action: &authorization.ReadReplicate, + Replicate: &models.PermissionReplicate{ + Collection: req.Collection, + Shard: req.Shard, + }, + } + updateReplication := &models.Permission{ + Action: &authorization.UpdateReplicate, + Replicate: &models.PermissionReplicate{ + Collection: req.Collection, + Shard: req.Shard, + }, + } + deleteReplication := &models.Permission{ + Action: &authorization.DeleteReplicate, + Replicate: &models.PermissionReplicate{ + Collection: req.Collection, + Shard: req.Shard, + }, + } + + var replicationId strfmt.UUID + + t.Run("Fail to replicate a shard without CREATE permissions", func(t *testing.T) { + _, err := helper.Client(t).Replication.Replicate(replication.NewReplicateParams().WithBody(req), helper.CreateAuth(customKey)) + require.NotNil(t, err) + require.IsType(t, replication.NewReplicateForbidden(), err) + }) + + // Give permissions to replicate a shard + helper.AddPermissions(t, adminKey, testRoleName, 
createReplication)
+
+	t.Run("Replicate a shard with permissions", func(t *testing.T) {
+		require.EventuallyWithT(t, func(ct *assert.CollectT) {
+			resp, err := helper.Client(t).Replication.
+				Replicate(replication.NewReplicateParams().WithBody(req), helper.CreateAuth(customKey))
+			// errors.Is compares by identity and can never match a freshly
+			// constructed *ReplicateForbidden, so the original check treated a
+			// 403 (permission not yet propagated) as fatal. errors.As matches
+			// by type and lets EventuallyWithT retry the forbidden case.
+			var replicateForbidden *replication.ReplicateForbidden
+			if err != nil && !errors.As(err, &replicateForbidden) {
+				t.Fatalf("failed to replicate shard: %v", err)
+			}
+			require.Nil(ct, err)
+			replicationId = *resp.Payload.ID
+		}, 2*time.Second, 100*time.Millisecond, "op should be started")
+	})
+
+	t.Run("Fail to cancel a replication of a shard without UPDATE permissions", func(t *testing.T) {
+		_, err := helper.Client(t).Replication.CancelReplication(replication.NewCancelReplicationParams().WithID(replicationId), helper.CreateAuth(customKey))
+		require.NotNil(t, err)
+		require.IsType(t, replication.NewCancelReplicationForbidden(), err)
+	})
+
+	// Give permissions to cancel a replication of a shard
+	helper.AddPermissions(t, adminKey, testRoleName, updateReplication)
+
+	t.Run("Cancel a replication of a shard with permissions", func(t *testing.T) {
+		require.EventuallyWithT(t, func(ct *assert.CollectT) {
+			resp, err := helper.Client(t).Replication.
+				CancelReplication(replication.NewCancelReplicationParams().WithID(replicationId), helper.CreateAuth(customKey))
+			// Type-based check (errors.As) — see comment in the replicate subtest.
+			var cancelForbidden *replication.CancelReplicationForbidden
+			if err != nil && !errors.As(err, &cancelForbidden) {
+				t.Fatalf("failed to cancel replication: %v", err)
+			}
+			require.Nil(ct, err)
+			require.IsType(ct, replication.NewCancelReplicationNoContent(), resp)
+		}, 2*time.Second, 100*time.Millisecond, "op should be cancelled")
+	})
+
+	t.Run("Fail to read a replication of a shard without READ permissions", func(t *testing.T) {
+		_, err := helper.Client(t).Replication.ReplicationDetails(replication.NewReplicationDetailsParams().WithID(replicationId), helper.CreateAuth(customKey))
+		require.NotNil(t, err)
+		require.IsType(t, replication.NewReplicationDetailsForbidden(), err)
+	})
+
+	// Give permissions to read a replication of a shard
+	helper.AddPermissions(t, adminKey, testRoleName, readReplication)
+
+	t.Run("Read a replication of a shard with permissions", func(t *testing.T) {
+		require.EventuallyWithT(t, func(ct *assert.CollectT) {
+			resp, err := helper.Client(t).Replication.
+				ReplicationDetails(replication.NewReplicationDetailsParams().WithID(replicationId), helper.CreateAuth(customKey))
+			// Type-based check (errors.As) — see comment in the replicate subtest.
+			var detailsForbidden *replication.ReplicationDetailsForbidden
+			if err != nil && !errors.As(err, &detailsForbidden) {
+				t.Fatalf("failed to read replication: %v", err)
+			}
+			require.Nil(ct, err)
+			require.Equal(ct, *resp.Payload.ID, replicationId)
+		}, 2*time.Second, 100*time.Millisecond, "op should be read")
+	})
+
+	t.Run("Fail to delete a replication of a shard without DELETE permissions", func(t *testing.T) {
+		_, err := helper.Client(t).Replication.DeleteReplication(replication.NewDeleteReplicationParams().WithID(replicationId), helper.CreateAuth(customKey))
+		require.NotNil(t, err)
+		require.IsType(t, replication.NewDeleteReplicationForbidden(), err)
+	})
+
+	// Give permissions to delete a replication of a shard
+	helper.AddPermissions(t, adminKey, testRoleName, deleteReplication)
+
+	t.Run("Delete a replication of a shard with permissions", func(t *testing.T) {
+		require.EventuallyWithT(t, func(ct *assert.CollectT) {
+			resp, err := helper.Client(t).Replication.
+				DeleteReplication(replication.NewDeleteReplicationParams().WithID(replicationId), helper.CreateAuth(customKey))
+			// Type-based check (errors.As) — see comment in the replicate subtest.
+			var deleteForbidden *replication.DeleteReplicationForbidden
+			if err != nil && !errors.As(err, &deleteForbidden) {
+				t.Fatalf("failed to delete replication: %v", err)
+			}
+			require.Nil(ct, err)
+			require.IsType(ct, replication.NewDeleteReplicationNoContent(), resp)
+		}, 2*time.Second, 100*time.Millisecond, "op should be deleted")
+	})
+}
+
+// getReplicateRequest builds a replica-movement request for className by
+// querying the cluster's node/shard layout: the first node holding a shard is
+// the source and the second node in the listing the target.
+func getReplicateRequest(t *testing.T, className, key string) *models.ReplicationReplicateReplicaRequest {
+	verbose := verbosity.OutputVerbose
+	// Named nodesResp to avoid shadowing the imported nodes client package
+	// that is referenced on the same line.
+	nodesResp, err := helper.Client(t).Nodes.NodesGetClass(nodes.NewNodesGetClassParams().WithOutput(&verbose).WithClassName(className), helper.CreateAuth(key))
+	require.Nil(t, err)
+	return &models.ReplicationReplicateReplicaRequest{
+		Collection: &className,
+		SourceNode: &nodesResp.Payload.Nodes[0].Name,
+		TargetNode: &nodesResp.Payload.Nodes[1].Name,
+		Shard:      &nodesResp.Payload.Nodes[0].Shards[0].Name,
+	}
+}
diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/roles_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/roles_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..1268d741f1b5f68d65fac6e32aba944f960265a2
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/roles_test.go
@@ -0,0 +1,1235 @@
+//                           _       _
+//  __      _____  __ ___   ___  __ _| |_ ___
+//  \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//   \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//    \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io
//

package authz

import (
	"context"
	"errors"
	"fmt"
	"sync"
	"testing"
	"time"

	"github.com/stretchr/testify/require"

	"github.com/weaviate/weaviate/client/authz"
	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/test/docker"
	"github.com/weaviate/weaviate/test/helper"
	"github.com/weaviate/weaviate/usecases/auth/authorization"
)

// TestUserWithSimilarBuiltInRoleName verifies that a non-root user
// ("custom-admin-user", whose name merely resembles an admin name) cannot
// create a role.
func TestUserWithSimilarBuiltInRoleName(t *testing.T) {
	customUser := "custom-admin-user"
	customKey := "custom-key"
	customAuth := helper.CreateAuth(customKey)
	testingRole := "testingOwnRole"
	adminKey := "admin-key"
	adminUser := "admin-user"
	adminAuth := helper.CreateAuth(adminKey)

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
	defer cancel()

	// Single-node Weaviate with API-key auth, RBAC enabled, and only adminUser
	// as an RBAC root.
	compose, err := docker.New().WithWeaviate().WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(customUser, customKey).
		WithRBAC().WithRbacRoots(adminUser).Start(ctx)
	require.Nil(t, err)
	defer func() {
		if err := compose.Terminate(ctx); err != nil {
			t.Fatalf("failed to terminate test containers: %v", err)
		}
	}()

	helper.SetupClient(compose.GetWeaviate().URI())
	defer helper.ResetClient()

	// Best-effort cleanup of leftovers from previous runs; error intentionally ignored.
	helper.Client(t).Authz.DeleteRole(
		authz.NewDeleteRoleParams().WithID(testingRole),
		adminAuth,
	)

	t.Run("Create role with custom user - fail", func(t *testing.T) {
		_, err = helper.Client(t).Authz.CreateRole(
			authz.NewCreateRoleParams().WithBody(&models.Role{
				Name: &testingRole,
				Permissions: []*models.Permission{
					{
						Action: String(authorization.CreateCollections),
						Collections: &models.PermissionCollections{
							Collection: String("*"),
						},
					},
				},
			}),
			customAuth,
		)
		require.Error(t, err)
	})
}

// TestAuthzBuiltInRolesJourney verifies that built-in roles (here "admin")
// are immutable: they cannot be created, deleted, or have permissions added
// or removed, each attempt returning a BadRequest mentioning "built-in role".
func TestAuthzBuiltInRolesJourney(t *testing.T) {
	var err error

	adminUser := "admin-user"
	adminKey := "admin-key"
	adminRole := "admin"

	clientAuth := helper.CreateAuth(adminKey)

	_, down := composeUp(t, map[string]string{adminUser: adminKey}, nil, nil)
	defer down()

	t.Run("get all roles to check if i have perm.", func(t *testing.T) {
		roles := helper.GetRoles(t, adminKey)
		require.Equal(t, NumBuildInRoles, len(roles))
	})

	t.Run("fail to create builtin role", func(t *testing.T) {
		_, err = helper.Client(t).Authz.CreateRole(
			authz.NewCreateRoleParams().WithBody(&models.Role{
				Name: &adminRole,
				Permissions: []*models.Permission{{
					Action:      String(authorization.CreateCollections),
					Collections: &models.PermissionCollections{Collection: String("*")},
				}},
			}),
			clientAuth,
		)
		require.NotNil(t, err)
		var parsed *authz.CreateRoleBadRequest
		require.True(t, errors.As(err, &parsed))
		require.Contains(t, parsed.Payload.Error[0].Message, "built-in role")
	})

	t.Run("fail to delete builtin role", func(t *testing.T) {
		_, err = helper.Client(t).Authz.DeleteRole(
			authz.NewDeleteRoleParams().WithID(adminRole),
			clientAuth,
		)
		require.NotNil(t, err)
		var parsed *authz.DeleteRoleBadRequest
		require.True(t, errors.As(err, &parsed))
		require.Contains(t, parsed.Payload.Error[0].Message, "built-in role")
	})

	t.Run("add builtin role permission", func(t *testing.T) {
		_, err = helper.Client(t).Authz.AddPermissions(
			authz.NewAddPermissionsParams().WithID(adminRole).WithBody(authz.AddPermissionsBody{
				Permissions: []*models.Permission{{
					Action:      String(authorization.CreateCollections),
					Collections: &models.PermissionCollections{Collection: String("*")},
				}},
			}),
			clientAuth,
		)
		require.NotNil(t, err)
		var parsed *authz.AddPermissionsBadRequest
		require.True(t, errors.As(err, &parsed))
		require.Contains(t, parsed.Payload.Error[0].Message, "built-in role")
	})

	t.Run("remove builtin role permission", func(t *testing.T) {
		_, err = helper.Client(t).Authz.RemovePermissions(
			authz.NewRemovePermissionsParams().WithID(adminRole).WithBody(authz.RemovePermissionsBody{
				Permissions: []*models.Permission{{
					Action:      String(authorization.CreateCollections),
					Collections: &models.PermissionCollections{Collection: String("*")},
				}},
			}),
			clientAuth,
		)
		require.NotNil(t, err)
		var parsed *authz.RemovePermissionsBadRequest
		require.True(t, errors.As(err, &parsed))
		require.Contains(t, parsed.Payload.Error[0].Message, "built-in role")
	})
}

// TestAuthzRolesJourney walks the full custom-role lifecycle: create,
// duplicate-create conflict, read, add/remove permissions, assign to a user,
// delete, and not-found errors for operations on missing roles.
func TestAuthzRolesJourney(t *testing.T) {
	var err error

	adminUser := "admin-user"
	adminKey := "admin-key"
	existingRole := "root"

	testRoleName := "testRole"
	createCollectionsAction := authorization.CreateCollections
	deleteCollectionsAction := authorization.DeleteCollections
	all := "*"

	testRole1 := &models.Role{
		Name: &testRoleName,
		Permissions: []*models.Permission{{
			Action:      &createCollectionsAction,
			Collections: &models.PermissionCollections{Collection: &all},
		}},
	}

	clientAuth := helper.CreateAuth(adminKey)

	_, down := composeUp(t, map[string]string{adminUser: adminKey}, nil, nil)
	defer down()

	t.Run("get all roles before create", func(t *testing.T) {
		roles := helper.GetRoles(t, adminKey)
		require.Equal(t, NumBuildInRoles, len(roles))
	})

	t.Run("create role", func(t *testing.T) {
		helper.CreateRole(t, adminKey, testRole1)
	})

	t.Run("fail to create existing role", func(t *testing.T) {
		_, err = helper.Client(t).Authz.CreateRole(authz.NewCreateRoleParams().WithBody(testRole1), clientAuth)
		require.NotNil(t, err)
		var parsed *authz.CreateRoleConflict
		require.True(t, errors.As(err, &parsed))
		require.Contains(t, parsed.Payload.Error[0].Message, "already exists")
	})

	t.Run("get all roles after create", func(t *testing.T) {
		roles := helper.GetRoles(t, adminKey)
		require.Equal(t, NumBuildInRoles+1, len(roles))
	})

	t.Run("get role by name", func(t *testing.T) {
		role := helper.GetRoleByName(t, adminKey, testRoleName)
		require.NotNil(t, role)
		require.Equal(t, testRoleName, *role.Name)
		require.Equal(t, 1, len(role.Permissions))
		require.Equal(t,
createCollectionsAction, *role.Permissions[0].Action)
	})

	t.Run("add permission to role", func(t *testing.T) {
		_, err := helper.Client(t).Authz.AddPermissions(authz.NewAddPermissionsParams().WithID(testRoleName).WithBody(authz.AddPermissionsBody{
			Permissions: []*models.Permission{{Action: &deleteCollectionsAction, Collections: &models.PermissionCollections{Collection: &all}}},
		}), clientAuth)
		require.Nil(t, err)
	})

	t.Run("get role by name after adding permission", func(t *testing.T) {
		res, err := helper.Client(t).Authz.GetRole(authz.NewGetRoleParams().WithID(testRoleName), clientAuth)
		require.Nil(t, err)
		require.Equal(t, testRoleName, *res.Payload.Name)
		require.Equal(t, 2, len(res.Payload.Permissions))
		require.Equal(t, createCollectionsAction, *res.Payload.Permissions[0].Action)
		require.Equal(t, deleteCollectionsAction, *res.Payload.Permissions[1].Action)
	})

	// Stripping a role of all permissions must succeed and leave the (empty) role behind.
	t.Run("removing all permissions from role allowed without role deletion", func(t *testing.T) {
		_, err := helper.Client(t).Authz.RemovePermissions(authz.NewRemovePermissionsParams().WithID(testRoleName).WithBody(authz.RemovePermissionsBody{
			Permissions: []*models.Permission{
				helper.NewCollectionsPermission().WithAction(createCollectionsAction).WithCollection(all).Permission(),
				helper.NewCollectionsPermission().WithAction(deleteCollectionsAction).WithCollection(all).Permission(),
			},
		}), clientAuth)
		require.Nil(t, err)
	})

	t.Run("get role by name after removing permission", func(t *testing.T) {
		role := helper.GetRoleByName(t, adminKey, testRoleName)
		require.NotNil(t, role)
		require.Equal(t, testRoleName, *role.Name)
		require.Equal(t, 0, len(role.Permissions))
	})

	t.Run("assign role to user", func(t *testing.T) {
		helper.AssignRoleToUser(t, adminKey, testRoleName, adminUser)
	})

	t.Run("get roles for user after assignment", func(t *testing.T) {
		res, err := helper.Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID(adminUser).WithUserType(string(models.UserTypeInputDb)), clientAuth)
		require.Nil(t, err)
		// adminUser now carries both its pre-existing "root" role and the new one.
		require.Equal(t, 2, len(res.Payload))
		names := make([]string, len(res.Payload))
		for i := range res.Payload {
			names[i] = *res.Payload[i].Name
		}

		require.ElementsMatch(t, names, []string{existingRole, testRoleName})
	})

	t.Run("get users for role after assignment", func(t *testing.T) {
		roles := helper.GetUserForRoles(t, testRoleName, adminKey)
		require.Equal(t, 1, len(roles))
		require.Equal(t, adminUser, roles[0])
	})

	t.Run("delete role by name", func(t *testing.T) {
		helper.DeleteRole(t, adminKey, testRoleName)
	})

	// Deleting the role must also drop its assignment from the user.
	t.Run("get roles for user after deletion", func(t *testing.T) {
		res, err := helper.Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID(adminUser).WithUserType(string(models.UserTypeInputDb)), clientAuth)
		require.Nil(t, err)
		require.Equal(t, 1, len(res.Payload))
		require.Equal(t, existingRole, *res.Payload[0].Name)
	})

	t.Run("get all roles after delete", func(t *testing.T) {
		roles := helper.GetRoles(t, adminKey)
		require.Equal(t, NumBuildInRoles, len(roles))
	})

	t.Run("get non-existent role by name", func(t *testing.T) {
		_, err := helper.Client(t).Authz.GetRole(authz.NewGetRoleParams().WithID(testRoleName), clientAuth)
		require.NotNil(t, err)
		require.ErrorIs(t, err, authz.NewGetRoleNotFound())
	})

	t.Run("error with add permissions on non-existent role", func(t *testing.T) {
		_, err = helper.Client(t).Authz.AddPermissions(authz.NewAddPermissionsParams().WithID("upsert-role").WithBody(authz.AddPermissionsBody{
			Permissions: []*models.Permission{{Action: &createCollectionsAction, Collections: &models.PermissionCollections{Collection: &all}}},
		}), clientAuth)
		require.NotNil(t, err)
		require.ErrorIs(t, err, authz.NewAddPermissionsNotFound())
	})

	t.Run("error with remove permissions on non-existent role", func(t *testing.T) {
		_, err = helper.Client(t).Authz.RemovePermissions(authz.NewRemovePermissionsParams().WithID("upsert-role").WithBody(authz.RemovePermissionsBody{
			Permissions: []*models.Permission{{Action: &createCollectionsAction, Collections: &models.PermissionCollections{Collection: &all}}},
		}), clientAuth)
		require.NotNil(t, err)
		require.ErrorIs(t, err, authz.NewRemovePermissionsNotFound())
	})
}

// TestAuthzRolesRemoveAlsoAssignments verifies that deleting a role removes
// its user assignments: recreating the same role afterwards must NOT restore
// the previous assignment.
func TestAuthzRolesRemoveAlsoAssignments(t *testing.T) {
	adminUser := "admin-user"
	adminKey := "admin-key"

	testRoleName := "testRole"
	testUser := "test-user"
	testKey := "test-key"

	testRole := &models.Role{
		Name: &testRoleName,
		Permissions: []*models.Permission{{
			Action: &authorization.CreateCollections,
			Collections: &models.PermissionCollections{
				Collection: authorization.All,
			},
		}},
	}

	_, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{testUser: testKey}, nil)
	defer down()

	t.Run("get all roles before create", func(t *testing.T) {
		roles := helper.GetRoles(t, adminKey)
		require.Equal(t, NumBuildInRoles, len(roles))
	})

	t.Run("create role", func(t *testing.T) {
		helper.CreateRole(t, adminKey, testRole)
	})

	t.Run("assign role to user", func(t *testing.T) {
		helper.AssignRoleToUser(t, adminKey, testRoleName, testUser)
	})

	t.Run("get role assigned to user", func(t *testing.T) {
		roles := helper.GetRolesForUser(t, testUser, adminKey, true)
		require.Equal(t, 1, len(roles))
	})

	t.Run("delete role", func(t *testing.T) {
		helper.DeleteRole(t, adminKey, *testRole.Name)
	})

	t.Run("create the role again", func(t *testing.T) {
		helper.CreateRole(t, adminKey, testRole)
	})

	t.Run("get role assigned to user expected none", func(t *testing.T) {
		roles := helper.GetRolesForUser(t, testUser, adminKey, false)
		require.Equal(t, 0, len(roles))
	})
}

// TestAuthzRolesMultiNodeJourney verifies that role mutations made while one
// node of a 3-node cluster is down are replicated to that node once it rejoins.
func TestAuthzRolesMultiNodeJourney(t *testing.T) {
	adminUser := "admin-user"
	adminKey :=
"admin-key" + + testRole := "testRole" + createCollectionsAction := authorization.CreateCollections + deleteCollectionsAction := authorization.DeleteCollections + all := "*" + + clientAuth := helper.CreateAuth(adminKey) + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + compose, err := docker.New().WithWeaviateCluster(3).WithApiKey().WithUserApiKey(adminUser, adminKey).WithRBAC().WithRbacRoots(adminUser).Start(ctx) + require.Nil(t, err) + + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + t.Run("add role while 1 node is down", func(t *testing.T) { + t.Run("get all roles before create", func(t *testing.T) { + roles := helper.GetRoles(t, adminKey) + require.Equal(t, NumBuildInRoles, len(roles)) + }) + + t.Run("StopNode-3", func(t *testing.T) { + require.Nil(t, compose.StopAt(ctx, 2, nil)) + }) + + t.Run("create role", func(t *testing.T) { + helper.CreateRole(t, adminKey, &models.Role{ + Name: &testRole, + Permissions: []*models.Permission{{ + Action: &createCollectionsAction, + Collections: &models.PermissionCollections{Collection: &all}, + }}, + }) + }) + + t.Run("StartNode-3", func(t *testing.T) { + require.Nil(t, compose.StartAt(ctx, 2)) + }) + + helper.SetupClient(compose.GetWeaviateNode3().URI()) + + t.Run("get all roles after create", func(t *testing.T) { + roles := helper.GetRoles(t, adminKey) + require.Equal(t, NumBuildInRoles+1, len(roles)) + }) + + t.Run("get role by name", func(t *testing.T) { + role := helper.GetRoleByName(t, adminKey, testRole) + require.NotNil(t, role) + require.Equal(t, testRole, *role.Name) + require.Equal(t, 1, len(role.Permissions)) + require.Equal(t, createCollectionsAction, *role.Permissions[0].Action) + }) + + t.Run("add permission to role Node3", func(t *testing.T) { + _, err := 
helper.Client(t).Authz.AddPermissions(authz.NewAddPermissionsParams().WithID(testRole).WithBody(authz.AddPermissionsBody{ + Permissions: []*models.Permission{{Action: &deleteCollectionsAction, Collections: &models.PermissionCollections{Collection: &all}}}, + }), clientAuth) + require.Nil(t, err) + }) + + helper.SetupClient(compose.GetWeaviate().URI()) + + t.Run("get role by name after adding permission Node1", func(t *testing.T) { + role := helper.GetRoleByName(t, adminKey, testRole) + require.NotNil(t, role) + require.Equal(t, testRole, *role.Name) + require.Equal(t, 2, len(role.Permissions)) + require.Equal(t, createCollectionsAction, *role.Permissions[0].Action) + require.Equal(t, deleteCollectionsAction, *role.Permissions[1].Action) + }) + }) +} + +func TestAuthzRolesHasPermission(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + customUser := "custom-user" + customKey := "custom-key" + + testRole := "testRole" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + t.Run("create role", func(t *testing.T) { + helper.CreateRole(t, adminKey, &models.Role{ + Name: &testRole, + Permissions: []*models.Permission{{ + Action: &authorization.CreateCollections, + Collections: &models.PermissionCollections{ + Collection: authorization.All, + }, + }}, + }) + }) + + t.Run("true", func(t *testing.T) { + res, err := helper.Client(t).Authz.HasPermission(authz.NewHasPermissionParams().WithID(testRole).WithBody(&models.Permission{ + Action: &authorization.CreateCollections, + Collections: &models.PermissionCollections{ + Collection: authorization.All, + }, + }), helper.CreateAuth(adminKey)) + require.Nil(t, err) + require.True(t, res.Payload) + }) + + t.Run("false", func(t *testing.T) { + res, err := helper.Client(t).Authz.HasPermission(authz.NewHasPermissionParams().WithID(testRole).WithBody(&models.Permission{ + Action: &authorization.DeleteCollections, + Collections: 
&models.PermissionCollections{ + Collection: authorization.All, + }, + }), helper.CreateAuth(adminKey)) + require.Nil(t, err) + require.False(t, res.Payload) + }) + + t.Run("forbidden", func(t *testing.T) { + _, err := helper.Client(t).Authz.HasPermission(authz.NewHasPermissionParams().WithID(testRole).WithBody(&models.Permission{ + Action: &authorization.CreateCollections, + Collections: &models.PermissionCollections{ + Collection: authorization.All, + }, + }), helper.CreateAuth(customKey)) + require.NotNil(t, err) + var parsed *authz.HasPermissionForbidden + require.True(t, errors.As(err, &parsed)) + require.Contains(t, parsed.Payload.Error[0].Message, "forbidden") + }) +} + +func TestAuthzRolesHasPermissionMultipleNodes(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + testRole := "testRole" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + compose, err := docker.New().WithWeaviateCluster(3).WithApiKey().WithUserApiKey(adminUser, adminKey).WithRBAC().WithRbacRoots(adminUser).Start(ctx) + require.Nil(t, err) + + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + t.Run("StopNode-3", func(t *testing.T) { + require.Nil(t, compose.StopAt(ctx, 2, nil)) + }) + + t.Run("create role", func(t *testing.T) { + helper.CreateRole(t, adminKey, &models.Role{ + Name: &testRole, + Permissions: []*models.Permission{{ + Action: &authorization.CreateCollections, + Collections: &models.PermissionCollections{ + Collection: authorization.All, + }, + }}, + }) + }) + + t.Run("permission in node 1", func(t *testing.T) { + res, err := helper.Client(t).Authz.HasPermission(authz.NewHasPermissionParams().WithID(testRole).WithBody(&models.Permission{ + Action: &authorization.CreateCollections, + Collections: &models.PermissionCollections{ + Collection: 
authorization.All,
			},
		}), helper.CreateAuth(adminKey))
		require.Nil(t, err)
		require.True(t, res.Payload)
	})

	t.Run("permission in 2 without waiting", func(t *testing.T) {
		helper.SetupClient(compose.GetWeaviateNode2().URI())
		res, err := helper.Client(t).Authz.HasPermission(authz.NewHasPermissionParams().WithID(testRole).WithBody(&models.Permission{
			Action: &authorization.CreateCollections,
			Collections: &models.PermissionCollections{
				Collection: authorization.All,
			},
		}), helper.CreateAuth(adminKey))
		require.Nil(t, err)
		require.True(t, res.Payload)
	})

	t.Run("StartNode-3", func(t *testing.T) {
		require.Nil(t, compose.StartAt(ctx, 2))
	})

	// The rejoined node must answer correctly immediately after restart.
	t.Run("permission in 3 without waiting", func(t *testing.T) {
		helper.SetupClient(compose.GetWeaviateNode3().URI())
		res, err := helper.Client(t).Authz.HasPermission(authz.NewHasPermissionParams().WithID(testRole).WithBody(&models.Permission{
			Action: &authorization.CreateCollections,
			Collections: &models.PermissionCollections{
				Collection: authorization.All,
			},
		}), helper.CreateAuth(adminKey))
		require.Nil(t, err)
		require.True(t, res.Payload)
	})
}

// TestAuthzEmptyRole verifies that a role with an empty permission list can
// be created and shows up in the role listing.
func TestAuthzEmptyRole(t *testing.T) {
	var err error

	adminUser := "admin-user"
	adminKey := "admin-key"
	customEmptyRole := "customEmpty"

	_, down := composeUp(t, map[string]string{adminUser: adminKey}, nil, nil)
	defer down()

	t.Run("create empty role", func(t *testing.T) {
		_, err = helper.Client(t).Authz.CreateRole(
			authz.NewCreateRoleParams().WithBody(&models.Role{
				Name:        &customEmptyRole,
				Permissions: []*models.Permission{},
			}),
			helper.CreateAuth(adminKey),
		)
		require.Nil(t, err)
	})

	t.Run("get all roles, shall be 4 for the newly created empty role", func(t *testing.T) {
		roles := helper.GetRoles(t, adminKey)
		require.Equal(t, NumBuildInRoles+1, len(roles))
	})
}

// TestAuthzRoleRemoveToEmptyAndAddPermission verifies a role survives having
// its last permission removed and can then receive a new permission.
func TestAuthzRoleRemoveToEmptyAndAddPermission(t *testing.T) {
	var err error

	adminUser := "admin-user"
	adminKey := "admin-key"
	customRole := "customRole"

	clientAuth := helper.CreateAuth(adminKey)

	_, down := composeUp(t, map[string]string{adminUser: adminKey}, nil, nil)
	defer down()

	t.Run("create role", func(t *testing.T) {
		_, err = helper.Client(t).Authz.CreateRole(
			authz.NewCreateRoleParams().WithBody(&models.Role{
				Name: &customRole,
				Permissions: []*models.Permission{{
					Action:      String(authorization.CreateCollections),
					Collections: &models.PermissionCollections{Collection: String("*")},
				}},
			}),
			clientAuth,
		)
		require.Nil(t, err)
	})

	t.Run("remove permissions", func(t *testing.T) {
		_, err = helper.Client(t).Authz.RemovePermissions(
			authz.NewRemovePermissionsParams().WithID(customRole).WithBody(authz.RemovePermissionsBody{
				Permissions: []*models.Permission{{
					Action:      String(authorization.CreateCollections),
					Collections: &models.PermissionCollections{Collection: String("*")},
				}},
			}),
			clientAuth,
		)
		require.Nil(t, err)
	})

	// The now-empty role must still be listed.
	t.Run("get all roles, shall be 3 for the newly created empty role", func(t *testing.T) {
		roles := helper.GetRoles(t, adminKey)
		require.Equal(t, NumBuildInRoles+1, len(roles))
	})

	t.Run("get role after deleting permission", func(t *testing.T) {
		role := helper.GetRoleByName(t, adminKey, customRole)
		require.Equal(t, customRole, *role.Name)
		require.Equal(t, 0, len(role.Permissions))
	})

	t.Run("add permissions", func(t *testing.T) {
		_, err = helper.Client(t).Authz.AddPermissions(
			authz.NewAddPermissionsParams().WithID(customRole).WithBody(authz.AddPermissionsBody{
				Permissions: []*models.Permission{{
					Action:      String(authorization.CreateCollections),
					Collections: &models.PermissionCollections{Collection: String("*")},
				}},
			}),
			clientAuth,
		)
		require.Nil(t, err)
	})

	t.Run("get role after adding permission", func(t *testing.T) {
		role := helper.GetRoleByName(t, adminKey, customRole)
		require.Equal(t, customRole, *role.Name)
		require.Equal(t, 1,
len(role.Permissions))
		require.Equal(t, authorization.CreateCollections, *role.Permissions[0].Action)
	})
}

// TestAuthzRoleScopeMatching verifies "scope: match" role-management
// permissions: a limited user may create/update/delete only roles whose
// permissions are a subset of its own, and is rejected (Forbidden) when the
// target role would exceed that scope.
func TestAuthzRoleScopeMatching(t *testing.T) {
	var err error

	// Setup users
	adminUser := "admin-user"
	adminKey := "admin-key"
	adminAuth := helper.CreateAuth(adminKey)

	limitedUser := "custom-user"
	limitedKey := "custom-key"
	limitedAuth := helper.CreateAuth(limitedKey)

	// Setup test roles
	limitedRole := "custom-role"
	newRole := "new-role"
	broaderRole := "broader-role"

	// Start environment with admin and limited user
	_, down := composeUp(t,
		map[string]string{adminUser: adminKey}, // admin users
		map[string]string{limitedUser: limitedKey}, // regular users
		nil,
	)
	defer down()

	// Clean up any existing test roles (best effort; errors ignored)
	helper.Client(t).Authz.DeleteRole(
		authz.NewDeleteRoleParams().WithID(limitedRole),
		adminAuth,
	)
	helper.Client(t).Authz.DeleteRole(
		authz.NewDeleteRoleParams().WithID(newRole),
		adminAuth,
	)
	helper.Client(t).Authz.DeleteRole(
		authz.NewDeleteRoleParams().WithID(broaderRole),
		adminAuth,
	)

	t.Run("setup limited user role", func(t *testing.T) {
		// Create role with limited permissions
		_, err = helper.Client(t).Authz.CreateRole(
			authz.NewCreateRoleParams().WithBody(&models.Role{
				Name: &limitedRole,
				Permissions: []*models.Permission{
					// Add role management permissions with scope matching
					{
						Action: String(authorization.CreateRoles),
						Roles:  &models.PermissionRoles{Role: String("*"), Scope: String(models.PermissionRolesScopeMatch)},
					},
					{
						Action: String(authorization.UpdateRoles),
						Roles:  &models.PermissionRoles{Role: String("*"), Scope: String(models.PermissionRolesScopeMatch)},
					},
					{
						Action: String(authorization.DeleteRoles),
						Roles:  &models.PermissionRoles{Role: String("*"), Scope: String(models.PermissionRolesScopeMatch)},
					},
					// Add collection-specific permissions
					{
						Action: String(authorization.CreateCollections),
						Collections: &models.PermissionCollections{
							Collection: String("Collection1"),
						},
					},
					{
						Action: String(authorization.UpdateCollections),
						Collections: &models.PermissionCollections{
							Collection: String("Collection1"),
						},
					},
				},
			}),
			adminAuth,
		)
		require.NoError(t, err)

		// Assign role to limited user
		helper.AssignRoleToUser(t, adminKey, limitedRole, limitedUser)

		// Verify role assignment and permissions
		roles := helper.GetRolesForUser(t, limitedUser, adminKey, false)
		require.Equal(t, 1, len(roles))
		require.Equal(t, limitedRole, *roles[0].Name)
	})

	t.Run("limited user can create role with equal permissions", func(t *testing.T) {
		_, err = helper.Client(t).Authz.CreateRole(
			authz.NewCreateRoleParams().WithBody(&models.Role{
				Name: &newRole,
				Permissions: []*models.Permission{
					{
						Action: String(authorization.CreateCollections),
						Collections: &models.PermissionCollections{
							Collection: String("Collection1"),
						},
					},
				},
			}),
			limitedAuth,
		)
		require.NoError(t, err)
	})

	t.Run("limited user cannot create role with broader permissions", func(t *testing.T) {
		_, err = helper.Client(t).Authz.CreateRole(
			authz.NewCreateRoleParams().WithBody(&models.Role{
				Name: &broaderRole,
				Permissions: []*models.Permission{
					{
						Action: String(authorization.CreateCollections),
						Collections: &models.PermissionCollections{
							Collection: String("*"),
						},
					},
				},
			}),
			limitedAuth,
		)
		require.Error(t, err)
		var parsed *authz.CreateRoleForbidden
		require.True(t, errors.As(err, &parsed))
	})

	t.Run("limited user can update role within their scope", func(t *testing.T) {
		_, err = helper.Client(t).Authz.AddPermissions(
			authz.NewAddPermissionsParams().WithID(newRole).WithBody(authz.AddPermissionsBody{
				Permissions: []*models.Permission{
					{
						Action: String(authorization.UpdateCollections),
						Collections: &models.PermissionCollections{
							Collection: String("Collection1"),
						},
					},
				},
			}),
			limitedAuth,
		)
		require.NoError(t, err)
	})

	t.Run("limited user cannot update role beyond their scope(AddPermission)", func(t *testing.T) {
		_, err = helper.Client(t).Authz.AddPermissions(
			authz.NewAddPermissionsParams().WithID(newRole).WithBody(authz.AddPermissionsBody{
				Permissions: []*models.Permission{
					{
						Action: String(authorization.CreateCollections),
						Collections: &models.PermissionCollections{
							Collection: String("Collection2"),
						},
					},
				},
			}),
			limitedAuth,
		)
		require.Error(t, err)
		var parsed *authz.AddPermissionsForbidden
		require.True(t, errors.As(err, &parsed))
	})

	t.Run("limited user cannot update role beyond their scope(RemovePermission)", func(t *testing.T) {
		_, err = helper.Client(t).Authz.RemovePermissions(
			authz.NewRemovePermissionsParams().WithID(newRole).WithBody(authz.RemovePermissionsBody{
				Permissions: []*models.Permission{
					{
						Action: String(authorization.UpdateCollections),
						Collections: &models.PermissionCollections{
							Collection: String("Collection2"),
						},
					},
				},
			}),
			limitedAuth,
		)
		require.Error(t, err)
		var parsed *authz.RemovePermissionsForbidden
		require.True(t, errors.As(err, &parsed))
	})

	t.Run("limited user can remove permissions from role with their scope", func(t *testing.T) {
		_, err = helper.Client(t).Authz.RemovePermissions(
			authz.NewRemovePermissionsParams().WithID(newRole).WithBody(authz.RemovePermissionsBody{
				Permissions: []*models.Permission{
					{
						Action: String(authorization.UpdateCollections),
						Collections: &models.PermissionCollections{
							Collection: String("Collection1"),
						},
					},
				},
			}),
			limitedAuth,
		)
		require.NoError(t, err)
	})

	t.Run("limited user can delete role within their scope", func(t *testing.T) {
		roleToDelete := "role-to-delete"
		_, err = helper.Client(t).Authz.CreateRole(
			authz.NewCreateRoleParams().WithBody(&models.Role{
				Name: &roleToDelete,
				Permissions: []*models.Permission{
					{
						Action: String(authorization.CreateCollections),
						Collections: &models.PermissionCollections{
							Collection: String("Collection1"),
						},
					},
				},
			}),
			limitedAuth,
		)
		require.NoError(t, err)

		// Verify limited user can delete the role
		_, err = helper.Client(t).Authz.DeleteRole(
			authz.NewDeleteRoleParams().WithID(roleToDelete),
			limitedAuth,
		)
		require.NoError(t, err)
	})

	t.Run("admin can still manage all roles", func(t *testing.T) {
		// Admin can delete roles created by limited user
		helper.DeleteRole(t, adminKey, newRole)
		// Admin can delete the limited role itself
		helper.DeleteRole(t, adminKey, limitedRole)
		// Clean up broader role if it was created
		helper.Client(t).Authz.DeleteRole(
			authz.NewDeleteRoleParams().WithID(broaderRole),
			adminAuth,
		)
	})
}

// TestAuthzRoleFilteredTenantPermissions verifies that a role scoped to one
// tenant of one collection filters tenant listings down to that tenant.
func TestAuthzRoleFilteredTenantPermissions(t *testing.T) {
	adminUser := "admin-user"
	adminKey := "admin-key"
	adminAuth := helper.CreateAuth(adminKey)

	limitedUser := "custom-user"
	limitedKey := "custom-key"
	limitedAuth := helper.CreateAuth(limitedKey)

	filteredRole := "filtered-role"
	className := "FilteredTenantTestClass"
	className2 := "FilteredTenantTestClass2"
	allowedTenant := "tenant1"
	restrictedTenant := "tenant2"

	_, down := composeUp(t,
		map[string]string{adminUser: adminKey},
		map[string]string{limitedUser: limitedKey},
		nil,
	)
	defer down()

	t.Run("setup collection with tenants", func(t *testing.T) {
		helper.CreateClassAuth(t, &models.Class{
			Class: className,
			MultiTenancyConfig: &models.MultiTenancyConfig{
				Enabled: true,
			},
		}, "admin-key")
		helper.CreateClassAuth(t, &models.Class{
			Class: className2,
			MultiTenancyConfig: &models.MultiTenancyConfig{
				Enabled: true,
			},
		}, "admin-key")

		tenants := []*models.Tenant{
			{Name: allowedTenant, ActivityStatus: models.TenantActivityStatusHOT},
			{Name: restrictedTenant, ActivityStatus: models.TenantActivityStatusHOT},
		}
		helper.CreateTenantsAuth(t, className, tenants, "admin-key")
		helper.CreateTenantsAuth(t,
className2, tenants, "admin-key") + }) + + defer func() { + helper.DeleteClassWithAuthz(t, className, adminAuth) + }() + + t.Run("create filtered role", func(t *testing.T) { + _, err := helper.Client(t).Authz.CreateRole( + authz.NewCreateRoleParams().WithBody(&models.Role{ + Name: &filteredRole, + Permissions: []*models.Permission{ + { + Action: String(authorization.ReadTenants), + Tenants: &models.PermissionTenants{ + Collection: String(className), + Tenant: String(allowedTenant), + }, + }, + }, + }), + adminAuth, + ) + require.NoError(t, err) + + helper.AssignRoleToUser(t, adminKey, filteredRole, limitedUser) + }) + + t.Run("verify filtered tenant permissions", func(t *testing.T) { + tenants, err := helper.GetTenantsWithAuthz(t, className, limitedAuth) + require.NoError(t, err) + require.Equal(t, 1, len(tenants.Payload)) + require.Equal(t, allowedTenant, tenants.Payload[0].Name) + }) +} + +func TestRaceConcurrentRoleCreation(t *testing.T) { + ctx := context.Background() + + adminKey := "admin-key" + adminUser := "admin-user" + + compose, err := docker.New().WithWeaviate(). + WithApiKey().WithUserApiKey(adminUser, adminKey). + WithRBAC().WithRbacRoots(adminUser). + Start(ctx) + require.NoError(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + + for i := 0; i < 10; i++ { + var err1, err2 error + name := fmt.Sprintf("role%d", i) + helper.DeleteRole(t, adminKey, name) // leftovers from previous runs + wg := sync.WaitGroup{} + wg.Add(2) + + // send off two concurrent requests with the same role name, but different permissions. 
+ // If the race is + // - detected correctly, one of the two requests should fail and the resulting role should only have one permission + // - NOT detected correctly, bot requests succeed and the resulting role has two permissions + + go func() { + _, err1 = helper.Client(t).Authz.CreateRole(authz.NewCreateRoleParams().WithBody(&models.Role{ + Name: &name, + Permissions: []*models.Permission{ + { + Action: String(authorization.CreateCollections), + Collections: &models.PermissionCollections{ + Collection: String("Collection1"), + }, + }, + }, + }), helper.CreateAuth(adminKey)) + defer wg.Done() + }() + go func() { + _, err2 = helper.Client(t).Authz.CreateRole(authz.NewCreateRoleParams().WithBody(&models.Role{ + Name: &name, + Permissions: []*models.Permission{ + { + Action: String(authorization.DeleteCollections), + Collections: &models.PermissionCollections{ + Collection: String("Collection1"), + }, + }, + }, + }), helper.CreateAuth(adminKey)) + + defer wg.Done() + }() + + wg.Wait() + require.True(t, (err1 != nil) || (err2 != nil)) // we expect one call to fail + + role := helper.GetRoleByName(t, adminKey, name) + require.NotNil(t, role) + require.Len(t, role.Permissions, 1) + + } +} + +func TestRolesUserExistence(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + compose, err := docker.New().WithWeaviate().WithApiKey().WithUserApiKey(adminUser, adminKey).WithDbUsers(). 
+ WithRBAC().WithRbacRoots(adminUser).Start(ctx) + require.Nil(t, err) + + defer func() { + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + helper.SetupClient(compose.GetWeaviate().URI()) + + roleName := "role1" + helper.DeleteRole(t, adminKey, roleName) + helper.CreateRole(t, adminKey, &models.Role{ + Name: &roleName, + Permissions: []*models.Permission{ + { + Action: String(authorization.DeleteCollections), + Collections: &models.PermissionCollections{ + Collection: String("Collection1"), + }, + }, + }, + }) + defer helper.DeleteRole(t, adminKey, roleName) + + t.Run("Cannot assign or revoke to/from OIDC user (not enabled)", func(t *testing.T) { + resp, err := helper.Client(t).Authz.AssignRoleToUser( + authz.NewAssignRoleToUserParams().WithID("random-user").WithBody(authz.AssignRoleToUserBody{Roles: []string{roleName}, UserType: models.UserTypeInputOidc}), + helper.CreateAuth(adminKey), + ) + require.Nil(t, resp) + require.Error(t, err) + + resp2, err := helper.Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID("random-user").WithBody(authz.RevokeRoleFromUserBody{Roles: []string{roleName}, UserType: models.UserTypeInputOidc}), + helper.CreateAuth(adminKey), + ) + require.Nil(t, resp2) + require.Error(t, err) + }) + + t.Run("Cannot assign or revoke to/from non-existent db user", func(t *testing.T) { + resp, err := helper.Client(t).Authz.AssignRoleToUser( + authz.NewAssignRoleToUserParams().WithID("random-user").WithBody(authz.AssignRoleToUserBody{Roles: []string{roleName}, UserType: models.UserTypeInputDb}), + helper.CreateAuth(adminKey), + ) + require.Nil(t, resp) + require.Error(t, err) + + resp2, err := helper.Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID("random-user").WithBody(authz.RevokeRoleFromUserBody{Roles: []string{roleName}, UserType: models.UserTypeInputDb}), + helper.CreateAuth(adminKey), + ) + require.Nil(t, resp2) + require.Error(t, err) + }) + + 
t.Run("No assignment of root user to oidc when disabled", func(t *testing.T) { + users := helper.GetUserForRolesBoth(t, "root", adminKey) + for _, user := range users { + require.NotEqual(t, *user.UserType, models.UserTypeOutputOidc) + } + }) +} + +func TestGetRolesForUserPermission(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + + customUser := "custom-user" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + compose, err := docker.New().WithWeaviate().WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(customUser, "a").WithDbUsers(). + WithRBAC().WithRbacRoots(adminUser).Start(ctx) + require.Nil(t, err) + + defer func() { + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + helper.SetupClient(compose.GetWeaviate().URI()) + + all := "*" + + userRoleName := "userRole" + helper.DeleteRole(t, adminKey, userRoleName) + defer helper.DeleteRole(t, adminKey, userRoleName) + helper.CreateRole(t, adminKey, &models.Role{ + Name: &userRoleName, + Permissions: []*models.Permission{ + { + Action: String(authorization.ReadUsers), + Users: &models.PermissionUsers{ + Users: &all, + }, + }, + }, + }) + + roleRoleName := "roleRole" + helper.DeleteRole(t, adminKey, roleRoleName) + defer helper.DeleteRole(t, adminKey, roleRoleName) + helper.CreateRole(t, adminKey, &models.Role{ + Name: &roleRoleName, + Permissions: []*models.Permission{ + { + Action: String(authorization.ReadRoles), + Roles: &models.PermissionRoles{ + Role: &all, + }, + }, + }, + }) + + helper.AssignRoleToUser(t, adminKey, userRoleName, customUser) + + userName := "user" + userKey := helper.CreateUser(t, userName, adminKey) + defer helper.DeleteUser(t, userName, adminKey) + + falsep := false + truep := true + t.Run("No permissions", func(t *testing.T) { + _, err := helper.Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID(customUser).WithUserType(string(models.UserTypeInputDb)), 
helper.CreateAuth(userKey)) + require.Error(t, err) + }) + + t.Run("With user permission", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, userRoleName, userName) + defer helper.RevokeRoleFromUser(t, adminKey, userRoleName, userName) + + // can get role names + resp, err := helper.Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID(customUser).WithUserType(string(models.UserTypeInputDb)).WithIncludeFullRoles(&falsep), helper.CreateAuth(userKey)) + require.NoError(t, err) + require.NotNil(t, resp) + require.Len(t, resp.Payload, 1) + require.Equal(t, userRoleName, *resp.Payload[0].Name) + require.Nil(t, resp.Payload[0].Permissions) + + // cannot get all roles + _, err = helper.Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID(customUser).WithUserType(string(models.UserTypeInputDb)).WithIncludeFullRoles(&truep), helper.CreateAuth(userKey)) + require.Error(t, err) + }) + + t.Run("With user and role permission", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, userRoleName, userName) + helper.AssignRoleToUser(t, adminKey, roleRoleName, userName) + defer helper.RevokeRoleFromUser(t, adminKey, userRoleName, userName) + defer helper.RevokeRoleFromUser(t, adminKey, roleRoleName, userName) + + // can get role names + resp, err := helper.Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID(customUser).WithUserType(string(models.UserTypeInputDb)).WithIncludeFullRoles(&falsep), helper.CreateAuth(userKey)) + require.NoError(t, err) + require.NotNil(t, resp) + require.Len(t, resp.Payload, 1) + require.Equal(t, userRoleName, *resp.Payload[0].Name) + require.Nil(t, resp.Payload[0].Permissions) + + // can get all roles + resp, err = helper.Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID(customUser).WithUserType(string(models.UserTypeInputDb)).WithIncludeFullRoles(&truep), helper.CreateAuth(userKey)) + require.NoError(t, err) + require.NotNil(t, resp) + require.Len(t, 
resp.Payload, 1) + require.Equal(t, userRoleName, *resp.Payload[0].Name) + require.NotNil(t, resp.Payload[0].Permissions) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/setup.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/setup.go new file mode 100644 index 0000000000000000000000000000000000000000..dd9e8d57bbbe09a05a28e7de8e2f6c18b4ec7e20 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/setup.go @@ -0,0 +1,60 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" +) + +func composeUp(t *testing.T, admins map[string]string, users map[string]string, viewers map[string]string) (*docker.DockerCompose, func()) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + + builder := docker.New().WithWeaviateEnv("AUTOSCHEMA_ENABLED", "false").WithWeaviateWithGRPC().WithRBAC().WithApiKey() + adminUserNames := make([]string, 0, len(admins)) + viewerUserNames := make([]string, 0, len(viewers)) + for userName, key := range admins { + builder = builder.WithUserApiKey(userName, key) + adminUserNames = append(adminUserNames, userName) + } + for userName, key := range viewers { + builder = builder.WithUserApiKey(userName, key) + viewerUserNames = append(viewerUserNames, userName) + } + if len(admins) > 0 { + builder = builder.WithRbacRoots(adminUserNames...) + } + if len(viewers) > 0 { + builder = builder.WithRbacViewers(viewerUserNames...) 
+ } + for userName, key := range users { + builder = builder.WithUserApiKey(userName, key) + } + compose, err := builder.Start(ctx) + require.Nil(t, err) + + helper.SetupClient(compose.GetWeaviate().URI()) + helper.SetupGRPCClient(t, compose.GetWeaviate().GrpcURI()) + + return compose, func() { + helper.ResetClient() + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + cancel() + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/swagger_helper.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/swagger_helper.go new file mode 100644 index 0000000000000000000000000000000000000000..912e7fd78087ea375ed37be983b3b0d77f2bbf96 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/swagger_helper.go @@ -0,0 +1,319 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "encoding/json" + "fmt" + "log" + "math/rand/v2" + "os" + "sort" + "strings" + "text/tabwriter" + + "github.com/go-openapi/loads" + "github.com/go-openapi/spec" + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + + "github.com/weaviate/weaviate/entities/models" + eschema "github.com/weaviate/weaviate/entities/schema" +) + +type endpoint struct { + path string + method string + summery string + validGeneratedBodyData []byte +} + +type collector struct { + endpoints []endpoint + methodEndpoints map[string][]endpoint +} + +func newCollector() (*collector, error) { + c := &collector{ + methodEndpoints: make(map[string][]endpoint), + endpoints: make([]endpoint, 0), + } + endpoints, err := c.collectEndpoints() + if err != nil { + return nil, err + } + c.endpoints = endpoints + return c, nil +} + +func (c *collector) collectEndpoints() ([]endpoint, error) { + document, err := loads.Spec("../../../openapi-specs/schema.json") + if err != nil { + return nil, fmt.Errorf("failed to load Swagger spec: %w", err) + } + + specDoc := document.Spec() + for path, pathItem := range specDoc.Paths.Paths { + methods := map[string]*spec.Operation{ + "GET": pathItem.Get, + "POST": pathItem.Post, + "PUT": pathItem.Put, + "DELETE": pathItem.Delete, + "PATCH": pathItem.Patch, + "HEAD": pathItem.Head, + "OPTIONS": pathItem.Options, + } + + for method, operation := range methods { + if operation == nil { + continue + } + + if !strings.Contains(path, "group") || method != "POST" { + continue + } + + var requestBodyData []byte + for _, param := range operation.Parameters { + if param.In == "body" && param.Schema != nil { + requestBodyData, err = generateValidRequestBody(¶m, specDoc.Definitions) + if err != nil { + return nil, fmt.Errorf("failed to generate request body data: %w", err) + } + } + } + + endpoint := endpoint{ + path: path, + method: method, + summery: operation.Summary, + validGeneratedBodyData: 
requestBodyData, + } + + c.methodEndpoints[method] = append(c.methodEndpoints[method], endpoint) + c.endpoints = append(c.endpoints, endpoint) + } + } + + // NOTE: Sorting is done to keep the endpoints order deterministic, + // because the default order returned by swagger apis are random + // which can cause trouble say if GET is called after DELETE endpoints. + sort.Slice(c.endpoints, func(i, j int) bool { + if c.endpoints[i].path == c.endpoints[j].path { + return c.endpoints[i].method > c.endpoints[j].method + } + return c.endpoints[i].path < c.endpoints[j].path + }) + + return c.endpoints, nil +} + +func (c *collector) prettyPrint(endpoints ...map[string][]endpoint) { + if len(endpoints) == 0 { + print(c.methodEndpoints) + return + } + + for _, endpointsMap := range endpoints { + print(endpointsMap) + } +} + +func print(endpointsByMethod map[string][]endpoint) { + count := 0 + writer := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', tabwriter.Debug) + for method, endpoints := range endpointsByMethod { + fmt.Fprintf(writer, "\n%s Requests:\n", strings.ToUpper(method)) + fmt.Fprintln(writer, "Path\tMethod\tSummary") + for _, endpoint := range endpoints { + count++ + fmt.Fprintf(writer, "%s\t%s\t%s\n", endpoint.path, endpoint.method, endpoint.summery) + } + } + fmt.Fprintf(writer, "endpoints count: %d\n", count) + writer.Flush() +} + +func (c *collector) allEndpoints() []endpoint { + return c.endpoints +} + +func generateValidRequestBody(param *spec.Parameter, definitions map[string]spec.Schema) ([]byte, error) { + if param.In == "body" && param.Schema != nil { + return generateValidData(param.Schema, definitions) + } + return nil, fmt.Errorf("invalid parameter schema") +} + +func generateValidData(schema *spec.Schema, definitions map[string]spec.Schema) ([]byte, error) { + // needs to be at the top, because it contains a SingleRef + if strings.Contains(schema.Ref.String(), "MultipleRef") { + ref := &models.MultipleRef{ + &models.SingleRef{Beacon: 
strfmt.URI(fmt.Sprintf("weaviate://localhost/ABC/%s", uuid.New().String()))}, + } + jsonData, err := json.Marshal(ref) + if err != nil { + return nil, fmt.Errorf("failed to marshal mock data: %w", err) + } + return jsonData, nil + } + + if schema.Ref.String() != "" { + ref := schema.Ref.String() + + refSchema, err := resolveReference(ref, definitions) + if err != nil { + log.Printf("Failed to resolve reference: %v", err) + return nil, err + } + + return generateValidData(refSchema, definitions) + } + + if len(schema.Type) == 0 { + return []byte("{}"), nil + } + + var mockData interface{} + switch schema.Type[0] { + case "string": + if len(schema.Enum) > 0 { + mockData = schema.Enum[len(schema.Enum)-1] // important for authZ groups, where only OIDC is supported + } else if schema.Format == "uuid" { + mockData = uuid.New().String() + } else if schema.Format == "date-time" { + mockData = "2017-07-21T17:32:28Z" + } else { + mockData = "ABC" + } + case "integer": + mockData = rand.IntN(100) + case "boolean": + mockData = rand.IntN(2) == 0 + case "array": + var array []interface{} + if schema.Items != nil && schema.Items.Schema != nil { + itemSchema := schema.Items.Schema + if strings.Contains(itemSchema.Ref.String(), "WhereFilter") { + all := "*" + whereFilter := &models.WhereFilter{ + Path: []string{"id"}, + Operator: "Like", + ValueText: &all, + } + array = append(array, whereFilter) + mockData = array + jsonData, err := json.Marshal(mockData) + if err != nil { + return nil, fmt.Errorf("failed to marshal mock data: %w", err) + } + return jsonData, nil + } + + if strings.Contains(itemSchema.Ref.String(), "NestedProperty") { + vTrue := true + vFalse := false + nested := &models.NestedProperty{ + Name: "nested_int", + DataType: eschema.DataTypeInt.PropString(), + IndexFilterable: &vTrue, + IndexSearchable: &vFalse, + Tokenization: "", + } + + array = append(array, nested) + mockData = array + jsonData, err := json.Marshal(mockData) + if err != nil { + return nil, 
fmt.Errorf("failed to marshal mock data: %w", err) + } + return jsonData, nil + } + + if strings.Contains(itemSchema.Ref.String(), "BatchReference") { + batch := &models.BatchReference{ + From: strfmt.URI(fmt.Sprintf("weaviate://localhost/ABC/%s/ref", uuid.New().String())), + To: strfmt.URI(fmt.Sprintf("weaviate://localhost/ABC/%s", uuid.New().String())), + } + array = append(array, batch) + mockData = array + jsonData, err := json.Marshal(mockData) + if err != nil { + return nil, fmt.Errorf("failed to marshal mock data: %w", err) + } + return jsonData, nil + } + + if itemSchema.Ref.String() != "" { + refSchema, err := resolveReference(itemSchema.Ref.String(), definitions) + if err != nil { + log.Printf("Failed to resolve array item reference: %v", err) + return nil, err + } + itemSchema = refSchema + data, err := generateValidData(itemSchema, definitions) + if err != nil { + return nil, err + } + var dd interface{} + err = json.Unmarshal(data, &dd) + if err != nil { + return nil, err + } + array = append(array, dd) + } + if itemSchema.Type[0] == "string" { + data, err := generateValidData(itemSchema, definitions) + if err != nil { + return nil, err + } + var dd interface{} + err = json.Unmarshal(data, &dd) + if err != nil { + return nil, err + } + array = append(array, dd) + } + } + mockData = array + case "object": + obj := make(map[string]interface{}) + for propName, propSchema := range schema.Properties { + data, err := generateValidData(&propSchema, definitions) + if err != nil { + return nil, err + } + var dd interface{} + err = json.Unmarshal(data, &dd) + if err != nil { + return nil, err + } + obj[propName] = dd + } + mockData = obj + } + + jsonData, err := json.Marshal(mockData) + if err != nil { + return nil, fmt.Errorf("failed to marshal mock data: %w", err) + } + return jsonData, nil +} + +// resolveReference resolves a reference to a schema definition in the Swagger file +func resolveReference(ref string, definitions map[string]spec.Schema) 
(*spec.Schema, error) { + ref = strings.TrimPrefix(ref, "#/definitions/") + if schema, ok := definitions[ref]; ok { + return &schema, nil + } + return nil, fmt.Errorf("reference %s not found", ref) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/tenants_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/tenants_test.go new file mode 100644 index 0000000000000000000000000000000000000000..fb4e5e0acc85fafa52d9a37197072903675255db --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/tenants_test.go @@ -0,0 +1,290 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "errors" + "testing" + + "github.com/stretchr/testify/require" + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthZTenants(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + adminAuth := helper.CreateAuth(adminKey) + + customUser := "custom-user" + customKey := "custom-key" + + readSchemaAction := authorization.ReadCollections + readTenantAction := authorization.ReadTenants + createTenantAction := authorization.CreateTenants + deleteTenantAction := authorization.DeleteTenants + updateTenantAction := authorization.UpdateTenants + + ctx := context.Background() + compose, err := docker.New().WithWeaviateWithGRPC(). + WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(customUser, customKey). + WithRBAC().WithRbacRoots(adminUser). + WithBackendFilesystem(). 
+ Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + helper.SetupGRPCClient(t, compose.GetWeaviate().GrpcURI()) + defer helper.ResetClient() + + className := "AuthzTenantTestClass" + deleteObjectClass(t, className, adminAuth) + c := &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + } + helper.CreateClassAuth(t, c, adminKey) + defer deleteObjectClass(t, className, adminAuth) + + // user needs to be able to create objects and read the configs + all := "*" + tenant1 := "Tenant1" + tenant2 := "Tenant2" + tenants := []*models.Tenant{{Name: tenant1}, {Name: tenant2}} + tenantNames := []string{tenant1, tenant2} + + createAllTenantsRoleName := "readSchemaAndCreateAllTenants" + createAllTenantsRole := &models.Role{ + Name: &createAllTenantsRoleName, + Permissions: []*models.Permission{ + {Action: &readSchemaAction, Collections: &models.PermissionCollections{Collection: &all}}, + {Action: &createTenantAction, Tenants: &models.PermissionTenants{Collection: &all}}, + }, + } + + createSpecificTenantsRoleName := "readSchemaAndCreateSpecificTenant" + createSpecificTenantsRole := &models.Role{ + Name: &createSpecificTenantsRoleName, + Permissions: []*models.Permission{ + {Action: &readSchemaAction, Collections: &models.PermissionCollections{Collection: &all}}, + {Action: &createTenantAction, Tenants: &models.PermissionTenants{Collection: &all, Tenant: &tenants[0].Name}}, + }, + } + + readAllTenantsRoleName := "readSchemaAndReadAllTenants" + readAllTenantsRole := &models.Role{ + Name: &readAllTenantsRoleName, + Permissions: []*models.Permission{ + {Action: &readSchemaAction, Collections: &models.PermissionCollections{Collection: &all}}, + {Action: &readTenantAction, Tenants: &models.PermissionTenants{Collection: 
&all}}, + }, + } + + readSpecificTenantRoleName := "readSchemaAndReadSpecificTenant" + readSpecificTenantRole := &models.Role{ + Name: &readSpecificTenantRoleName, + Permissions: []*models.Permission{ + {Action: &readSchemaAction, Collections: &models.PermissionCollections{Collection: &all}}, + {Action: &readTenantAction, Tenants: &models.PermissionTenants{Collection: &all, Tenant: &tenants[0].Name}}, + }, + } + + deleteSpecificTenantRoleName := "readSchemaAndDeleteSpecificTenant" + deleteSpecificTenantRole := &models.Role{ + Name: &deleteSpecificTenantRoleName, + Permissions: []*models.Permission{ + {Action: &readSchemaAction, Collections: &models.PermissionCollections{Collection: &all}}, + {Action: &deleteTenantAction, Tenants: &models.PermissionTenants{Collection: &all, Tenant: &tenants[0].Name}}, + }, + } + + updateSpecificTenantRoleName := "readSchemaAndUpdateTenant" + updateSpecificTenantRole := &models.Role{ + Name: &updateSpecificTenantRoleName, + Permissions: []*models.Permission{ + {Action: &readSchemaAction, Collections: &models.PermissionCollections{Collection: &all}}, + {Action: &updateTenantAction, Tenants: &models.PermissionTenants{Collection: &all, Tenant: &tenants[0].Name}}, + }, + } + + roles := []*models.Role{ + createAllTenantsRole, + createSpecificTenantsRole, + readAllTenantsRole, + readSpecificTenantRole, + deleteSpecificTenantRole, + updateSpecificTenantRole, + } + + for _, role := range roles { + helper.DeleteRole(t, adminKey, *role.Name) + helper.CreateRole(t, adminKey, role) + } + defer func() { + for _, role := range roles { + helper.DeleteRole(t, adminKey, *role.Name) + } + }() + + t.Run("Create all tenants", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, *createAllTenantsRole.Name, customUser) + defer helper.RevokeRoleFromUser(t, adminKey, *createAllTenantsRole.Name, customUser) + require.NoError(t, createTenant(t, className, tenants, customKey)) + require.NoError(t, deleteTenant(t, className, tenantNames, adminKey)) + }) 
+ + t.Run("Create specific tenant with needed permission", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, *createSpecificTenantsRole.Name, customUser) + defer helper.RevokeRoleFromUser(t, adminKey, *createSpecificTenantsRole.Name, customUser) + require.NoError(t, createTenant(t, className, []*models.Tenant{{Name: tenant1}}, customKey)) + require.NoError(t, deleteTenant(t, className, []string{tenant1}, adminKey)) + }) + + t.Run("Fail to create specific tenant without needed permission", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, *createSpecificTenantsRole.Name, customUser) + defer helper.RevokeRoleFromUser(t, adminKey, *createSpecificTenantsRole.Name, customUser) + err := createTenant(t, className, []*models.Tenant{{Name: tenant2}}, customKey) + require.NotNil(t, err) + var forbidden *clschema.TenantsCreateForbidden + require.True(t, errors.As(err, &forbidden)) + }) + + setupRUDTests := func(role string) func() { + require.NoError(t, createTenant(t, className, tenants, adminKey)) + helper.AssignRoleToUser(t, adminKey, role, customUser) + return func() { + defer deleteTenant(t, className, tenantNames, adminKey) + defer helper.RevokeRoleFromUser(t, adminKey, role, customUser) + } + } + + t.Run("Tenant read and exist", func(t *testing.T) { + cleanup := setupRUDTests(readAllTenantsRoleName) + defer cleanup() + + require.NoError(t, readTenant(t, className, tenants[0].Name, customKey)) + require.NoError(t, existsTenant(t, className, tenants[0].Name, customKey)) + }) + + t.Run("Read a specific tenant with needed permission", func(t *testing.T) { + cleanup := setupRUDTests(readSpecificTenantRoleName) + defer cleanup() + + require.NoError(t, readTenant(t, className, tenants[0].Name, customKey)) + require.NoError(t, existsTenant(t, className, tenants[0].Name, customKey)) + }) + + t.Run("Fail to read a specific tenant and all tenants without needed permission", func(t *testing.T) { + cleanup := setupRUDTests(readSpecificTenantRoleName) + defer 
cleanup() + + err := readTenant(t, className, tenants[1].Name, customKey) + require.NotNil(t, err) + var forbiddenGetOne *clschema.TenantsGetOneForbidden + require.True(t, errors.As(err, &forbiddenGetOne)) + + err = existsTenant(t, className, tenants[1].Name, customKey) + var forbiddenExists *clschema.TenantExistsForbidden + require.True(t, errors.As(err, &forbiddenExists)) + + // tests for tenant filtering + res, err := readTenants(t, className, customKey) + require.Nil(t, err) + require.Len(t, res.Payload, 1) + require.Equal(t, tenants[0].Name, res.Payload[0].Name) + }) + + t.Run("Get specific tenant using grpc", func(t *testing.T) { + cleanup := setupRUDTests(readSpecificTenantRoleName) + defer cleanup() + + _, err := readTenantGRPC(t, ctx, className, tenants[0].Name, customKey) + require.Nil(t, err) + }) + + t.Run("Return no tenants via grpc when one is forbidden", func(t *testing.T) { + cleanup := setupRUDTests(readSpecificTenantRoleName) + defer cleanup() + + res, err := readTenantGRPC(t, ctx, className, tenants[1].Name, customKey) + require.Nil(t, err) + require.Len(t, res.Tenants, 0) + }) + + t.Run("Get filtered tenants using rest", func(t *testing.T) { + cleanup := setupRUDTests(readSpecificTenantRoleName) + defer cleanup() + + res, err := readTenants(t, className, customKey) + require.Nil(t, err) + require.Len(t, res.Payload, 1) + require.Equal(t, tenants[0].Name, res.Payload[0].Name) + }) + + t.Run("Get filtered tenants using grpc", func(t *testing.T) { + cleanup := setupRUDTests(readSpecificTenantRoleName) + defer cleanup() + + res, err := readTenantsGRPC(t, ctx, className, customKey) + require.Nil(t, err) + require.Len(t, res.Tenants, 1) + require.Equal(t, tenants[0].Name, res.Tenants[0].Name) + }) + + t.Run("Delete specific tenant with needed permission", func(t *testing.T) { + cleanup := setupRUDTests(deleteSpecificTenantRoleName) + defer cleanup() + + require.NoError(t, deleteTenant(t, className, []string{tenants[0].Name}, customKey)) + }) + + 
t.Run("Fail to delete specific tenant without needed permission", func(t *testing.T) { + cleanup := setupRUDTests(deleteSpecificTenantRoleName) + defer cleanup() + + err := deleteTenant(t, className, []string{tenants[1].Name}, customKey) + require.NotNil(t, err) + var forbidden *clschema.TenantsDeleteForbidden + require.True(t, errors.As(err, &forbidden)) + }) + + t.Run("Update specific tenant status with needed permission", func(t *testing.T) { + cleanup := setupRUDTests(updateSpecificTenantRoleName) + defer cleanup() + + require.NoError(t, updateTenantStatus(t, className, []*models.Tenant{{Name: tenants[0].Name, ActivityStatus: "INACTIVE"}}, customKey)) + }) + + t.Run("Fail to update specific tenant status without needed permission", func(t *testing.T) { + cleanup := setupRUDTests(updateSpecificTenantRoleName) + defer cleanup() + + err := updateTenantStatus(t, className, []*models.Tenant{{Name: tenants[1].Name, ActivityStatus: "INACTIVE"}}, customKey) + require.NotNil(t, err) + var forbidden *clschema.TenantsUpdateForbidden + require.True(t, errors.As(err, &forbidden)) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/authz/users_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/users_test.go new file mode 100644 index 0000000000000000000000000000000000000000..30b2fd140b4887aa1bf797eca9872930df6cf6b1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/authz/users_test.go @@ -0,0 +1,897 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authz + +import ( + "context" + "errors" + "fmt" + "strconv" + "strings" + "testing" + "time" + + "github.com/weaviate/weaviate/client/meta" + + "github.com/go-openapi/strfmt" + + "github.com/weaviate/weaviate/test/docker" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/client/users" + + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/authz" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestAuthzRolesForUsers(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + + customUser := "custom-user" + customKey := "custom-key" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + defer down() + + t.Run("all roles", func(t *testing.T) { + roles := helper.GetRoles(t, adminKey) + require.Equal(t, NumBuildInRoles, len(roles)) + }) + + t.Run("role exists for admin", func(t *testing.T) { + roles := helper.GetRolesForUser(t, adminUser, adminKey, false) + require.Equal(t, 1, len(roles)) + }) + + t.Run("get empty roles for existing user without role", func(t *testing.T) { + roles := helper.GetRolesForUser(t, customUser, adminKey, false) + require.Equal(t, 0, len(roles)) + }) + + t.Run("get roles for non existing user", func(t *testing.T) { + _, err := helper.Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID("notExists").WithUserType(string(models.UserTypeInputDb)), helper.CreateAuth(adminKey)) + require.NotNil(t, err) + var targetErr *authz.GetRolesForUserNotFound + require.True(t, errors.As(err, &targetErr)) + require.Equal(t, 404, targetErr.Code()) + }) +} + +func TestAuthzRolesAndUserHaveTheSameName(t *testing.T) { + adminUser := "admin" + adminKey := "admin" + similar := "similarRoleKeyUserName" + + _, down := composeUp(t, map[string]string{adminUser: 
adminKey}, map[string]string{similar: similar}, nil) + defer down() + + t.Run("create role with the same name of the user", func(t *testing.T) { + helper.CreateRole(t, adminKey, &models.Role{ + Name: String(similar), + Permissions: []*models.Permission{ + {Action: String(authorization.CreateCollections), Collections: &models.PermissionCollections{Collection: String("*")}}, + }, + }) + }) + + t.Run("assign role to user", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, similar, similar) + }) + + t.Run("get role and user were they have the same name", func(t *testing.T) { + role := helper.GetRoleByName(t, adminKey, similar) + require.NotNil(t, role) + require.Equal(t, similar, *role.Name) + require.Len(t, role.Permissions, 1) + require.Equal(t, authorization.CreateCollections, *role.Permissions[0].Action) + require.Equal(t, "*", *role.Permissions[0].Collections.Collection) + + roles := helper.GetRolesForUser(t, similar, adminKey, true) + require.Equal(t, 1, len(roles)) + require.NotNil(t, role) + require.Equal(t, similar, *role.Name) + require.Len(t, role.Permissions, 1) + require.Equal(t, authorization.CreateCollections, *role.Permissions[0].Action) + require.Equal(t, "*", *role.Permissions[0].Collections.Collection) + }) +} + +func TestUserPermissions(t *testing.T) { + // adminUser := "admin-user" + adminKey := "admin-key" + + customUser := "custom-user" + customKey := "custom-key" + + //_, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey}, nil) + //defer down() + + helper.SetupClient("127.0.0.1:8081") + + // create roles for later + assignUserAction := authorization.AssignAndRevokeUsers + readCollectionAction := authorization.ReadCollections + readRolesAction := authorization.ReadRoles + + all := "*" + roleNameUpdate := "userRoleCreate" + otherRoleName := "collectionRead" + roleNameReadRoles := "roleRead" + + userUpdateRole := &models.Role{ + Name: &roleNameUpdate, + Permissions: []*models.Permission{{ 
+ Action: &assignUserAction, + Users: &models.PermissionUsers{Users: &all}, + }}, + } + roleReadRole := &models.Role{ + Name: &roleNameReadRoles, + Permissions: []*models.Permission{{ + Action: &readRolesAction, + Roles: &models.PermissionRoles{Role: &all, Scope: String(models.PermissionRolesScopeAll)}, + }}, + } + otherRole := &models.Role{ + Name: &otherRoleName, + Permissions: []*models.Permission{{ + Action: &readCollectionAction, + Users: &models.PermissionUsers{Users: &all}, + }}, + } + helper.DeleteRole(t, adminKey, roleNameUpdate) + helper.DeleteRole(t, adminKey, otherRoleName) + helper.DeleteRole(t, adminKey, roleNameReadRoles) + helper.CreateRole(t, adminKey, userUpdateRole) + helper.CreateRole(t, adminKey, otherRole) + helper.CreateRole(t, adminKey, roleReadRole) + + t.Run("test returns", func(t *testing.T) { + role := helper.GetRoleByName(t, adminKey, roleNameUpdate) + require.NotNil(t, role) + require.Len(t, role.Permissions, 1) + require.Equal(t, role.Permissions[0].Users.Users, &all) + }) + + t.Run("assign users", func(t *testing.T) { + _, err := helper.Client(t).Authz.AssignRoleToUser( + authz.NewAssignRoleToUserParams().WithID(customUser).WithBody(authz.AssignRoleToUserBody{Roles: []string{otherRoleName}}), + helper.CreateAuth(customKey), + ) + require.Error(t, err) + var errType *authz.AssignRoleToUserForbidden + require.True(t, errors.As(err, &errType)) + + helper.AssignRoleToUser(t, adminKey, roleNameUpdate, customUser) + helper.AssignRoleToUser(t, adminKey, roleNameReadRoles, customUser) + + // assigning works after user has appropriate rights + helper.AssignRoleToUser(t, customKey, otherRoleName, customUser) + + // clean up + helper.RevokeRoleFromUser(t, adminKey, roleNameUpdate, customUser) + helper.RevokeRoleFromUser(t, adminKey, roleNameReadRoles, customUser) + helper.RevokeRoleFromUser(t, adminKey, otherRoleName, customUser) + }) + + t.Run("revoke users", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, otherRoleName, 
customUser) + + _, err := helper.Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID(customUser).WithBody(authz.RevokeRoleFromUserBody{Roles: []string{otherRoleName}}), + helper.CreateAuth(customKey), + ) + require.Error(t, err) + var errType *authz.RevokeRoleFromUserForbidden + require.True(t, errors.As(err, &errType)) + + helper.AssignRoleToUser(t, adminKey, roleNameUpdate, customUser) + helper.AssignRoleToUser(t, adminKey, roleNameReadRoles, customUser) + + // revoking works after user has appropriate rights + roles := helper.GetRolesForUser(t, customUser, customKey, true) + require.Len(t, roles, 3) + helper.RevokeRoleFromUser(t, customKey, otherRoleName, customUser) + roles = helper.GetRolesForUser(t, customUser, customKey, true) + require.Len(t, roles, 2) + + helper.RevokeRoleFromUser(t, adminKey, roleNameUpdate, customUser) + helper.RevokeRoleFromUser(t, adminKey, roleNameReadRoles, customUser) + }) +} + +func TestReadUserPermissions(t *testing.T) { + // adminUser := "admin-user" + adminKey := "admin-key" + + customUser := "custom-user" + customKey := "custom-key" + + secondUser := "viewer-user" + secondKey := "viewer-key" + + //_, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{customUser: customKey, secondUser: secondKey}, nil) + //defer down() + + helper.SetupClient("127.0.0.1:8081") + + // create roles for later + readUserAction := authorization.ReadUsers + readRolesAction := authorization.ReadRoles + + all := "*" + roleNameReadUsers := "userRead" + otherRoleName := "otherName" + roleNameReadRoles := "rolesRead" + + userReadRole := &models.Role{ + Name: &roleNameReadUsers, + Permissions: []*models.Permission{{ + Action: &readUserAction, + Users: &models.PermissionUsers{Users: &secondUser}, + }}, + } + roleReadRole := &models.Role{ + Name: &roleNameReadRoles, + Permissions: []*models.Permission{{ + Action: &readRolesAction, + Roles: &models.PermissionRoles{Role: &all, Scope: 
String(models.PermissionRolesScopeAll)}, + }}, + } + + otherRole := &models.Role{ + Name: &otherRoleName, + Permissions: []*models.Permission{{ + Action: &readUserAction, + Users: &models.PermissionUsers{Users: &all}, + }}, + } + + helper.DeleteRole(t, adminKey, roleNameReadUsers) + helper.CreateRole(t, adminKey, userReadRole) + helper.DeleteRole(t, adminKey, roleNameReadRoles) + helper.CreateRole(t, adminKey, roleReadRole) + helper.DeleteRole(t, adminKey, otherRoleName) + helper.CreateRole(t, adminKey, otherRole) + helper.AssignRoleToUser(t, adminKey, otherRoleName, secondUser) + + t.Run("admin can return roles", func(t *testing.T) { + roles := helper.GetRolesForUser(t, secondUser, adminKey, true) + require.NotNil(t, roles) + require.Len(t, roles, 1) + }) + + t.Run("user can return roles for themselves", func(t *testing.T) { + roles := helper.GetRolesForUser(t, secondUser, secondKey, true) + require.NotNil(t, roles) + require.Len(t, roles, 1) + }) + + t.Run("user cannot return roles for other user", func(t *testing.T) { + _, err := helper.Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID(secondUser).WithUserType(string(models.UserTypeInputDb)), helper.CreateAuth(customKey)) + require.Error(t, err) + var errType *authz.GetRolesForUserForbidden + require.True(t, errors.As(err, &errType)) + }) + + t.Run("add permission", func(t *testing.T) { + helper.AssignRoleToUser(t, adminKey, roleNameReadUsers, customUser) + helper.AssignRoleToUser(t, adminKey, roleNameReadRoles, customUser) + roles := helper.GetRolesForUser(t, secondUser, customKey, false) + require.NotNil(t, roles) + require.Len(t, roles, 1) + + helper.RevokeRoleFromUser(t, adminKey, roleNameReadUsers, customUser) + helper.RevokeRoleFromUser(t, adminKey, roleNameReadRoles, customUser) + }) + + t.Run("check returns", func(t *testing.T) { + helper.RevokeRoleFromUser(t, adminKey, roleNameReadUsers, customUser) + helper.AssignRoleToUser(t, adminKey, roleNameReadUsers, customUser) + roles := 
helper.GetRolesForUser(t, customUser, customKey, true) + require.NotNil(t, roles) + require.Len(t, roles, 1) + require.Len(t, roles[0].Permissions, 1) + + require.Equal(t, secondUser, *roles[0].Permissions[0].Users.Users) + require.Equal(t, readUserAction, *roles[0].Permissions[0].Action) + + helper.RevokeRoleFromUser(t, adminKey, roleNameReadUsers, customUser) + }) +} + +func TestUserEndpoint(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + customUser := "custom-user" + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + compose, err := docker.New().WithWeaviate().WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(customUser, "customKey").WithDbUsers(). + WithRBAC().WithRbacRoots(adminUser).Start(ctx) + require.Nil(t, err) + + defer func() { + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + helper.SetupClient(compose.GetWeaviate().URI()) + + testUser := "test-user" + helper.DeleteUser(t, testUser, adminKey) + testKey := helper.CreateUser(t, testUser, adminKey) + + // create roles for later + readUserAction := authorization.ReadUsers + createUsersAction := authorization.CreateUsers + updateUsersAction := authorization.UpdateUsers + deleteUsersAction := authorization.DeleteUsers + + all := "*" + readUserRoleName := "userRead" + createUserRoleName := "userCreate" + updateUserRoleName := "userUpdate" + deleteUserRoleName := "userDel" + + createUserRole := &models.Role{ + Name: &createUserRoleName, + Permissions: []*models.Permission{{ + Action: &createUsersAction, + Users: &models.PermissionUsers{Users: &all}, + }}, + } + readUserRole := &models.Role{ + Name: &readUserRoleName, + Permissions: []*models.Permission{{ + Action: &readUserAction, + Users: &models.PermissionUsers{Users: &all}, + }}, + } + updateUserRole := &models.Role{ + Name: &updateUserRoleName, + Permissions: []*models.Permission{{ + Action: &updateUsersAction, + Users: &models.PermissionUsers{Users: &all}, + 
}}, + } + deleteUserRole := &models.Role{ + Name: &deleteUserRoleName, + Permissions: []*models.Permission{{ + Action: &deleteUsersAction, + Users: &models.PermissionUsers{Users: &all}, + }}, + } + + roles := []*models.Role{deleteUserRole, createUserRole, updateUserRole, readUserRole} + for _, role := range roles { + helper.DeleteRole(t, adminKey, *role.Name) + helper.CreateRole(t, adminKey, role) + } + defer func() { + for _, role := range roles { + helper.DeleteRole(t, adminKey, *role.Name) + } + }() + + t.Run("Create User", func(t *testing.T) { + otherTestUser := "otherTestUser" + defer helper.DeleteUser(t, otherTestUser, adminKey) + _, err := helper.Client(t).Users.CreateUser(users.NewCreateUserParams().WithUserID(otherTestUser), helper.CreateAuth(testKey)) + require.Error(t, err) + var createUserForbidden *users.CreateUserForbidden + ok := errors.As(err, &createUserForbidden) + assert.True(t, ok) + + helper.AssignRoleToUser(t, adminKey, createUserRoleName, testUser) + defer helper.RevokeRoleFromUser(t, adminKey, createUserRoleName, testUser) + + otherTestUserApiKey := helper.CreateUser(t, otherTestUser, testKey) + require.Greater(t, len(otherTestUserApiKey), 10) + }) + + t.Run("Read User", func(t *testing.T) { + start := time.Now() + otherTestUserName := "otherTestUser" + helper.DeleteUser(t, otherTestUserName, adminKey) + defer helper.DeleteUser(t, otherTestUserName, adminKey) + helper.CreateUser(t, otherTestUserName, adminKey) + + _, err := helper.Client(t).Users.GetUserInfo(users.NewGetUserInfoParams().WithUserID(otherTestUserName), helper.CreateAuth(testKey)) + require.Error(t, err) + var getUserForbidden *users.GetUserInfoForbidden + ok := errors.As(err, &getUserForbidden) + assert.True(t, ok) + + helper.AssignRoleToUser(t, adminKey, readUserRoleName, testUser) + defer helper.RevokeRoleFromUser(t, adminKey, readUserRoleName, testUser) + + otherTestUser := helper.GetUser(t, otherTestUserName, testKey) + require.Equal(t, *otherTestUser.UserID, 
otherTestUserName) + require.Less(t, strfmt.DateTime(start), otherTestUser.CreatedAt) + require.Less(t, otherTestUser.CreatedAt, strfmt.DateTime(time.Now())) + }) + + t.Run("Get static user", func(t *testing.T) { + staticUser := helper.GetUser(t, customUser, adminKey) + require.Equal(t, *staticUser.UserID, customUser) + require.Less(t, staticUser.CreatedAt, strfmt.DateTime(time.Now().Add(-1000*time.Hour))) // static user have minimum time + }) + + t.Run("Update (rotate, Deactivate, activate) user", func(t *testing.T) { + otherTestUser := "otherTestUser" + helper.DeleteUser(t, otherTestUser, adminKey) + defer helper.DeleteUser(t, otherTestUser, adminKey) + apiKey := helper.CreateUser(t, otherTestUser, adminKey) + + // rotate, Deactivate and activate are all update + _, err := helper.Client(t).Users.RotateUserAPIKey(users.NewRotateUserAPIKeyParams().WithUserID(otherTestUser), helper.CreateAuth(testKey)) + require.Error(t, err) + var rotateUserForbidden *users.RotateUserAPIKeyForbidden + assert.True(t, errors.As(err, &rotateUserForbidden)) + + _, err = helper.Client(t).Users.DeactivateUser(users.NewDeactivateUserParams().WithUserID(otherTestUser), helper.CreateAuth(testKey)) + require.Error(t, err) + var DeactivateUserForbidden *users.DeactivateUserForbidden + assert.True(t, errors.As(err, &DeactivateUserForbidden)) + + _, err = helper.Client(t).Users.ActivateUser(users.NewActivateUserParams().WithUserID(otherTestUser), helper.CreateAuth(testKey)) + require.Error(t, err) + var activateUserForbidden *users.ActivateUserForbidden + assert.True(t, errors.As(err, &activateUserForbidden)) + + helper.AssignRoleToUser(t, adminKey, updateUserRoleName, testUser) + defer helper.RevokeRoleFromUser(t, adminKey, updateUserRoleName, testUser) + + // with update role all three operations work + otherTestUserApiKey := helper.RotateKey(t, otherTestUser, testKey) + require.Greater(t, len(otherTestUserApiKey), 10) + + // key is not valid anymore + _, err = 
helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth(apiKey)) + var ownInfoUnauthorized *users.GetOwnInfoUnauthorized + assert.True(t, errors.As(err, &ownInfoUnauthorized)) + + helper.DeactivateUser(t, testKey, otherTestUser, false) + helper.ActivateUser(t, testKey, otherTestUser) + }) + + t.Run("Delete user", func(t *testing.T) { + otherTestUser := "otherTestUser" + helper.DeleteUser(t, otherTestUser, adminKey) + defer helper.DeleteUser(t, otherTestUser, adminKey) + helper.CreateUser(t, otherTestUser, adminKey) + + _, err := helper.Client(t).Users.DeleteUser(users.NewDeleteUserParams().WithUserID(otherTestUser), helper.CreateAuth(testKey)) + require.Error(t, err) + var createUserForbidden *users.DeleteUserForbidden + assert.True(t, errors.As(err, &createUserForbidden)) + + helper.AssignRoleToUser(t, adminKey, deleteUserRoleName, testUser) + defer helper.RevokeRoleFromUser(t, adminKey, deleteUserRoleName, testUser) + + helper.DeleteUser(t, otherTestUser, testKey) + // user does not exist after deleting + resp, err := helper.Client(t).Users.GetUserInfo(users.NewGetUserInfoParams().WithUserID(otherTestUser), helper.CreateAuth(adminKey)) + require.Nil(t, resp) + require.Error(t, err) + var getUserNotFound *users.GetUserInfoNotFound + assert.True(t, errors.As(err, &getUserNotFound)) + }) + + t.Run("delete user revokes roles", func(t *testing.T) { + testUserName := "DeleteUserTestUser" + helper.DeleteUser(t, testUserName, adminKey) + + // create user and assign roles + helper.CreateUser(t, testUserName, adminKey) + helper.AssignRoleToUser(t, adminKey, deleteUserRoleName, testUserName) + rolesRet := helper.GetRolesForUser(t, testUserName, adminKey, false) + require.Len(t, rolesRet, 1) + + // delete user and recreate with same name => role assignment should be gone + helper.DeleteUser(t, testUserName, adminKey) + helper.CreateUser(t, testUserName, adminKey) + rolesRet = helper.GetRolesForUser(t, testUserName, adminKey, false) + require.Len(t, 
rolesRet, 0) + }) +} + +func TestDynamicUsers(t *testing.T) { + adminKey := "admin-key" + adminUser := "admin-user" + + customUser := "custom-user" + customKey := "custom-key" + + viewerUser := "viewer-user" + viewerKey := "viewer-key" + + // match what is defined in the docker-compose file to allow switching between them + staticUsers := map[string]string{customUser: customKey, viewerUser: viewerKey, "editor-user": "editor-key"} + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + compose, err := docker.New().WithWeaviate(). + WithApiKey().WithUserApiKey(adminUser, adminKey).WithUserApiKey(customUser, customKey).WithUserApiKey(viewerUser, viewerKey).WithUserApiKey("editor-user", "editor-key"). + WithRBAC().WithRbacRoots(adminUser). + WithDbUsers().Start(ctx) + require.Nil(t, err) + + defer func() { + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + helper.SetupClient(compose.GetWeaviate().URI()) + + helper.AssignRoleToUser(t, adminKey, "viewer", viewerUser) + t.Run("List all users", func(t *testing.T) { + start := time.Now() + userNames := make([]string, 0, 10) + for i := 0; i < cap(userNames); i++ { + userNames = append(userNames, fmt.Sprintf("user-%d", i)) + } + + apiKeys := make([]string, 0, len(userNames)) + for i, userName := range userNames { + helper.DeleteUser(t, userName, adminKey) + apiKey := helper.CreateUser(t, userName, adminKey) + apiKeys = append(apiKeys, apiKey) + defer helper.DeleteUser(t, userName, adminKey) // runs at end of test function to clear everything + if i%2 == 0 { + helper.AssignRoleToUser(t, adminKey, "viewer", userName) + } + if i%5 == 0 { + helper.DeactivateUser(t, adminKey, userName, false) + } + } + + allUsersAdmin := helper.ListAllUsers(t, adminKey) + require.Len(t, allUsersAdmin, len(userNames)+len(staticUsers)+1) + + for _, user := range allUsersAdmin { + name := *user.UserID + + if *user.DbUserType == models.DBUserInfoDbUserTypeDbEnvUser { + require.Less(t, 
user.CreatedAt, strfmt.DateTime(start)) // minimum time for static users + continue + } + + number, err := strconv.Atoi(strings.Split(name, "-")[1]) + require.NoError(t, err) + if number%2 == 0 { + require.Len(t, user.Roles, 1) + require.Equal(t, user.Roles[0], "viewer") + } + + require.Equal(t, number%5 != 0, *user.Active) + require.Less(t, strfmt.DateTime(start), user.CreatedAt) + require.Less(t, user.CreatedAt, strfmt.DateTime(time.Now())) + require.Len(t, user.APIKeyFirstLetters, 3) + require.Equal(t, user.APIKeyFirstLetters, apiKeys[number][:3]) + } + + allUsersViewer := helper.ListAllUsers(t, viewerKey) + require.Len(t, allUsersViewer, len(userNames)) + }) + + t.Run("List all users using non-admin", func(t *testing.T) { + userNames := make([]string, 0, 10) + for i := 0; i < cap(userNames); i++ { + userNames = append(userNames, fmt.Sprintf("user-%d", i)) + } + + for i, userName := range userNames { + helper.DeleteUser(t, userName, adminKey) + helper.CreateUser(t, userName, adminKey) + defer helper.DeleteUser(t, userName, adminKey) // runs at end of test function to clear everything + if i%2 == 0 { + helper.AssignRoleToUser(t, adminKey, "viewer", userName) + } + if i%5 == 0 { + helper.DeactivateUser(t, adminKey, userName, false) + } + } + + allUsers := helper.ListAllUsers(t, adminKey) + require.Len(t, allUsers, len(userNames)+len(staticUsers)+1) + + for _, user := range allUsers { + name := *user.UserID + + if *user.DbUserType == models.DBUserInfoDbUserTypeDbEnvUser { + continue + } + + number, err := strconv.Atoi(strings.Split(name, "-")[1]) + require.NoError(t, err) + if number%2 == 0 { + require.Len(t, user.Roles, 1) + require.Equal(t, user.Roles[0], "viewer") + } + + require.Equal(t, number%5 != 0, *user.Active) + } + }) + + t.Run("filtered list users", func(t *testing.T) { + length := 10 + userNames := make([]string, 0, length) + for i := 0; i < length; i++ { + var userName string + if i%2 == 0 { + userName = fmt.Sprintf("finance-user-%d", i) + } else { + 
userName = fmt.Sprintf("sales-user-%d", i) + } + + userNames = append(userNames, userName) + } + for _, userName := range userNames { + helper.DeleteUser(t, userName, adminKey) + helper.CreateUser(t, userName, adminKey) + defer helper.DeleteUser(t, userName, adminKey) // runs at end of test function to clear everything + } + + // create role that can only view finance users + readUserAction := authorization.ReadUsers + + finance := "finance-*" + readUserRoleName := "userRead" + + readUserRole := &models.Role{ + Name: &readUserRoleName, + Permissions: []*models.Permission{{ + Action: &readUserAction, + Users: &models.PermissionUsers{Users: &finance}, + }}, + } + financeUserViewer := "test-finance-user-viewer" + helper.DeleteUser(t, financeUserViewer, adminKey) + apiKey := helper.CreateUser(t, financeUserViewer, adminKey) + defer helper.DeleteUser(t, financeUserViewer, adminKey) + helper.DeleteRole(t, adminKey, readUserRoleName) + helper.CreateRole(t, adminKey, readUserRole) + defer helper.DeleteRole(t, adminKey, readUserRoleName) + + helper.AssignRoleToUser(t, adminKey, readUserRoleName, financeUserViewer) + + filteredUsers := helper.ListAllUsers(t, apiKey) + require.Len(t, filteredUsers, length/2) + }) + + t.Run("import static user and check roles", func(t *testing.T) { + // add a role to ensure it is present after import + roleName := "testRole" + testRole := &models.Role{Name: &roleName, Permissions: []*models.Permission{{Action: &authorization.ReadUsers, Users: &models.PermissionUsers{Users: &roleName}}}} + helper.DeleteRole(t, adminKey, roleName) + helper.CreateRole(t, adminKey, testRole) + defer helper.DeleteRole(t, adminKey, roleName) + helper.AssignRoleToUser(t, adminKey, roleName, customUser) + roles := helper.GetRolesForUser(t, customUser, adminKey, false) + require.Len(t, roles, 1) + require.Equal(t, *testRole.Name, *roles[0].Name) + + oldKey := helper.CreateUserWithApiKey(t, customUser, adminKey, nil) + require.Equal(t, oldKey, customKey) + + info := 
helper.GetInfoForOwnUser(t, oldKey) + require.Equal(t, customUser, *info.Username) + + rolesAfterImport := helper.GetRolesForUser(t, customUser, adminKey, false) + require.Len(t, rolesAfterImport, 1) + require.Equal(t, *testRole.Name, *rolesAfterImport[0].Name) + + helper.DeleteUser(t, customUser, adminKey) + }) +} + +func TestUserPermissionReturns(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + all := "*" + + _, down := composeUp(t, map[string]string{adminUser: adminKey}, map[string]string{}, nil) + defer down() + + roleName := "testingUserPermissionReturns" + defer helper.DeleteRole(t, adminKey, roleName) + for _, action := range []string{authorization.ReadUsers, authorization.CreateUsers, authorization.UpdateUsers, authorization.DeleteUsers, authorization.AssignAndRevokeUsers} { + helper.DeleteRole(t, adminKey, roleName) + + role := &models.Role{ + Name: &roleName, + Permissions: []*models.Permission{{ + Action: &action, + Users: &models.PermissionUsers{Users: &all}, + }}, + } + + helper.CreateRole(t, adminKey, role) + roleRet := helper.GetRoleByName(t, adminKey, roleName) + require.NotNil(t, roleRet) + require.Equal(t, *roleRet.Permissions[0].Users.Users, all) + require.Equal(t, *roleRet.Permissions[0].Action, action) + } +} + +func TestGetLastUsageMultinode(t *testing.T) { + adminUser := "admin-user" + adminKey := "admin-key" + ctx := context.Background() + compose, err := docker.New(). + With3NodeCluster().WithApiKey().WithUserApiKey(adminUser, adminKey).WithDbUsers(). 
+ Start(ctx) + + require.NoError(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + t.Run("get last usage multinode", func(t *testing.T) { + helper.SetupClient(compose.GetWeaviate().URI()) + + dynUser := "dyn-user" + helper.DeleteUser(t, dynUser, adminKey) + defer helper.DeleteUser(t, dynUser, adminKey) + apiKey := helper.CreateUser(t, dynUser, adminKey) + + time.Sleep(time.Millisecond * 100) // sometimes takes a little bit until a user has been propagated to all nodes + before := time.Now() + + info := helper.GetInfoForOwnUser(t, apiKey) + require.Equal(t, *info.Username, dynUser) + + user := helper.GetUserWithLastUsedTime(t, dynUser, adminKey, true) + require.Equal(t, *user.UserID, dynUser) + require.Less(t, before, user.LastUsedAt) + require.Less(t, user.LastUsedAt, time.Now()) + + lastLoginTime := user.LastUsedAt + // make request to other node and check that login time has been update in first node + helper.SetupClient(compose.GetWeaviateNode2().URI()) + + require.Equal(t, *helper.GetInfoForOwnUser(t, apiKey).Username, dynUser) + + helper.SetupClient(compose.GetWeaviateNode3().URI()) + + user = helper.GetUserWithLastUsedTime(t, dynUser, adminKey, true) + require.Equal(t, *user.UserID, dynUser) + require.Less(t, lastLoginTime, user.LastUsedAt) + require.Less(t, user.LastUsedAt, time.Now()) + + allUsers := helper.ListAllUsersWithIncludeTime(t, adminKey, true) + for _, user := range allUsers { + if *user.UserID != dynUser { + continue + } + require.Less(t, lastLoginTime, user.LastUsedAt) + require.Less(t, user.LastUsedAt, time.Now()) + } + }) + + t.Run("last usage with shutdowns", func(t *testing.T) { + firstNode := compose.GetWeaviateNode(2) + secondNode := compose.GetWeaviateNode(1) + helper.SetupClient(firstNode.URI()) + + dynUser := "dyn-user" + helper.DeleteUser(t, dynUser, adminKey) + defer helper.DeleteUser(t, dynUser, adminKey) + apiKey := 
helper.CreateUser(t, dynUser, adminKey) + + time.Sleep(time.Millisecond * 100) // sometimes takes a little bit until a user has been propagated to all nodes + before := time.Now() + + info := helper.GetInfoForOwnUser(t, apiKey) + require.Equal(t, *info.Username, dynUser) + + user := helper.GetUserWithLastUsedTime(t, dynUser, adminKey, true) + require.Equal(t, *user.UserID, dynUser) + require.Less(t, before, user.LastUsedAt) + require.Less(t, user.LastUsedAt, time.Now()) + + // shutdown node, its login time should be transferred to other nodes + timeout := time.Minute + err := firstNode.Container().Stop(ctx, &timeout) + require.NoError(t, err) + + // wait to make sure that node is gone + start := time.Now() + for time.Since(start) < timeout { + _, err = helper.Client(t).Meta.MetaGet(meta.NewMetaGetParams(), nil) + if err != nil { + break + } + time.Sleep(time.Second) + } + time.Sleep(time.Second * 5) // wait to make sure that node is gone + _, err = helper.Client(t).Meta.MetaGet(meta.NewMetaGetParams(), nil) + require.Error(t, err) + + helper.ResetClient() + helper.SetupClient(secondNode.URI()) + + userNode2 := helper.GetUserWithLastUsedTime(t, dynUser, adminKey, true) + require.Equal(t, *user.UserID, dynUser) + require.Less(t, before, userNode2.LastUsedAt) + require.Less(t, userNode2.LastUsedAt, time.Now()) + require.Equal(t, userNode2.LastUsedAt, user.LastUsedAt) + + allUsers := helper.ListAllUsersWithIncludeTime(t, adminKey, true) + for _, user := range allUsers { + if *user.UserID != dynUser { + continue + } + require.Less(t, user.LastUsedAt, time.Now()) + require.Equal(t, user.LastUsedAt, userNode2.LastUsedAt) + } + }) +} + +func TestStaticUserImport(t *testing.T) { + rootKey := "root-key" + rootUser := "root-user" + + readOnlyUser := "readOnly-user" + readOnlyKey := "readOnly-key" + + adminUser := "admin-user" + adminKey := "admin-key" + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + compose, err := docker.New().WithWeaviate(). 
+ WithApiKey().WithUserApiKey(rootUser, rootKey).WithUserApiKey(readOnlyUser, readOnlyKey).WithUserApiKey(adminUser, adminKey). + WithRBAC().WithRbacRoots(rootUser). + WithWeaviateEnv("EXPERIMENTAL_AUTHORIZATION_RBAC_READONLY_USERS", "readOnly-user"). + WithWeaviateEnv("EXPERIMENTAL_AUTHORIZATION_RBAC_ADMIN_USERS", "admin-user"). + WithDbUsers().Start(ctx) + require.Nil(t, err) + + defer func() { + helper.ResetClient() + require.NoError(t, compose.Terminate(ctx)) + cancel() + }() + helper.SetupClient(compose.GetWeaviate().URI()) + + keys := map[string]string{readOnlyUser: readOnlyKey, adminUser: adminKey} + + for userName, role := range map[string]string{readOnlyUser: "viewer", adminUser: "admin"} { + t.Run("import static user and check roles for "+userName, func(t *testing.T) { + roles := helper.GetRolesForUser(t, userName, rootKey, false) + require.Len(t, roles, 1) + require.Equal(t, role, *roles[0].Name) + + oldKey := helper.CreateUserWithApiKey(t, userName, rootKey, nil) + require.Equal(t, oldKey, keys[userName]) + + newKey := helper.RotateKey(t, userName, rootKey) + _, err := helper.Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), helper.CreateAuth(oldKey)) + require.Error(t, err) + + user := helper.GetUser(t, userName, rootKey) + require.Equal(t, user.APIKeyFirstLetters, newKey[:3]) + require.NotEqual(t, newKey, oldKey) + + info := helper.GetInfoForOwnUser(t, newKey) + require.Equal(t, userName, *info.Username) + require.Len(t, info.Roles, 1) + require.Equal(t, *info.Roles[0].Name, role) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/actionscreate_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/actionscreate_test.go new file mode 100644 index 0000000000000000000000000000000000000000..47d976a843acf5c696c1b88ea4af14733ce4a1d2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/actionscreate_test.go @@ -0,0 +1,79 @@ 
+// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +// TODO: change this test to simulate a successful query response when the test dataset is implemented. + +// Acceptance tests for the batch ObjectsCreate endpoint +package batch_request_endpoints + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +// Test if batching is working correctly. Sends an OK batch containing two batched requests that refer to non-existing classes. +// The expected outcome is a 200 batch response containing two batched responses. These batched responses should both contain errors. +func TestBatchObjectsCreateResultsOrder(t *testing.T) { + t.Parallel() + + classOneName := "ItIsExtremelyUnlikelyThatThisClassActuallyExistsButJustToBeSureHereAreSomeRandomNumbers12987825624398509861298409782539802434516542" + classTwoName := "ItIsExtremelyUnlikelyThatThisClassActuallyExistsButJustToBeSureHereAreSomeRandomNumbers12987825624398509861298409782539802434516541" + expectedResult := "class '%s' not present in schema" + + // generate objectcreate content + object1 := &models.Object{ + Class: classOneName, + Properties: map[string]interface{}{ + "testString": "Test string", + }, + } + object2 := &models.Object{ + Class: classTwoName, + Properties: map[string]interface{}{ + "testWholeNumber": 1, + }, + } + + testFields := "ALL" + + // generate request body + params := batch.NewBatchObjectsCreateParams().WithBody(batch.BatchObjectsCreateBody{ + Objects: []*models.Object{object1, object2}, + Fields: []*string{&testFields}, + }) + + // perform the request + resp, err := 
helper.BatchClient(t).BatchObjectsCreate(params, nil) + // ensure that the response is OK + helper.AssertRequestOk(t, resp, err, func() { + objectsCreateResponse := resp.Payload + + // check if the batch response contains two batched responses + assert.Equal(t, 2, len(objectsCreateResponse)) + + // check if the error message matches the expected outcome (and are therefore returned in the correct order) + if len(objectsCreateResponse) == 2 { + responseOne := objectsCreateResponse[0].Result.Errors.Error[0].Message + responseTwo := objectsCreateResponse[1].Result.Errors.Error[0].Message + + fullExpectedOutcomeOne := fmt.Sprintf(expectedResult, classOneName) + assert.Contains(t, responseOne, fullExpectedOutcomeOne) + + fullExpectedOutcomeTwo := fmt.Sprintf(expectedResult, classTwoName) + assert.Contains(t, responseTwo, fullExpectedOutcomeTwo) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/batch_delete_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/batch_delete_test.go new file mode 100644 index 0000000000000000000000000000000000000000..45ace6f1ea8c06f8637a0e92d9c336f779cf112e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/batch_delete_test.go @@ -0,0 +1,332 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package batch_request_endpoints + +import ( + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func batchDeleteJourney(t *testing.T) { + maxObjects := 20 + var sources []*models.Object + var targets []*models.Object + equalThisName := "equal-this-name" + + getBatchDelete := func(className string, path []string, valueText string, dryRun bool) *batch.BatchObjectsDeleteParams { + output := "verbose" + params := batch.NewBatchObjectsDeleteParams().WithBody(&models.BatchDelete{ + Match: &models.BatchDeleteMatch{ + Class: className, + Where: &models.WhereFilter{ + Operator: "Equal", + Path: path, + ValueText: &valueText, + }, + }, + DryRun: &dryRun, + Output: &output, + }) + return params + } + + sourceUUIDs := make([]strfmt.UUID, maxObjects) + targetUUIDs := make([]strfmt.UUID, maxObjects) + + t.Run("create some data", func(t *testing.T) { + sources = make([]*models.Object, maxObjects) + for i := range sources { + uuid := mustNewUUID() + + sources[i] = &models.Object{ + Class: "BulkTestSource", + ID: uuid, + Properties: map[string]interface{}{ + "name": equalThisName, + }, + } + + sourceUUIDs[i] = uuid + } + + targets = make([]*models.Object, maxObjects) + for i := range targets { + uuid := mustNewUUID() + + targets[i] = &models.Object{ + Class: "BulkTestTarget", + ID: uuid, + Properties: map[string]interface{}{ + "intProp": i, + }, + } + + targetUUIDs[i] = uuid + } + }) + + t.Run("import all batch objects", func(t *testing.T) { + params := batch.NewBatchObjectsCreateParams().WithBody( + batch.BatchObjectsCreateBody{ + Objects: sources, + }, + ) + res, err := helper.Client(t).Batch.BatchObjectsCreate(params, nil) + require.Nil(t, err) + + for _, elem := range res.Payload { + require.Nil(t, elem.Result.Errors) + } + + paramsTarget := 
batch.NewBatchObjectsCreateParams().WithBody( + batch.BatchObjectsCreateBody{ + Objects: targets, + }, + ) + resTarget, errTarget := helper.Client(t).Batch.BatchObjectsCreate(paramsTarget, nil) + require.Nil(t, errTarget) + + for _, elem := range resTarget.Payload { + require.Nil(t, elem.Result.Errors) + } + }) + + t.Run("import all batch refs", func(t *testing.T) { + batchRefs := make([]*models.BatchReference, len(sources)) + + for i := range batchRefs { + batchRefs[i] = &models.BatchReference{ + From: strfmt.URI(fmt.Sprintf("weaviate://localhost/BulkTestTarget/%s/fromSource", targetUUIDs[i])), + To: strfmt.URI(fmt.Sprintf("weaviate://localhost/BulkTestSource/%s", sourceUUIDs[i])), + } + } + + params := batch.NewBatchReferencesCreateParams().WithBody(batchRefs) + res, err := helper.Client(t).Batch.BatchReferencesCreate(params, nil) + require.Nil(t, err) + + for _, elem := range res.Payload { + require.Nil(t, elem.Result.Errors) + } + }) + + t.Run("verify using GraphQL", func(t *testing.T) { + // verify objects + result := AssertGraphQL(t, helper.RootAuth, ` + { Get { BulkTestSource(where:{operator:Equal path:["name"] valueText:"equal-this-name"}) { name } } } + `) + items := result.Get("Get", "BulkTestSource").AsSlice() + require.Len(t, items, maxObjects) + + // verify refs + result = AssertGraphQL(t, helper.RootAuth, ` + { + Get { + BulkTestTarget + ( + where: { + path: ["fromSource", "BulkTestSource", "name"] + operator: Equal + valueText: "equal-this-name" + } + ) + { + fromSource { + ... 
on BulkTestSource { + _additional { + id + } + } + } + } + } + } + `) + items = result.Get("Get", "BulkTestTarget").AsSlice() + for _, item := range items { + fromSource := item.(map[string]interface{})["fromSource"] + require.NotNil(t, fromSource) + } + require.Len(t, items, maxObjects) + }) + + t.Run("perform batch delete by refs dry run", func(t *testing.T) { + params := getBatchDelete("BulkTestTarget", []string{"fromSource", "BulkTestSource", "name"}, equalThisName, true) + res, err := helper.Client(t).Batch.BatchObjectsDelete(params, nil) + require.Nil(t, err) + + response := res.Payload + require.NotNil(t, response) + require.NotNil(t, response.Match) + require.NotNil(t, response.Results) + require.Equal(t, int64(maxObjects), response.Results.Matches) + require.Equal(t, int64(0), response.Results.Successful) + require.Equal(t, int64(0), response.Results.Failed) + require.Equal(t, maxObjects, len(response.Results.Objects)) + for _, elem := range response.Results.Objects { + require.Nil(t, elem.Errors) + } + }) + + t.Run("[deprecated string] perform batch delete by refs dry run", func(t *testing.T) { + params := getBatchDelete("BulkTestTarget", []string{"fromSource", "BulkTestSource", "name"}, equalThisName, true) + params.Body.Match.Where.ValueText = nil + params.Body.Match.Where.ValueString = &equalThisName + + res, err := helper.Client(t).Batch.BatchObjectsDelete(params, nil) + require.Nil(t, err) + + response := res.Payload + require.NotNil(t, response) + require.NotNil(t, response.Match) + require.NotNil(t, response.Results) + require.Equal(t, int64(maxObjects), response.Results.Matches) + require.Equal(t, int64(0), response.Results.Successful) + require.Equal(t, int64(0), response.Results.Failed) + require.Equal(t, maxObjects, len(response.Results.Objects)) + for _, elem := range response.Results.Objects { + require.Nil(t, elem.Errors) + } + }) + + t.Run("verify that batch delete by refs dry run didn't delete data", func(t *testing.T) { + result := 
AssertGraphQL(t, helper.RootAuth, ` + { + Get { + BulkTestTarget + ( + where: { + path: ["fromSource", "BulkTestSource", "name"] + operator: Equal + valueText: "equal-this-name" + } + ) + { + fromSource { + ... on BulkTestSource { + _additional { + id + } + } + } + } + } + } + `) + items := result.Get("Get", "BulkTestTarget").AsSlice() + require.Len(t, items, maxObjects) + }) + + t.Run("perform batch delete by prop dry run", func(t *testing.T) { + params := getBatchDelete("BulkTestSource", []string{"name"}, equalThisName, true) + res, err := helper.Client(t).Batch.BatchObjectsDelete(params, nil) + require.Nil(t, err) + + response := res.Payload + require.NotNil(t, response) + require.NotNil(t, response.Match) + require.NotNil(t, response.Results) + require.Equal(t, int64(maxObjects), response.Results.Matches) + require.Equal(t, int64(0), response.Results.Successful) + require.Equal(t, int64(0), response.Results.Failed) + require.Equal(t, maxObjects, len(response.Results.Objects)) + for _, elem := range response.Results.Objects { + require.Nil(t, elem.Errors) + } + }) + + t.Run("verify that batch delete by prop dry run didn't delete data", func(t *testing.T) { + result := AssertGraphQL(t, helper.RootAuth, ` + { Get { BulkTestSource(where:{operator:Equal path:["name"] valueText:"equal-this-name"}) { name } } } + `) + items := result.Get("Get", "BulkTestSource").AsSlice() + require.Len(t, items, maxObjects) + }) + + t.Run("perform batch delete by ref", func(t *testing.T) { + params := getBatchDelete("BulkTestTarget", []string{"fromSource", "BulkTestSource", "name"}, equalThisName, false) + res, err := helper.Client(t).Batch.BatchObjectsDelete(params, nil) + require.Nil(t, err) + + response := res.Payload + require.NotNil(t, response) + require.NotNil(t, response.Match) + require.NotNil(t, response.Results) + require.Equal(t, int64(maxObjects), response.Results.Matches) + require.Equal(t, int64(maxObjects), response.Results.Successful) + require.Equal(t, int64(0), 
response.Results.Failed) + require.Equal(t, maxObjects, len(response.Results.Objects)) + for _, elem := range response.Results.Objects { + require.Nil(t, elem.Errors) + } + }) + + t.Run("verify that batch delete by ref deleted everything", func(t *testing.T) { + result := AssertGraphQL(t, helper.RootAuth, ` + { + Get { + BulkTestTarget + ( + where: { + path: ["fromSource", "BulkTestSource", "name"] + operator: Equal + valueText: "equal-this-name" + } + ) + { + fromSource { + ... on BulkTestSource { + _additional { + id + } + } + } + } + } + } + `) + items := result.Get("Get", "BulkTestTarget").AsSlice() + require.Len(t, items, 0) + }) + + t.Run("perform batch delete by prop", func(t *testing.T) { + params := getBatchDelete("BulkTestSource", []string{"name"}, equalThisName, false) + res, err := helper.Client(t).Batch.BatchObjectsDelete(params, nil) + require.Nil(t, err) + + response := res.Payload + require.NotNil(t, response) + require.NotNil(t, response.Match) + require.NotNil(t, response.Results) + require.Equal(t, int64(maxObjects), response.Results.Matches) + require.Equal(t, int64(maxObjects), response.Results.Successful) + require.Equal(t, int64(0), response.Results.Failed) + require.Equal(t, maxObjects, len(response.Results.Objects)) + for _, elem := range response.Results.Objects { + require.Nil(t, elem.Errors) + } + }) + + t.Run("verify that batch delete by prop deleted everything", func(t *testing.T) { + result := AssertGraphQL(t, helper.RootAuth, ` + { Get { BulkTestSource(where:{operator:Equal path:["name"] valueText:"equal-this-name"}) { name } } } + `) + items := result.Get("Get", "BulkTestSource").AsSlice() + require.Len(t, items, 0) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/batch_journey_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/batch_journey_test.go new file mode 100644 index 
0000000000000000000000000000000000000000..1c480cf27f6709f8e1a2500c5bbdbb9af43a8570 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/batch_journey_test.go @@ -0,0 +1,195 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package batch_request_endpoints + +import ( + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func batchJourney(t *testing.T) { + sourcesSize := 10 + targetsSize := 3 + var sources []*models.Object + var targets []*models.Object + + t.Run("create some data", func(t *testing.T) { + sources = make([]*models.Object, sourcesSize) + for i := range sources { + sources[i] = &models.Object{ + Class: "BulkTestSource", + ID: mustNewUUID(), + Properties: map[string]interface{}{ + "name": fmt.Sprintf("source%d", i), + }, + } + } + + targets = make([]*models.Object, targetsSize) + for i := range targets { + targets[i] = &models.Object{ + Class: "BulkTest", + ID: mustNewUUID(), + Properties: map[string]interface{}{ + "name": fmt.Sprintf("target%d", i), + }, + } + } + }) + + t.Run("import all data in batch", func(t *testing.T) { + params := batch.NewBatchObjectsCreateParams().WithBody( + batch.BatchObjectsCreateBody{ + Objects: append(sources, targets...), + }, + ) + res, err := helper.Client(t).Batch.BatchObjectsCreate(params, nil) + require.Nil(t, err) + + for _, elem := range res.Payload { + assert.Nil(t, elem.Result.Errors) + } + }) + + t.Run("set one cref each from each 
source to all targets", func(t *testing.T) { + body := make([]*models.BatchReference, sourcesSize*targetsSize) + for i := range sources { + for j := range targets { + index := i*targetsSize + j + body[index] = &models.BatchReference{ + From: strfmt.URI( + fmt.Sprintf("weaviate://localhost/BulkTestSource/%s/ref", sources[i].ID)), + To: strfmt.URI(fmt.Sprintf("weaviate://localhost/%s", targets[j].ID)), + } + } + } + params := batch.NewBatchReferencesCreateParams().WithBody(body) + res, err := helper.Client(t).Batch.BatchReferencesCreate(params, nil) + require.Nil(t, err) + + for _, elem := range res.Payload { + assert.Nil(t, elem.Result.Errors) + } + }) + + t.Run("verify using GraphQL", func(t *testing.T) { + result := AssertGraphQL(t, helper.RootAuth, ` + { Get { BulkTestSource { ref { ... on BulkTest { name } } } } } + `) + items := result.Get("Get", "BulkTestSource").AsSlice() + assert.Len(t, items, sourcesSize) + for _, obj := range items { + require.NotNil(t, obj) + refs := obj.(map[string]interface{})["ref"].([]interface{}) + assert.Len(t, refs, targetsSize) + } + }) +} + +func mustNewUUID() strfmt.UUID { + return strfmt.UUID(uuid.New().String()) +} + +func Test_BugFlakyResultCountWithVectorSearch(t *testing.T) { + className := "FlakyBugTestClass" + + // since this bug occurs only in around 1 in 25 cases, we run the test + // multiple times to increase the chance we're running into it + amount := 50 + for i := 0; i < amount; i++ { + t.Run("create schema", func(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "title", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "url", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "wordCount", + DataType: []string{"int"}, + }, + }, + }) + }) + + t.Run("create and import some data", func(t *testing.T) { + objects := 
[]*models.Object{ + { + Class: className, + Properties: map[string]interface{}{ + "title": "article 1", + "url": "http://articles.local/my-article-1", + "wordCount": 60, + }, + }, + { + Class: className, + Properties: map[string]interface{}{ + "title": "article 2", + "url": "http://articles.local/my-article-2", + "wordCount": 40, + }, + }, + { + Class: className, + Properties: map[string]interface{}{ + "title": "article 3", + "url": "http://articles.local/my-article-3", + "wordCount": 600, + }, + }, + } + + params := batch.NewBatchObjectsCreateParams().WithBody( + batch.BatchObjectsCreateBody{ + Objects: objects, + }, + ) + res, err := helper.Client(t).Batch.BatchObjectsCreate(params, nil) + require.Nil(t, err) + + for _, elem := range res.Payload { + assert.Nil(t, elem.Result.Errors) + } + }) + + t.Run("verify using GraphQL", func(t *testing.T) { + result := AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(` + { Get { %s(nearText: {concepts: ["news"]}) { + wordCount title url + } } } + `, className)) + items := result.Get("Get", className).AsSlice() + assert.Len(t, items, 3) + }) + + t.Run("cleanup", func(t *testing.T) { + deleteObjectClass(t, className) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/graphql_helper_for_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/graphql_helper_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..6d0f2f3ffced04ee2989fd9ba5fbec2ce7a3b4b7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/graphql_helper_for_test.go @@ -0,0 +1,89 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package batch_request_endpoints + +import ( + "encoding/json" + "errors" + "fmt" + "testing" + + "github.com/go-openapi/runtime" + + "github.com/weaviate/weaviate/client/graphql" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +type GraphQLResult struct { + Result interface{} +} + +// Perform a GraphQL request +func QueryGraphQL(t *testing.T, auth runtime.ClientAuthInfoWriterFunc, operation string, query string, variables map[string]interface{}) (*models.GraphQLResponse, error) { + var vars interface{} = variables + params := graphql.NewGraphqlPostParams().WithBody(&models.GraphQLQuery{OperationName: operation, Query: query, Variables: vars}) + response, err := helper.Client(t).Graphql.GraphqlPost(params, nil) + if err != nil { + return nil, err + } + + return response.Payload, nil +} + +// Perform a query and assert that it is successful +func AssertGraphQL(t *testing.T, auth runtime.ClientAuthInfoWriterFunc, query string) *GraphQLResult { + response, err := QueryGraphQL(t, auth, "", query, nil) + if err != nil { + var parsedErr *graphql.GraphqlPostUnprocessableEntity + if !errors.As(err, &parsedErr) { + t.Fatalf("Expected the query to succeed, but failed due to: %+v", err) + } + t.Fatalf("Expected the query to succeed, but failed with unprocessable entity: %v", parsedErr.Payload.Error[0]) + } + + if len(response.Errors) != 0 { + j, _ := json.Marshal(response.Errors) + t.Fatal("GraphQL resolved to an error:", string(j)) + } + + data := make(map[string]interface{}) + + // get rid of models.JSONData + for key, value := range response.Data { + data[key] = value + } + + return &GraphQLResult{Result: data} +} + +// Drill down in the result +func (g GraphQLResult) Get(paths ...string) *GraphQLResult { + current := g.Result + for _, path := range paths { + var ok bool + currentAsMap := (current.(map[string]interface{})) + current, ok = currentAsMap[path] + if !ok { + 
panic(fmt.Sprintf("Cannot get element %s in %#v; result: %#v", path, paths, g.Result)) + } + } + + return &GraphQLResult{ + Result: current, + } +} + +// Cast the result to a slice +func (g *GraphQLResult) AsSlice() []interface{} { + return g.Result.([]interface{}) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/graphql_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/graphql_test.go new file mode 100644 index 0000000000000000000000000000000000000000..58e73a5b9c8de2871d1a939e101419051b7d5be0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/graphql_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package batch_request_endpoints + +// TODO: These tests add little value, they only test one specific error case, +// but don't test any happy path at all. This should probably be removed or +// fixed. However, they do at least assure that the order of return values matches +// the order of input values. + +// Acceptance tests for the batch GraphQL endpoint + +// There is a helper struct called GraphQLResult that helps to navigate through the output, +// a query generator and a few helper functions to access the GraphQL endpoint. +// See the end of this file for more details on how those work. + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + graphql_client "github.com/weaviate/weaviate/client/graphql" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +// TODO: change this test to simulate a successful query response when the test dataset is implemented. 
+ +// Check if batch results are returned in the correct order by comparing result equality to predefined outcomes. +// This includes testing whether individual requests and the batch request are handled correctly +func gqlResultsOrder(t *testing.T) { + queryOneName := "testQuery" + queryTwoName := "testQuery2" + expectedResult := "Syntax Error GraphQL request (1:1) Unexpected Name \"%s\"\n\n1: %s\n ^\n" + + // perform the query + gqlResponse, err := queryBatchEndpoint(t, nil) + if err != nil { + t.Fatalf("The returned schema is not an JSON object: %v", err) + } + // check if the batch response contains two batched responses + assert.Equal(t, 2, len(gqlResponse)) + + // check if the error message matches the expected outcome (and are therefore returned in the correct order) + if len(gqlResponse) == 2 { + responseOne := gqlResponse[0].Errors[0].Message + responseTwo := gqlResponse[1].Errors[0].Message + + fullExpectedOutcomeOne := fmt.Sprintf(expectedResult, queryOneName, queryOneName) + assert.Equal(t, fullExpectedOutcomeOne, responseOne) + + fullExpectedOutcomeTwo := fmt.Sprintf(expectedResult, queryTwoName, queryTwoName) + assert.Equal(t, fullExpectedOutcomeTwo, responseTwo) + } +} + +func gqlMalformedRequest(t *testing.T) { + vars := []int{1, 2, 3} + expectedResult := "422: expected map[string]interface{}, received %v" + + // perform the query + gqlResponse, err := queryBatchEndpoint(t, vars) + if err != nil { + t.Fatalf("The returned schema is not an JSON object: %v", err) + } + // check if the batch response contains two batched responses + assert.Equal(t, 2, len(gqlResponse)) + + fullExpectedOutcome := fmt.Sprintf(expectedResult, vars) + assert.Equal(t, fullExpectedOutcome, gqlResponse[0].Errors[0].Message) + assert.Equal(t, fullExpectedOutcome, gqlResponse[1].Errors[0].Message) +} + +// Helper functions +// TODO: change this to a successful query when the test dataset is implemented. Make sure to implement a query returning 3 or more elements. 
+// Perform a batch GraphQL query +func queryBatchEndpoint(t *testing.T, vars interface{}) (models.GraphQLResponses, error) { + query1 := &models.GraphQLQuery{OperationName: "testQuery", Query: "testQuery", Variables: vars} + query2 := &models.GraphQLQuery{OperationName: "testQuery2", Query: "testQuery2", Variables: vars} + + queries := models.GraphQLQueries{query1, query2} + + params := graphql_client.NewGraphqlBatchParams().WithBody(queries) + response, err := helper.Client(t).Graphql.GraphqlBatch(params, nil) + if err != nil { + return nil, err + } + + return response.Payload, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/setup_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/setup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..6899d24e3a6e2b84d46fe4056df28d6e95735709 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/setup_test.go @@ -0,0 +1,89 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package batch_request_endpoints + +import ( + "testing" + "time" + + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func Test_Batch(t *testing.T) { + // there is no gql provider if there is no schema, so we need some sort of a schema + + t.Run("setup", func(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: "BulkTest", + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + createObjectClass(t, &models.Class{ + Class: "BulkTestSource", + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "ref", + DataType: []string{"BulkTest"}, + }, + }, + }) + createObjectClass(t, &models.Class{ + Class: "BulkTestTarget", + Properties: []*models.Property{ + { + Name: "intProp", + DataType: []string{"int"}, + }, + { + Name: "fromSource", + DataType: []string{"BulkTestSource"}, + }, + }, + }) + }) + + time.Sleep(2 * time.Second) + + t.Run("batch journey", batchJourney) + t.Run("gql results order", gqlResultsOrder) + t.Run("gql malformed request", gqlMalformedRequest) + t.Run("batch delete", batchDeleteJourney) + + deleteObjectClass(t, "BulkTest") + deleteObjectClass(t, "BulkTestSource") + deleteObjectClass(t, "BulkTestTarget") +} + +func createObjectClass(t *testing.T, class *models.Class) { + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(class) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) +} + +func deleteObjectClass(t *testing.T, class string) { + delParams := clschema.NewSchemaObjectsDeleteParams().WithClassName(class) + delRes, err := 
helper.Client(t).Schema.SchemaObjectsDelete(delParams, nil) + helper.AssertRequestOk(t, delRes, err, nil) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/thingscreate_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/thingscreate_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1e805e2b2966f7fc37b3cdbb636d99e8ec31bd3b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/batch_request_endpoints/thingscreate_test.go @@ -0,0 +1,80 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +// TODO: change this test to simulate a successful query response when the test dataset is implemented. + +// Acceptance tests for the batch ThingsCreate endpoint +package batch_request_endpoints + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +// Test if batching is working correctly. Sends an OK batch containing two batched requests that refer to non-existing classes. +// The expected outcome is a 200 batch response containing two batched responses. These batched responses should both contain errors. 
+func TestBatchThingsCreateResultsOrder(t *testing.T) { + t.Parallel() + + classOneName := "ItIsExtremelyUnlikelyThatThisClassActuallyExistsButJustToBeSureHereAreSomeRandomNumbers12987825624398509861298409782539802434516542" + classTwoName := "ItIsExtremelyUnlikelyThatThisClassActuallyExistsButJustToBeSureHereAreSomeRandomNumbers12987825624398509861298409782539802434516541" + expectedResult := "class '%s' not present in schema" + + // generate actioncreate content + object1 := &models.Object{ + Class: classOneName, + Properties: map[string]interface{}{ + "testString": "Test string", + }, + } + object2 := &models.Object{ + Class: classTwoName, + Properties: map[string]interface{}{ + "testWholeNumber": 1, + }, + } + + testFields := "ALL" + + // generate request body + params := batch.NewBatchObjectsCreateParams().WithBody(batch.BatchObjectsCreateBody{ + Objects: []*models.Object{object1, object2}, + Fields: []*string{&testFields}, + }) + + // perform the request + resp, err := helper.BatchClient(t).BatchObjectsCreate(params, nil) + + // ensure that the response is OK + helper.AssertRequestOk(t, resp, err, func() { + thingsCreateResponse := resp.Payload + + // check if the batch response contains two batched responses + assert.Equal(t, 2, len(thingsCreateResponse)) + + // check if the error message matches the expected outcome (and are therefore returned in the correct order) + if len(thingsCreateResponse) == 2 { + responseOne := thingsCreateResponse[0].Result.Errors.Error[0].Message + responseTwo := thingsCreateResponse[1].Result.Errors.Error[0].Message + + fullExpectedOutcomeOne := fmt.Sprintf(expectedResult, classOneName) + assert.Contains(t, responseOne, fullExpectedOutcomeOne) + + fullExpectedOutcomeTwo := fmt.Sprintf(expectedResult, classTwoName) + assert.Contains(t, responseTwo, fullExpectedOutcomeTwo) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/contextual_classification_test.go 
b/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/contextual_classification_test.go new file mode 100644 index 0000000000000000000000000000000000000000..884810836b6a346389fc24f6718cd014031d7f3d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/contextual_classification_test.go @@ -0,0 +1,100 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + "time" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/classifications" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func contextualClassification(t *testing.T) { + var id strfmt.UUID + + res, err := helper.Client(t).Classifications.ClassificationsPost(classifications.NewClassificationsPostParams(). + WithParams(&models.Classification{ + Class: "Article", + ClassifyProperties: []string{"ofCategory"}, + BasedOnProperties: []string{"content"}, + Type: "text2vec-contextionary-contextual", + }), nil) + require.Nil(t, err) + id = res.Payload.ID + + // wait for classification to be completed + helper.AssertEventuallyEqualWithFrequencyAndTimeout(t, "completed", func() interface{} { + res, err := helper.Client(t).Classifications.ClassificationsGet(classifications.NewClassificationsGetParams(). 
+ WithID(id.String()), nil) + + require.Nil(t, err) + return res.Payload.Status + }, 100*time.Millisecond, 15*time.Second) + + // wait for latest changes to be indexed / wait for consistency + helper.AssertEventuallyEqual(t, true, func() interface{} { + res, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams(). + WithID(article1), nil) + require.Nil(t, err) + return res.Payload.Properties.(map[string]interface{})["ofCategory"] != nil + }) + helper.AssertEventuallyEqual(t, true, func() interface{} { + res, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams(). + WithID(article2), nil) + require.Nil(t, err) + return res.Payload.Properties.(map[string]interface{})["ofCategory"] != nil + }) + helper.AssertEventuallyEqual(t, true, func() interface{} { + res, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams(). + WithID(article3), nil) + require.Nil(t, err) + return res.Payload.Properties.(map[string]interface{})["ofCategory"] != nil + }) + + gres := AssertGraphQL(t, nil, ` +{ + Get { + Article { + _additional { + id + } + ofCategory { + ... 
on Category { + name + } + } + } + } +}`) + + expectedCategoriesByID := map[strfmt.UUID]string{ + article1: "Computers and Technology", + article2: "Food and Drink", + article3: "Politics", + } + articles := gres.Get("Get", "Article").AsSlice() + for _, article := range articles { + actual := article.(map[string]interface{})["ofCategory"].([]interface{})[0].(map[string]interface{})["name"].(string) + id := article.(map[string]interface{})["_additional"].(map[string]interface{})["id"].(string) + assert.Equal(t, expectedCategoriesByID[strfmt.UUID(id)], actual) + } +} + +func ptString(in string) *string { + return &in +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/graphql_helper.go b/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/graphql_helper.go new file mode 100644 index 0000000000000000000000000000000000000000..eb6c04ed95a257b98ece928a6d6c267f732f48ae --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/graphql_helper.go @@ -0,0 +1,89 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "errors" + "fmt" + "testing" + + "github.com/go-openapi/runtime" + + "github.com/weaviate/weaviate/client/graphql" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +type GraphQLResult struct { + Result interface{} +} + +// Perform a GraphQL request +func QueryGraphQL(t *testing.T, auth runtime.ClientAuthInfoWriterFunc, operation string, query string, variables map[string]interface{}) (*models.GraphQLResponse, error) { + var vars interface{} = variables + params := graphql.NewGraphqlPostParams().WithBody(&models.GraphQLQuery{OperationName: operation, Query: query, Variables: vars}) + response, err := helper.Client(t).Graphql.GraphqlPost(params, nil) + if err != nil { + return nil, err + } + + return response.Payload, nil +} + +// Perform a query and assert that it is successful +func AssertGraphQL(t *testing.T, auth runtime.ClientAuthInfoWriterFunc, query string) *GraphQLResult { + response, err := QueryGraphQL(t, auth, "", query, nil) + if err != nil { + var parsedErr *graphql.GraphqlPostUnprocessableEntity + if !errors.As(err, &parsedErr) { + t.Fatalf("Expected the query to succeed, but failed due to: %#v", err) + } + t.Fatalf("Expected the query to succeed, but failed with unprocessable entity: %#v", parsedErr.Payload.Error[0]) + } + + if len(response.Errors) != 0 { + j, _ := json.Marshal(response.Errors) + t.Fatal("GraphQL resolved to an error:", string(j)) + } + + data := make(map[string]interface{}) + + // get rid of models.JSONData + for key, value := range response.Data { + data[key] = value + } + + return &GraphQLResult{Result: data} +} + +// Drill down in the result +func (g GraphQLResult) Get(paths ...string) *GraphQLResult { + current := g.Result + for _, path := range paths { + var ok bool + currentAsMap := (current.(map[string]interface{})) + current, ok = currentAsMap[path] + if !ok { + panic(fmt.Sprintf("Cannot get 
element %s in %#v; result: %#v", path, paths, g.Result)) + } + } + + return &GraphQLResult{ + Result: current, + } +} + +// Cast the result to a slice +func (g *GraphQLResult) AsSlice() []interface{} { + return g.Result.([]interface{}) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/knn_classification_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/knn_classification_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9d36efa59846f8aa38af046bcb00fe3edc68531c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/knn_classification_test.go @@ -0,0 +1,144 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/classifications" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func knnClassification(t *testing.T) { + var id strfmt.UUID + + t.Run("ensure class shard for classification is ready", func(t *testing.T) { + helper.AssertEventuallyEqualWithFrequencyAndTimeout(t, "READY", + func() interface{} { + shardStatus, err := helper.Client(t).Schema.SchemaObjectsShardsGet(schema.NewSchemaObjectsShardsGetParams().WithClassName("Recipe"), nil) + require.Nil(t, err) + require.GreaterOrEqual(t, len(shardStatus.Payload), 1) + return shardStatus.Payload[0].Status + }, 250*time.Millisecond, 15*time.Second) + }) + + t.Run("start the classification and wait 
for completion", func(t *testing.T) { + res, err := helper.Client(t).Classifications.ClassificationsPost( + classifications.NewClassificationsPostParams().WithParams(&models.Classification{ + Class: "Recipe", + ClassifyProperties: []string{"ofType"}, + BasedOnProperties: []string{"content"}, + Type: "knn", + Settings: map[string]interface{}{ + "k": 5, + }, + }), nil) + require.Nil(t, err) + id = res.Payload.ID + + // wait for classification to be completed + helper.AssertEventuallyEqualWithFrequencyAndTimeout(t, "completed", + func() interface{} { + res, err := helper.Client(t).Classifications.ClassificationsGet( + classifications.NewClassificationsGetParams().WithID(id.String()), nil) + + require.Nil(t, err) + return res.Payload.Status + }, 100*time.Millisecond, 15*time.Second) + }) + + t.Run("assure changes present", func(t *testing.T) { + // wait for latest changes to be indexed / wait for consistency + helper.AssertEventuallyEqual(t, true, func() interface{} { + res, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams(). + WithID(unclassifiedSavory), nil) + require.Nil(t, err) + return res.Payload.Properties.(map[string]interface{})["ofType"] != nil + }) + helper.AssertEventuallyEqual(t, true, func() interface{} { + res, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams(). + WithID(unclassifiedSweet), nil) + require.Nil(t, err) + return res.Payload.Properties.(map[string]interface{})["ofType"] != nil + }) + }) + + t.Run("inspect unclassified savory", func(t *testing.T) { + res, err := helper.Client(t).Objects. + ObjectsGet(objects.NewObjectsGetParams(). + WithID(unclassifiedSavory). 
+ WithInclude(ptString("classification")), nil) + + require.Nil(t, err) + schema, ok := res.Payload.Properties.(map[string]interface{}) + require.True(t, ok) + + expectedRefTarget := fmt.Sprintf("weaviate://localhost/RecipeType/%s", + recipeTypeSavory) + ref := schema["ofType"].([]interface{})[0].(map[string]interface{}) + assert.Equal(t, ref["beacon"].(string), expectedRefTarget) + + verifyMetaDistances(t, ref) + }) + + t.Run("inspect unclassified sweet", func(t *testing.T) { + res, err := helper.Client(t).Objects. + ObjectsGet(objects.NewObjectsGetParams(). + WithID(unclassifiedSweet). + WithInclude(ptString("classification")), nil) + + require.Nil(t, err) + schema, ok := res.Payload.Properties.(map[string]interface{}) + require.True(t, ok) + + expectedRefTarget := fmt.Sprintf("weaviate://localhost/RecipeType/%s", + recipeTypeSweet) + ref := schema["ofType"].([]interface{})[0].(map[string]interface{}) + assert.Equal(t, ref["beacon"].(string), expectedRefTarget) + + verifyMetaDistances(t, ref) + }) +} + +func verifyMetaDistances(t *testing.T, ref map[string]interface{}) { + classification, ok := ref["classification"].(map[string]interface{}) + require.True(t, ok) + + assert.Equal(t, json.Number("3"), classification["winningCount"]) + assert.Equal(t, json.Number("2"), classification["losingCount"]) + assert.Equal(t, json.Number("5"), classification["overallCount"]) + + closestWinning, err := classification["closestWinningDistance"].(json.Number).Float64() + require.Nil(t, err) + closestLosing, err := classification["closestLosingDistance"].(json.Number).Float64() + require.Nil(t, err) + closestOverall, err := classification["closestOverallDistance"].(json.Number).Float64() + require.Nil(t, err) + meanWinning, err := classification["meanWinningDistance"].(json.Number).Float64() + require.Nil(t, err) + meanLosing, err := classification["meanLosingDistance"].(json.Number).Float64() + require.Nil(t, err) + + assert.True(t, closestWinning == closestOverall, 
"closestWinning == closestOverall") + assert.True(t, closestWinning < meanWinning, "closestWinning < meanWinning") + assert.True(t, closestWinning < closestLosing, "closestWinning < closestLosing") + assert.True(t, closestLosing < meanLosing, "closestLosing < meanLosing") +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/setup_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/setup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1fe0a522609e43c28e46eb80710ca5234482c851 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/setup_test.go @@ -0,0 +1,409 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/client/objects" + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +var ( + // contextual + article1 strfmt.UUID = "dcbe5df8-af01-46f1-b45f-bcc9a7a0773d" // apple macbook + article2 strfmt.UUID = "6a8c7b62-fd45-488f-b884-ec87227f6eb3" // ice cream and steak + article3 strfmt.UUID = "92f05097-6371-499c-a0fe-3e60ae16fe3d" // president of the us + + // knn + recipeTypeSavory strfmt.UUID = "989d792c-b59e-4430-80a3-cf7f320f31b0" + recipeTypeSweet strfmt.UUID = "c9dfda02-6b05-4117-9d95-a188342cca48" + unclassifiedSavory strfmt.UUID = "953c03f8-d61e-44c0-bbf1-2afe0dc1ce87" + unclassifiedSweet strfmt.UUID = "04603002-cb66-4fce-bf6d-56bdf9b0b5d4" + + // zeroshot + foodTypeMeat strfmt.UUID = "998d792c-b59e-4430-80a3-cf7f320f31b0" + foodTypeIceCream 
strfmt.UUID = "998d792c-b59e-4430-80a3-cf7f320f31b1" + unclassifiedSteak strfmt.UUID = "953c03f8-d61e-44c0-bbf1-2afe0dc1ce10" + unclassifiedIceCreams strfmt.UUID = "953c03f8-d61e-44c0-bbf1-2afe0dc1ce11" +) + +func Test_Classifications(t *testing.T) { + t.Run("article/category setup for contextual classification", setupArticleCategory) + t.Run("recipe setup for knn classification", setupRecipe) + t.Run("food types and recipes setup for zeroshot classification", setupFoodTypes) + + // tests + t.Run("contextual classification", contextualClassification) + t.Run("knn classification", knnClassification) + t.Run("zeroshot classification", zeroshotClassification) + + // tear down + deleteObjectClass(t, "Article") + deleteObjectClass(t, "Category") + deleteObjectClass(t, "Recipe") + deleteObjectClass(t, "RecipeType") + deleteObjectClass(t, "FoodType") + deleteObjectClass(t, "Recipes") +} + +func setupArticleCategory(t *testing.T) { + t.Run("schema setup", func(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: "Category", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + createObjectClass(t, &models.Class{ + Class: "Article", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "content", + DataType: []string{"text"}, + }, + { + Name: "OfCategory", + DataType: []string{"Category"}, + }, + }, + }) + }) + + t.Run("object setup - categories", func(t *testing.T) { + createObject(t, &models.Object{ + Class: "Category", + Properties: map[string]interface{}{ + "name": "Food and Drink", + }, + }) + createObject(t, &models.Object{ + Class: "Category", + Properties: map[string]interface{}{ + "name": 
"Computers and Technology", + }, + }) + createObject(t, &models.Object{ + Class: "Category", + Properties: map[string]interface{}{ + "name": "Politics", + }, + }) + }) + + t.Run("object setup - articles", func(t *testing.T) { + createObject(t, &models.Object{ + ID: article1, + Class: "Article", + Properties: map[string]interface{}{ + "content": "The new Apple Macbook 16 inch provides great performance", + }, + }) + createObject(t, &models.Object{ + ID: article2, + Class: "Article", + Properties: map[string]interface{}{ + "content": "I love eating ice cream with my t-bone steak", + }, + }) + createObject(t, &models.Object{ + ID: article3, + Class: "Article", + Properties: map[string]interface{}{ + "content": "Barack Obama was the 44th president of the united states", + }, + }) + }) + + assertGetObjectEventually(t, "92f05097-6371-499c-a0fe-3e60ae16fe3d") +} + +func setupRecipe(t *testing.T) { + t.Run("schema setup", func(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: "RecipeType", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + createObjectClass(t, &models.Class{ + Class: "Recipe", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "content", + DataType: []string{"text"}, + }, + { + Name: "OfType", + DataType: []string{"RecipeType"}, + }, + }, + }) + }) + + t.Run("object setup - recipe types", func(t *testing.T) { + createObject(t, &models.Object{ + Class: "RecipeType", + ID: recipeTypeSavory, + Properties: map[string]interface{}{ + "name": "Savory", + }, + }) + + createObject(t, &models.Object{ + Class: "RecipeType", + ID: recipeTypeSweet, + Properties: 
map[string]interface{}{ + "name": "Sweet", + }, + }) + }) + + t.Run("object setup - articles", func(t *testing.T) { + createObject(t, &models.Object{ + Class: "Recipe", + Properties: map[string]interface{}{ + "content": "Mix two eggs with milk and 7 grams of sugar, bake in the oven at 200 degrees", + "ofType": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", recipeTypeSweet), + }, + }, + }, + }) + + createObject(t, &models.Object{ + Class: "Recipe", + Properties: map[string]interface{}{ + "content": "Sautee the apples with sugar and add a dash of milk.", + "ofType": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", recipeTypeSweet), + }, + }, + }, + }) + + createObject(t, &models.Object{ + Class: "Recipe", + Properties: map[string]interface{}{ + "content": "Mix butter, cream and sugar. Make eggwhites fluffy and mix with the batter", + "ofType": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", recipeTypeSweet), + }, + }, + }, + }) + + createObject(t, &models.Object{ + Class: "Recipe", + Properties: map[string]interface{}{ + "content": "Fry the steak in the pan, then sautee the onions in the same pan", + "ofType": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", recipeTypeSavory), + }, + }, + }, + }) + + createObject(t, &models.Object{ + Class: "Recipe", + Properties: map[string]interface{}{ + "content": "Cut the potatoes in half and add salt and pepper. 
Serve with the meat.", + "ofType": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", recipeTypeSavory), + }, + }, + }, + }) + + createObject(t, &models.Object{ + Class: "Recipe", + Properties: map[string]interface{}{ + "content": "Put the pasta and sauce mix in the oven, top with plenty of cheese", + "ofType": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", recipeTypeSavory), + }, + }, + }, + }) + + createObject(t, &models.Object{ + ID: unclassifiedSavory, + Class: "Recipe", + Properties: map[string]interface{}{ + "content": "Serve the steak with fries and ketchup.", + }, + }) + + createObject(t, &models.Object{ + ID: unclassifiedSweet, + Class: "Recipe", + Properties: map[string]interface{}{ + "content": "Whisk the cream, add sugar and serve with strawberries", + }, + }) + }) + + assertGetObjectEventually(t, unclassifiedSweet) +} + +func setupFoodTypes(t *testing.T) { + t.Run("schema setup", func(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: "FoodType", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "text", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + createObjectClass(t, &models.Class{ + Class: "Recipes", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "text", + DataType: []string{"text"}, + }, + { + Name: "ofFoodType", + DataType: []string{"FoodType"}, + }, + }, + }) + }) + + t.Run("object setup - food types", func(t *testing.T) { + createObject(t, &models.Object{ + Class: "FoodType", + ID: foodTypeIceCream, + Properties: map[string]interface{}{ + "text": "Ice cream", + }, + }) + + createObject(t, &models.Object{ + Class: "FoodType", 
+ ID: foodTypeMeat, + Properties: map[string]interface{}{ + "text": "Meat", + }, + }) + }) + + t.Run("object setup - recipes", func(t *testing.T) { + createObject(t, &models.Object{ + Class: "Recipes", + ID: unclassifiedSteak, + Properties: map[string]interface{}{ + "text": "Cut the steak in half and put it into pan", + }, + }) + + createObject(t, &models.Object{ + Class: "Recipes", + ID: unclassifiedIceCreams, + Properties: map[string]interface{}{ + "text": "There are flavors of vanilla, chocolate and strawberry", + }, + }) + }) +} + +func createObjectClass(t *testing.T, class *models.Class) { + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(class) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) +} + +func createObject(t *testing.T, object *models.Object) { + params := objects.NewObjectsCreateParams().WithBody(object) + resp, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) +} + +func deleteObjectClass(t *testing.T, class string) { + delParams := clschema.NewSchemaObjectsDeleteParams().WithClassName(class) + delRes, err := helper.Client(t).Schema.SchemaObjectsDelete(delParams, nil) + helper.AssertRequestOk(t, delRes, err, nil) +} + +func assertGetObjectEventually(t *testing.T, uuid strfmt.UUID) *models.Object { + var ( + resp *objects.ObjectsGetOK + err error + ) + + checkThunk := func() interface{} { + resp, err = helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams().WithID(uuid), nil) + return err == nil + } + + helper.AssertEventuallyEqual(t, true, checkThunk) + + var object *models.Object + + helper.AssertRequestOk(t, resp, err, func() { + object = resp.Payload + }) + + return object +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/zeroshot_classification_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/zeroshot_classification_test.go new file 
mode 100644 index 0000000000000000000000000000000000000000..360f0fb66f8d8770e50eacb400e13c61a4552d23 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/classifications/zeroshot_classification_test.go @@ -0,0 +1,99 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "testing" + "time" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/classifications" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func zeroshotClassification(t *testing.T) { + var id strfmt.UUID + + t.Run("start the classification and wait for completion", func(t *testing.T) { + res, err := helper.Client(t).Classifications.ClassificationsPost( + classifications.NewClassificationsPostParams().WithParams(&models.Classification{ + Class: "Recipes", + ClassifyProperties: []string{"ofFoodType"}, + BasedOnProperties: []string{"text"}, + Type: "zeroshot", + }), nil) + require.Nil(t, err) + id = res.Payload.ID + + // wait for classification to be completed + helper.AssertEventuallyEqualWithFrequencyAndTimeout(t, "completed", + func() interface{} { + res, err := helper.Client(t).Classifications.ClassificationsGet( + classifications.NewClassificationsGetParams().WithID(id.String()), nil) + + require.Nil(t, err) + return res.Payload.Status + }, 100*time.Millisecond, 15*time.Second) + }) + + t.Run("assure changes present", func(t *testing.T) { + // wait for latest changes to be indexed / wait for consistency + helper.AssertEventuallyEqual(t, true, func() interface{} { + res, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams(). 
+ WithID(unclassifiedSteak), nil) + require.Nil(t, err) + return res.Payload.Properties.(map[string]interface{})["ofFoodType"] != nil + }) + helper.AssertEventuallyEqual(t, true, func() interface{} { + res, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams(). + WithID(unclassifiedIceCreams), nil) + require.Nil(t, err) + return res.Payload.Properties.(map[string]interface{})["ofFoodType"] != nil + }) + }) + + t.Run("assure proper classification present", func(t *testing.T) { + // wait for latest changes to be indexed / wait for consistency + helper.AssertEventuallyEqual(t, true, func() interface{} { + res, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams(). + WithID(unclassifiedSteak), nil) + require.Nil(t, err) + return checkOfFoodTypeRef(res.Payload.Properties, foodTypeMeat) + }) + helper.AssertEventuallyEqual(t, true, func() interface{} { + res, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams(). + WithID(unclassifiedIceCreams), nil) + require.Nil(t, err) + return checkOfFoodTypeRef(res.Payload.Properties, foodTypeIceCream) + }) + }) +} + +func checkOfFoodTypeRef(properties interface{}, id strfmt.UUID) bool { + ofFoodType, ok := properties.(map[string]interface{})["ofFoodType"].([]interface{}) + if !ok || len(ofFoodType) == 0 { + return false + } + ofFoodTypeMap, ok := ofFoodType[0].(map[string]interface{}) + if !ok { + return false + } + beacon, ok := ofFoodTypeMap["beacon"] + if !ok { + return false + } + return beacon == fmt.Sprintf("weaviate://localhost/FoodType/%s", id) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/cluster_api_auth/cluster_api_auth_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/cluster_api_auth/cluster_api_auth_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e5f24410d590aa04929ca00aca79093f0041b772 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/cluster_api_auth/cluster_api_auth_test.go 
@@ -0,0 +1,87 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/nodes" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + "github.com/weaviate/weaviate/test/helper/sample-schema/books" +) + +func TestClusterAPIAuth(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + compose, err := docker.New().With3NodeCluster(). + WithWeaviateBasicAuth("user", "pass"). + WithText2VecContextionary(). + Start(ctx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + + t.Run("sanity checks", func(t *testing.T) { + t.Run("check nodes", func(t *testing.T) { + resp, err := helper.Client(t).Nodes.NodesGet(nodes.NewNodesGetParams(), nil) + require.Nil(t, err) + + nodeStatusResp := resp.GetPayload() + require.NotNil(t, nodeStatusResp) + + nodes := nodeStatusResp.Nodes + require.NotNil(t, nodes) + require.Len(t, nodes, 3) + }) + + booksClass := books.ClassContextionaryVectorizer() + helper.CreateClass(t, booksClass) + defer helper.DeleteClass(t, booksClass.Class) + + t.Run("import data", func(t *testing.T) { + helper.CreateObjectsBatch(t, books.Objects()) + }) + + t.Run("nearText query", func(t *testing.T) { + query := ` + { + Get { + Books( + nearText: { + concepts: ["Frank Herbert"] + } + ){ + title + } + } + }` + result := graphqlhelper.AssertGraphQL(t, 
helper.RootAuth, query) + books := result.Get("Get", "Books").AsSlice() + require.True(t, len(books) > 0) + results, ok := books[0].(map[string]interface{}) + require.True(t, ok) + assert.True(t, results["title"] != nil) + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/fixtures/actions.json b/platform/dbops/binaries/weaviate-src/test/acceptance/fixtures/actions.json new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/fixtures/things.json b/platform/dbops/binaries/weaviate-src/test/acceptance/fixtures/things.json new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/aggregate_response_assert.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/aggregate_response_assert.go new file mode 100644 index 0000000000000000000000000000000000000000..7b29571c8c91b398ae1331f74aca8dbe4c67e986 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/aggregate_response_assert.go @@ -0,0 +1,448 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/schema" +) + +const delta = 0.00001 + +type assertFunc func(response map[string]interface{}) bool + +type aggregateResponseAssert struct { + t *testing.T + assert *assert.Assertions +} + +func newAggregateResponseAssert(t *testing.T) *aggregateResponseAssert { + return &aggregateResponseAssert{t, assert.New(t)} +} + +func (a *aggregateResponseAssert) meta(count int64) assertFunc { + return func(response map[string]interface{}) bool { + metaKey := "meta" + if !a.assert.Contains(response, metaKey) { + return false + } + return a.hasInt(response[metaKey].(map[string]interface{}), metaKey, "count", count) + } +} + +func (a *aggregateResponseAssert) groupedBy(value string, path ...interface{}) assertFunc { + return func(response map[string]interface{}) bool { + groupedByKey := "groupedBy" + if !a.assert.Contains(response, groupedByKey) { + return false + } + aggMap := response[groupedByKey].(map[string]interface{}) + return combinedAssert( + a.hasString(aggMap, groupedByKey, "value", value), + a.hasArray(aggMap, groupedByKey, "path", path), + ) + } +} + +func (a *aggregateResponseAssert) pointingTo(propName string, path ...interface{}) assertFunc { + return func(response map[string]interface{}) bool { + if !a.assert.Contains(response, propName) { + return false + } + aggMap := response[propName].(map[string]interface{}) + return a.hasArray(aggMap, propName, "pointingTo", path) + } +} + +func (a *aggregateResponseAssert) typedBoolean(dataType schema.DataType, propName string, + count, totalFalse, totalTrue int64, + percentageFalse, percentageTrue float64, +) assertFunc { + return func(response map[string]interface{}) bool { + if !a.assert.Contains(response, propName) { + return false + } + aggMap := response[propName].(map[string]interface{}) + return combinedAssert( + a.hasInt(aggMap, 
propName, "count", count), + a.hasInt(aggMap, propName, "totalFalse", totalFalse), + a.hasInt(aggMap, propName, "totalTrue", totalTrue), + a.hasNumber(aggMap, propName, "percentageFalse", percentageFalse), + a.hasNumber(aggMap, propName, "percentageTrue", percentageTrue), + a.hasString(aggMap, propName, "type", string(dataType)), + ) + } +} + +func (a *aggregateResponseAssert) booleanArray(propName string, + count, totalFalse, totalTrue int64, + percentageFalse, percentageTrue float64, +) assertFunc { + return a.typedBoolean(schema.DataTypeBooleanArray, propName, count, totalFalse, totalTrue, + percentageFalse, percentageTrue) +} + +func (a *aggregateResponseAssert) boolean(propName string, + count, totalFalse, totalTrue int64, + percentageFalse, percentageTrue float64, +) assertFunc { + return a.typedBoolean(schema.DataTypeBoolean, propName, count, totalFalse, totalTrue, + percentageFalse, percentageTrue) +} + +func (a *aggregateResponseAssert) typedBoolean0(dataType schema.DataType, propName string) assertFunc { + return func(response map[string]interface{}) bool { + if !a.assert.Contains(response, propName) { + return false + } + aggMap := response[propName].(map[string]interface{}) + return combinedAssert( + a.hasInt(aggMap, propName, "count", 0), + a.hasInt(aggMap, propName, "totalFalse", 0), + a.hasInt(aggMap, propName, "totalTrue", 0), + a.hasNil(aggMap, propName, "percentageFalse"), + a.hasNil(aggMap, propName, "percentageTrue"), + a.hasString(aggMap, propName, "type", string(dataType)), + ) + } +} + +func (a *aggregateResponseAssert) booleanArray0(propName string) assertFunc { + return a.typedBoolean0(schema.DataTypeBooleanArray, propName) +} + +func (a *aggregateResponseAssert) boolean0(propName string) assertFunc { + return a.typedBoolean0(schema.DataTypeBoolean, propName) +} + +func (a *aggregateResponseAssert) typedInts(dataType schema.DataType, propName string, + count, maximum, minimum, mode, sum int64, + median, mean float64, +) assertFunc { + 
return func(response map[string]interface{}) bool { + if !a.assert.Contains(response, propName) { + return false + } + aggMap := response[propName].(map[string]interface{}) + return combinedAssert( + a.hasInt(aggMap, propName, "count", count), + a.hasInt(aggMap, propName, "maximum", maximum), + a.hasInt(aggMap, propName, "minimum", minimum), + a.hasInt(aggMap, propName, "mode", mode), + a.hasInt(aggMap, propName, "sum", sum), + a.hasNumber(aggMap, propName, "median", median), + a.hasNumber(aggMap, propName, "mean", mean), + a.hasString(aggMap, propName, "type", string(dataType)), + ) + } +} + +func (a *aggregateResponseAssert) intArray(propName string, + count, maximum, minimum, mode, sum int64, + median, mean float64, +) assertFunc { + return a.typedInts(schema.DataTypeIntArray, propName, count, maximum, minimum, + mode, sum, median, mean) +} + +func (a *aggregateResponseAssert) int(propName string, + count, maximum, minimum, mode, sum int64, + median, mean float64, +) assertFunc { + return a.typedInts(schema.DataTypeInt, propName, count, maximum, minimum, + mode, sum, median, mean) +} + +func (a *aggregateResponseAssert) typedInts0(dataType schema.DataType, propName string) assertFunc { + return func(response map[string]interface{}) bool { + if !a.assert.Contains(response, propName) { + return false + } + aggMap := response[propName].(map[string]interface{}) + return combinedAssert( + a.hasInt(aggMap, propName, "count", 0), + a.hasNil(aggMap, propName, "maximum"), + a.hasNil(aggMap, propName, "minimum"), + a.hasNil(aggMap, propName, "mode"), + a.hasNil(aggMap, propName, "sum"), + a.hasNil(aggMap, propName, "median"), + a.hasNil(aggMap, propName, "mean"), + a.hasString(aggMap, propName, "type", string(dataType)), + ) + } +} + +func (a *aggregateResponseAssert) intArray0(propName string) assertFunc { + return a.typedInts0(schema.DataTypeIntArray, propName) +} + +func (a *aggregateResponseAssert) int0(propName string) assertFunc { + return 
a.typedInts0(schema.DataTypeInt, propName) +} + +func (a *aggregateResponseAssert) typedNumbers(dataType schema.DataType, propName string, + count int64, + maximum, minimum, mode, sum, median, mean float64, +) assertFunc { + return func(response map[string]interface{}) bool { + if !a.assert.Contains(response, propName) { + return false + } + aggMap := response[propName].(map[string]interface{}) + return combinedAssert( + a.hasInt(aggMap, propName, "count", count), + a.hasNumber(aggMap, propName, "maximum", maximum), + a.hasNumber(aggMap, propName, "minimum", minimum), + a.hasNumber(aggMap, propName, "mode", mode), + a.hasNumber(aggMap, propName, "sum", sum), + a.hasNumber(aggMap, propName, "median", median), + a.hasNumber(aggMap, propName, "mean", mean), + a.hasString(aggMap, propName, "type", string(dataType)), + ) + } +} + +func (a *aggregateResponseAssert) numberArray(propName string, + count int64, + maximum, minimum, mode, sum, median, mean float64, +) assertFunc { + return a.typedNumbers(schema.DataTypeNumberArray, propName, count, maximum, minimum, + mode, sum, median, mean) +} + +func (a *aggregateResponseAssert) number(propName string, + count int64, + maximum, minimum, mode, sum, median, mean float64, +) assertFunc { + return a.typedNumbers(schema.DataTypeNumber, propName, count, maximum, minimum, + mode, sum, median, mean) +} + +func (a *aggregateResponseAssert) typedNumbers0(dataType schema.DataType, propName string) assertFunc { + return func(response map[string]interface{}) bool { + if !a.assert.Contains(response, propName) { + return false + } + aggMap := response[propName].(map[string]interface{}) + return combinedAssert( + a.hasInt(aggMap, propName, "count", 0), + a.hasNil(aggMap, propName, "maximum"), + a.hasNil(aggMap, propName, "minimum"), + a.hasNil(aggMap, propName, "mode"), + a.hasNil(aggMap, propName, "sum"), + a.hasNil(aggMap, propName, "median"), + a.hasNil(aggMap, propName, "mean"), + a.hasString(aggMap, propName, "type", 
string(dataType)), + ) + } +} + +func (a *aggregateResponseAssert) numberArray0(propName string) assertFunc { + return a.typedNumbers0(schema.DataTypeNumberArray, propName) +} + +func (a *aggregateResponseAssert) number0(propName string) assertFunc { + return a.typedNumbers0(schema.DataTypeNumber, propName) +} + +func (a *aggregateResponseAssert) dateArray(propName string, count int64) assertFunc { + return a.date(propName, count) +} + +func (a *aggregateResponseAssert) date(propName string, count int64) assertFunc { + return func(response map[string]interface{}) bool { + if !a.assert.Contains(response, propName) { + return false + } + return a.hasInt(response[propName].(map[string]interface{}), propName, "count", count) + } +} + +func (a *aggregateResponseAssert) dateArray0(propName string) assertFunc { + return a.date(propName, 0) +} + +func (a *aggregateResponseAssert) date0(propName string) assertFunc { + return a.date(propName, 0) +} + +func (a *aggregateResponseAssert) typedStrings(dataType schema.DataType, propName string, + count int64, + values []string, occurrences []int64, +) assertFunc { + return func(response map[string]interface{}) bool { + if !a.assert.Contains(response, propName) { + return false + } + aggMap := response[propName].(map[string]interface{}) + return combinedAssert( + a.hasInt(aggMap, propName, "count", count), + a.hasString(aggMap, propName, "type", string(dataType)), + a.hasOccurrences(aggMap, propName, values, occurrences), + ) + } +} + +func (a *aggregateResponseAssert) typedStrings0(dataType schema.DataType, propName string) assertFunc { + return func(response map[string]interface{}) bool { + if !a.assert.Contains(response, propName) { + return false + } + aggMap := response[propName].(map[string]interface{}) + return combinedAssert( + a.hasInt(aggMap, propName, "count", 0), + a.hasString(aggMap, propName, "type", string(dataType)), + a.hasOccurrences(aggMap, propName, nil, nil), + ) + } +} + +func (a *aggregateResponseAssert) 
textArray(propName string, + count int64, + values []string, occurrences []int64, +) assertFunc { + return a.typedStrings(schema.DataTypeTextArray, propName, count, values, occurrences) +} + +func (a *aggregateResponseAssert) text(propName string, + count int64, + values []string, occurrences []int64, +) assertFunc { + return a.typedStrings(schema.DataTypeText, propName, count, values, occurrences) +} + +func (a *aggregateResponseAssert) textArray0(propName string) assertFunc { + return a.typedStrings0(schema.DataTypeTextArray, propName) +} + +func (a *aggregateResponseAssert) text0(propName string) assertFunc { + return a.typedStrings0(schema.DataTypeText, propName) +} + +func (a *aggregateResponseAssert) hasOccurrences(parentMap map[string]interface{}, + parentKey string, values []string, occurrences []int64, +) bool { + key := "topOccurrences" + to, exists := parentMap[key] + if !exists { + return a.assert.Fail(fmt.Sprintf("'%s' does not have '%s'\n%#v", parentKey, key, parentMap)) + } + + toArr := to.([]interface{}) + assertResults := make([]bool, len(values)) + for i := range values { + key := fmt.Sprintf("%s.%s[%d]", parentKey, key, i) + toSingle := toArr[i].(map[string]interface{}) + assertResults[i] = combinedAssert( + a.hasString(toSingle, key, "value", values[i]), + a.hasInt(toSingle, key, "occurs", occurrences[i]), + ) + } + return combinedAssert(assertResults...) 
+} + +func (a *aggregateResponseAssert) hasNumber(parentMap map[string]interface{}, + parentKey, key string, expectedValue float64, +) bool { + v, exist := parentMap[key] + if !exist { + return a.assert.Fail(fmt.Sprintf("'%s' does not have '%s'\n%#v", parentKey, key, parentMap)) + } + if v == nil { + return a.assert.Fail(fmt.Sprintf("'%s.%s' is nil", parentKey, key)) + } + if v, ok := v.(json.Number); ok { + if value, err := v.Float64(); err == nil { + if a.assert.InDelta(expectedValue, value, delta) { + return true + } + return a.assert.Fail(fmt.Sprintf("'%s.%s' of %#v is not equal to %#v", parentKey, key, value, expectedValue)) + } + } + return a.assert.Fail(fmt.Sprintf("'%s.%s' of %#v is not equal to %#v", parentKey, key, v, expectedValue)) +} + +func (a *aggregateResponseAssert) hasInt(parentMap map[string]interface{}, + parentKey, key string, expectedValue int64, +) bool { + v, exist := parentMap[key] + if !exist { + return a.assert.Fail(fmt.Sprintf("'%s' does not have '%s'\n%#v", parentKey, key, parentMap)) + } + if v == nil { + return a.assert.Fail(fmt.Sprintf("'%s.%s' is nil", parentKey, key)) + } + if v, ok := v.(json.Number); ok { + if value, err := v.Int64(); err == nil { + if value == expectedValue { + return true + } + return a.assert.Fail(fmt.Sprintf("'%s.%s' of %#v is not equal to %#v", parentKey, key, value, expectedValue)) + } + } + return a.assert.Fail(fmt.Sprintf("'%s.%s' of %#v is not equal to %#v", parentKey, key, v, expectedValue)) +} + +func (a *aggregateResponseAssert) hasString(parentMap map[string]interface{}, + parentKey, key string, expectedValue string, +) bool { + v, exist := parentMap[key] + if !exist { + return a.assert.Fail(fmt.Sprintf("'%s' does not have '%s'\n%#v", parentKey, key, parentMap)) + } + if v == nil { + return a.assert.Fail(fmt.Sprintf("'%s.%s' is nil", parentKey, key)) + } + if v != expectedValue { + return a.assert.Fail(fmt.Sprintf("'%s.%s' of %#v is not equal to %#v", parentKey, key, v, expectedValue)) + } + return 
true +} + +func (a *aggregateResponseAssert) hasNil(parentMap map[string]interface{}, + parentKey, key string, +) bool { + v, exist := parentMap[key] + if !exist { + return a.assert.Fail(fmt.Sprintf("'%s' does not have '%s'\n%#v", parentKey, key, parentMap)) + } + if v != nil { + return a.assert.Fail(fmt.Sprintf("'%s.%s' is not nil", parentKey, key)) + } + return true +} + +func (a *aggregateResponseAssert) hasArray(parentMap map[string]interface{}, + parentKey, key string, expectedValue []interface{}, +) bool { + v, exist := parentMap[key] + if !exist { + return a.assert.Fail(fmt.Sprintf("'%s' does not have '%s'\n%#v", parentKey, key, parentMap)) + } + if !a.assert.Equal(expectedValue, v) { + return a.assert.Fail(fmt.Sprintf("'%s.%s' of %#v is not equal to %#v", parentKey, key, v, expectedValue)) + } + return true +} + +func combinedAssert(assertResults ...bool) bool { + for _, assertResult := range assertResults { + if !assertResult { + return false + } + } + return true +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/json_helper.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/json_helper.go new file mode 100644 index 0000000000000000000000000000000000000000..9d30e0ff59e3b18fe97f87fffbb0fe2b98669d94 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/json_helper.go @@ -0,0 +1,26 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" +) + +func parseJSONSlice(text string) []interface{} { + var result []interface{} + err := json.Unmarshal([]byte(text), &result) + if err != nil { + panic(err) + } + + return result +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_hybrid_search_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_hybrid_search_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5adc6f828fc59621fddfba9a4236a194d69c0f2f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_hybrid_search_test.go @@ -0,0 +1,221 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +func aggregationWithHybridSearch(t *testing.T) { + t.Run("without search vector", func(t *testing.T) { + query := ` + { + Aggregate { + Company + ( + objectLimit: 30 + hybrid: { + alpha: 0.5 + query: "Apple" + } + ) + { + name { + topOccurrences { + value + } + } + } + } + }` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Aggregate", "Company").AsSlice() + require.Len(t, result, 1) + topOccur := result[0].(map[string]interface{})["name"].(map[string]interface{})["topOccurrences"].([]interface{}) + require.Len(t, topOccur, 5) + assert.Contains(t, topOccur, map[string]interface{}{"value": "Apple"}) + assert.Contains(t, topOccur, map[string]interface{}{"value": "Apple Inc."}) + assert.Contains(t, topOccur, map[string]interface{}{"value": "Apple Incorporated"}) + assert.Contains(t, topOccur, map[string]interface{}{"value": "Google"}) + assert.Contains(t, topOccur, map[string]interface{}{"value": "Google Inc."}) + }) + + t.Run("with grouping, sparse search only", func(t *testing.T) { + query := ` + { + Aggregate { + Company + ( + objectLimit: 30 + groupBy: "name" + hybrid: { + alpha: 0 + query: "Google" + } + ) + { + name { + topOccurrences { + value + } + } + } + } + }` + + type object = map[string]interface{} + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Aggregate", "Company").AsSlice() + require.Len(t, result, 3) + assert.Contains(t, result, object{ + "name": object{ + "topOccurrences": []interface{}{ + object{"value": "Google"}, + }, + }, + }) + assert.Contains(t, result, object{ + "name": object{ + "topOccurrences": []interface{}{ + object{"value": "Google Inc."}, + }, + }, + }) + assert.Contains(t, result, object{ + "name": object{ + 
"topOccurrences": []interface{}{ + object{"value": "Google Incorporated"}, + }, + }, + }) + }) + + t.Run("with grouping, nearText", func(t *testing.T) { + query := ` + { + Aggregate { + Company + ( + objectLimit: 30 + groupBy: "name" + hybrid: { + alpha: 0.5 + query: "" + searches: { + nearText: { + concepts: ["Google"] + } + } + } + ) + { + name { + topOccurrences { + value + } + } + } + } + }` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Aggregate", "Company").AsSlice() + require.Len(t, result, 9) + }) + + t.Run("with grouping, and nearText, moveTo", func(t *testing.T) { + query := ` + { + Aggregate { + Company + ( + objectLimit: 30 + groupBy: "name" + hybrid: { + alpha: 0.5 + query: "" + searches: { + nearText: { + concepts: ["Google"] + certainty: 0.4, + moveTo: { + concepts:["positive"], + force: 0.5 + }, + moveAwayFrom: { + concepts:["epic"], + force: 0.25 + } + } + } + } + ) + { + name { + topOccurrences { + value + } + } + } + } + }` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Aggregate", "Company").AsSlice() + require.Len(t, result, 9) + }) + + t.Run("with grouping, nearVector and nearText conflict", func(t *testing.T) { + query := ` + { + Aggregate { + Company + ( + objectLimit: 30 + groupBy: "name" + hybrid: { + alpha: 0.5 + query: "" + searches: { + nearText: { + concepts: ["Google"] + certainty: 0.4, + moveTo: { + concepts:["positive"], + force: 0.5 + }, + moveAwayFrom: { + concepts:["epic"], + force: 0.25 + } + } + nearVector: { + vector: [0.1, 0.2, 0.3] + } + } + } + ) + { + name { + topOccurrences { + value + } + } + } + } + }` + + graphqlhelper.ErrorGraphQL(t, helper.RootAuth, query) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_matrix_groupby_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_matrix_groupby_test.go new file mode 100644 index 
0000000000000000000000000000000000000000..55d1f3d6abfa35782238c6efee0c907d3407c6a5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_matrix_groupby_test.go @@ -0,0 +1,1990 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +func aggregateArrayClassWithGroupByTest(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateArrayClassTestCases{} + + t.Run("aggregate ArrayClass with group by texts", func(t *testing.T) { + expectedAllResultsAssertions := map[string][]assertFunc{ + "Alpha": { + asserts.groupedBy("Alpha", "texts"), + asserts.meta(4), + asserts.booleanArray("booleans", 10, 4, 6, 0.4, 0.6), + asserts.textArray("texts", 10, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{4, 3, 2, 1}), + asserts.numberArray("numbers", 10, 4, 1, 1, 20, 2, 2), + asserts.intArray("ints", 10, 104, 101, 101, 1020, 102, 102), + asserts.dateArray("dates", 10), + }, + "Bravo": { + asserts.groupedBy("Bravo", "texts"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{3, 3, 2, 1}), + asserts.numberArray("numbers", 9, 4, 1, 1, 19, 2, 2.111111111111111), + asserts.intArray("ints", 9, 104, 101, 101, 919, 102, 102.11111111111111), + asserts.dateArray("dates", 9), + }, + "Charlie": { + asserts.groupedBy("Charlie", "texts"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 
0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "Delta": { + asserts.groupedBy("Delta", "texts"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{1, 1, 1, 1}), + asserts.numberArray("numbers", 4, 4, 1, 1, 10, 2.5, 2.5), + asserts.intArray("ints", 4, 104, 101, 101, 410, 102.5, 102.5), + asserts.dateArray("dates", 4), + }, + } + expectedResultsWithDataAssertions := map[string][]assertFunc{ + "Alpha": { + asserts.groupedBy("Alpha", "texts"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "Bravo": { + asserts.groupedBy("Bravo", "texts"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "Charlie": { + asserts.groupedBy("Charlie", "texts"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 
2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "Delta": { + asserts.groupedBy("Delta", "texts"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{1, 1, 1, 1}), + asserts.numberArray("numbers", 4, 4, 1, 1, 10, 2.5, 2.5), + asserts.intArray("ints", 4, 104, 101, 101, 410, 102.5, 102.5), + asserts.dateArray("dates", 4), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateArrayClassQuery(tc.filters, "groupBy: [\"texts\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractArrayClassGroupByResult(result) + + 
assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate ArrayClass with group by ints", func(t *testing.T) { + expectedAllResultsAssertions := map[string][]assertFunc{ + "101": { + asserts.groupedBy("101", "ints"), + asserts.meta(4), + asserts.booleanArray("booleans", 10, 4, 6, 0.4, 0.6), + asserts.textArray("texts", 10, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{4, 3, 2, 1}), + asserts.numberArray("numbers", 10, 4, 1, 1, 20, 2, 2), + asserts.intArray("ints", 10, 104, 101, 101, 1020, 102, 102), + asserts.dateArray("dates", 10), + }, + "102": { + asserts.groupedBy("102", "ints"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{3, 3, 2, 1}), + asserts.numberArray("numbers", 9, 4, 1, 1, 19, 2, 2.111111111111111), + asserts.intArray("ints", 9, 104, 101, 101, 919, 102, 102.11111111111111), + asserts.dateArray("dates", 9), + }, + "103": { + asserts.groupedBy("103", "ints"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "104": { + asserts.groupedBy("104", "ints"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{1, 1, 1, 1}), + asserts.numberArray("numbers", 4, 4, 1, 1, 10, 2.5, 
2.5), + asserts.intArray("ints", 4, 104, 101, 101, 410, 102.5, 102.5), + asserts.dateArray("dates", 4), + }, + } + expectedResultsWithDataAssertions := map[string][]assertFunc{ + "101": { + asserts.groupedBy("101", "ints"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "102": { + asserts.groupedBy("102", "ints"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "103": { + asserts.groupedBy("103", "ints"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "104": { + asserts.groupedBy("104", "ints"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{1, 1, 1, 1}), + asserts.numberArray("numbers", 4, 4, 1, 1, 10, 2.5, 2.5), + asserts.intArray("ints", 4, 104, 101, 101, 410, 102.5, 102.5), + asserts.dateArray("dates", 4), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := 
map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateArrayClassQuery(tc.filters, "groupBy: [\"ints\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractArrayClassGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate ArrayClass with group by numbers", func(t *testing.T) { + expectedAllResultsAssertions := map[string][]assertFunc{ + "1": { + asserts.groupedBy("1", "numbers"), + asserts.meta(4), + asserts.booleanArray("booleans", 10, 4, 6, 0.4, 0.6), + asserts.textArray("texts", 10, []string{"Alpha", "Bravo", 
"Charlie", "Delta"}, []int64{4, 3, 2, 1}), + asserts.numberArray("numbers", 10, 4, 1, 1, 20, 2, 2), + asserts.intArray("ints", 10, 104, 101, 101, 1020, 102, 102), + asserts.dateArray("dates", 10), + }, + "2": { + asserts.groupedBy("2", "numbers"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{3, 3, 2, 1}), + asserts.numberArray("numbers", 9, 4, 1, 1, 19, 2, 2.111111111111111), + asserts.intArray("ints", 9, 104, 101, 101, 919, 102, 102.11111111111111), + asserts.dateArray("dates", 9), + }, + "3": { + asserts.groupedBy("3", "numbers"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "4": { + asserts.groupedBy("4", "numbers"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{1, 1, 1, 1}), + asserts.numberArray("numbers", 4, 4, 1, 1, 10, 2.5, 2.5), + asserts.intArray("ints", 4, 104, 101, 101, 410, 102.5, 102.5), + asserts.dateArray("dates", 4), + }, + } + expectedResultsWithDataAssertions := map[string][]assertFunc{ + "1": { + asserts.groupedBy("1", "numbers"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "2": { + 
asserts.groupedBy("2", "numbers"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "3": { + asserts.groupedBy("3", "numbers"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "4": { + asserts.groupedBy("4", "numbers"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{1, 1, 1, 1}), + asserts.numberArray("numbers", 4, 4, 1, 1, 10, 2.5, 2.5), + asserts.intArray("ints", 4, 104, 101, 101, 410, 102.5, 102.5), + asserts.dateArray("dates", 4), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + 
testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateArrayClassQuery(tc.filters, "groupBy: [\"numbers\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractArrayClassGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate ArrayClass with group by dates", func(t *testing.T) { + expectedAllResultsAssertions := map[string][]assertFunc{ + "2021-06-01T22:18:59.640162Z": { + asserts.groupedBy("2021-06-01T22:18:59.640162Z", "dates"), + asserts.meta(4), + asserts.booleanArray("booleans", 10, 4, 6, 0.4, 0.6), + asserts.textArray("texts", 10, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{4, 3, 2, 1}), + asserts.numberArray("numbers", 10, 4, 1, 1, 20, 2, 2), + asserts.intArray("ints", 10, 104, 101, 101, 1020, 102, 102), + asserts.dateArray("dates", 10), + }, + "2022-06-02T22:18:59.640162Z": { + asserts.groupedBy("2022-06-02T22:18:59.640162Z", "dates"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{3, 3, 2, 1}), + asserts.numberArray("numbers", 9, 4, 1, 1, 19, 2, 
2.111111111111111), + asserts.intArray("ints", 9, 104, 101, 101, 919, 102, 102.11111111111111), + asserts.dateArray("dates", 9), + }, + "2023-06-03T22:18:59.640162Z": { + asserts.groupedBy("2023-06-03T22:18:59.640162Z", "dates"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "2024-06-04T22:18:59.640162Z": { + asserts.groupedBy("2024-06-04T22:18:59.640162Z", "dates"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{1, 1, 1, 1}), + asserts.numberArray("numbers", 4, 4, 1, 1, 10, 2.5, 2.5), + asserts.intArray("ints", 4, 104, 101, 101, 410, 102.5, 102.5), + asserts.dateArray("dates", 4), + }, + } + expectedResultsWithDataAssertions := map[string][]assertFunc{ + "2021-06-01T22:18:59.640162Z": { + asserts.groupedBy("2021-06-01T22:18:59.640162Z", "dates"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "2022-06-02T22:18:59.640162Z": { + asserts.groupedBy("2022-06-02T22:18:59.640162Z", "dates"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + 
asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "2023-06-03T22:18:59.640162Z": { + asserts.groupedBy("2023-06-03T22:18:59.640162Z", "dates"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "2024-06-04T22:18:59.640162Z": { + asserts.groupedBy("2024-06-04T22:18:59.640162Z", "dates"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{1, 1, 1, 1}), + asserts.numberArray("numbers", 4, 4, 1, 1, 10, 2.5, 2.5), + asserts.intArray("ints", 4, 104, 101, 101, 410, 102.5, 102.5), + asserts.dateArray("dates", 4), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + 
testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateArrayClassQuery(tc.filters, "groupBy: [\"dates\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractArrayClassGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate ArrayClass with group by booleans", func(t *testing.T) { + expectedAllResultsAssertions := map[string][]assertFunc{ + "true": { + asserts.groupedBy("true", "booleans"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{3, 3, 2, 1}), + asserts.numberArray("numbers", 9, 4, 1, 1, 19, 2, 2.111111111111111), + asserts.intArray("ints", 9, 104, 101, 101, 919, 102, 102.11111111111111), + asserts.dateArray("dates", 9), + }, + "false": { + asserts.groupedBy("false", "booleans"), + asserts.meta(4), + asserts.booleanArray("booleans", 10, 4, 6, 0.4, 0.6), + asserts.textArray("texts", 10, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{4, 3, 2, 1}), + asserts.numberArray("numbers", 10, 4, 1, 1, 20, 2, 2), + asserts.intArray("ints", 10, 104, 101, 101, 1020, 102, 102), + asserts.dateArray("dates", 10), + }, + } + expectedResultsWithDataAssertions := map[string][]assertFunc{ + "true": { + asserts.groupedBy("true", "booleans"), + asserts.meta(2), + 
asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + "false": { + asserts.groupedBy("false", "booleans"), + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + 
testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateArrayClassQuery(tc.filters, "groupBy: [\"booleans\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractArrayClassGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) +} + +func aggregateDuplicatesClassWithGroupByTest(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateDuplicatesClassTestCases{} + + t.Run("aggregate DuplicatesClass with group by texts", func(t *testing.T) { + expectedAllResultsAssertions := map[string][]assertFunc{ + "Alpha": { + asserts.groupedBy("Alpha", "texts"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo"}, []int64{6, 3}), + asserts.numberArray("numbers", 9, 2, 1, 1, 12, 1, 1.3333333333333333), + asserts.intArray("ints", 9, 102, 101, 101, 912, 101, 101.33333333333333), + asserts.dateArray("dates", 9), + }, + "Bravo": { + asserts.groupedBy("Bravo", "texts"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo"}, []int64{6, 3}), + asserts.numberArray("numbers", 9, 2, 1, 1, 12, 1, 1.3333333333333333), + asserts.intArray("ints", 9, 102, 101, 101, 912, 101, 101.33333333333333), + asserts.dateArray("dates", 9), + }, + } + expectedSomeResultsAssertions := map[string][]assertFunc{ + "Alpha": { + asserts.groupedBy("Alpha", "texts"), + asserts.meta(1), + asserts.booleanArray("booleans", 
4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo"}, []int64{3, 1}), + asserts.numberArray("numbers", 4, 2, 1, 1, 5, 1, 1.25), + asserts.intArray("ints", 4, 102, 101, 101, 405, 101, 101.25), + asserts.dateArray("dates", 4), + }, + "Bravo": { + asserts.groupedBy("Bravo", "texts"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo"}, []int64{3, 1}), + asserts.numberArray("numbers", 4, 2, 1, 1, 5, 1, 1.25), + asserts.intArray("ints", 4, 102, 101, 101, 405, 101, 101.25), + asserts.dateArray("dates", 4), + }, + } + expectedNoResultsAssertsions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_SomeResults(expectedSomeResultsAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertsions), + } + + for _, tc := range testCases { + query := aggregateDuplicatesClassQuery(tc.filters, "groupBy: [\"texts\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractDuplicatesClassGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate DuplicatesClass with group by ints", func(t *testing.T) { + expectedAllResultsAssertions := map[string][]assertFunc{ + "101": { + asserts.groupedBy("101", "ints"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo"}, []int64{6, 3}), + 
asserts.numberArray("numbers", 9, 2, 1, 1, 12, 1, 1.3333333333333333), + asserts.intArray("ints", 9, 102, 101, 101, 912, 101, 101.33333333333333), + asserts.dateArray("dates", 9), + }, + "102": { + asserts.groupedBy("102", "ints"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo"}, []int64{6, 3}), + asserts.numberArray("numbers", 9, 2, 1, 1, 12, 1, 1.3333333333333333), + asserts.intArray("ints", 9, 102, 101, 101, 912, 101, 101.33333333333333), + asserts.dateArray("dates", 9), + }, + } + expectedSomeResultsAssertions := map[string][]assertFunc{ + "101": { + asserts.groupedBy("101", "ints"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo"}, []int64{3, 1}), + asserts.numberArray("numbers", 4, 2, 1, 1, 5, 1, 1.25), + asserts.intArray("ints", 4, 102, 101, 101, 405, 101, 101.25), + asserts.dateArray("dates", 4), + }, + "102": { + asserts.groupedBy("102", "ints"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo"}, []int64{3, 1}), + asserts.numberArray("numbers", 4, 2, 1, 1, 5, 1, 1.25), + asserts.intArray("ints", 4, 102, 101, 101, 405, 101, 101.25), + asserts.dateArray("dates", 4), + }, + } + expectedNoResultsAssertsions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_SomeResults(expectedSomeResultsAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertsions), + } + + for _, tc := range testCases { + query := aggregateDuplicatesClassQuery(tc.filters, "groupBy: [\"ints\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted 
:= extractDuplicatesClassGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate DuplicatesClass with group by numbers", func(t *testing.T) { + expectedAllResultsAssertions := map[string][]assertFunc{ + "1": { + asserts.groupedBy("1", "numbers"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo"}, []int64{6, 3}), + asserts.numberArray("numbers", 9, 2, 1, 1, 12, 1, 1.3333333333333333), + asserts.intArray("ints", 9, 102, 101, 101, 912, 101, 101.33333333333333), + asserts.dateArray("dates", 9), + }, + "2": { + asserts.groupedBy("2", "numbers"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo"}, []int64{6, 3}), + asserts.numberArray("numbers", 9, 2, 1, 1, 12, 1, 1.3333333333333333), + asserts.intArray("ints", 9, 102, 101, 101, 912, 101, 101.33333333333333), + asserts.dateArray("dates", 9), + }, + } + expectedSomeResultsAssertions := map[string][]assertFunc{ + "1": { + asserts.groupedBy("1", "numbers"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo"}, []int64{3, 1}), + asserts.numberArray("numbers", 4, 2, 1, 1, 5, 1, 1.25), + asserts.intArray("ints", 4, 102, 101, 101, 405, 101, 101.25), + asserts.dateArray("dates", 4), + }, + "2": { + asserts.groupedBy("2", "numbers"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo"}, []int64{3, 1}), + asserts.numberArray("numbers", 4, 
2, 1, 1, 5, 1, 1.25), + asserts.intArray("ints", 4, 102, 101, 101, 405, 101, 101.25), + asserts.dateArray("dates", 4), + }, + } + expectedNoResultsAssertsions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_SomeResults(expectedSomeResultsAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertsions), + } + + for _, tc := range testCases { + query := aggregateDuplicatesClassQuery(tc.filters, "groupBy: [\"numbers\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractDuplicatesClassGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate DuplicatesClass with group by dates as string", func(t *testing.T) { + expectedAllResultsAssertions := map[string][]assertFunc{ + "2021-06-01T22:18:59.640162Z": { + asserts.groupedBy("2021-06-01T22:18:59.640162Z", "dates"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo"}, []int64{6, 3}), + asserts.numberArray("numbers", 9, 2, 1, 1, 12, 1, 1.3333333333333333), + asserts.intArray("ints", 9, 102, 101, 101, 912, 101, 101.33333333333333), + asserts.dateArray("dates", 9), + }, + "2022-06-02T22:18:59.640162Z": { + asserts.groupedBy("2022-06-02T22:18:59.640162Z", "dates"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo"}, 
[]int64{6, 3}), + asserts.numberArray("numbers", 9, 2, 1, 1, 12, 1, 1.3333333333333333), + asserts.intArray("ints", 9, 102, 101, 101, 912, 101, 101.33333333333333), + asserts.dateArray("dates", 9), + }, + } + expectedSomeResultsAssertions := map[string][]assertFunc{ + "2021-06-01T22:18:59.640162Z": { + asserts.groupedBy("2021-06-01T22:18:59.640162Z", "dates"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo"}, []int64{3, 1}), + asserts.numberArray("numbers", 4, 2, 1, 1, 5, 1, 1.25), + asserts.intArray("ints", 4, 102, 101, 101, 405, 101, 101.25), + asserts.dateArray("dates", 4), + }, + "2022-06-02T22:18:59.640162Z": { + asserts.groupedBy("2022-06-02T22:18:59.640162Z", "dates"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo"}, []int64{3, 1}), + asserts.numberArray("numbers", 4, 2, 1, 1, 5, 1, 1.25), + asserts.intArray("ints", 4, 102, 101, 101, 405, 101, 101.25), + asserts.dateArray("dates", 4), + }, + } + expectedNoResultsAssertsions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_SomeResults(expectedSomeResultsAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertsions), + } + + for _, tc := range testCases { + query := aggregateDuplicatesClassQuery(tc.filters, "groupBy: [\"dates\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractDuplicatesClassGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, 
assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate DuplicatesClass with group by booleans", func(t *testing.T) { + expectedAllResultsAssertions := map[string][]assertFunc{ + "true": { + asserts.groupedBy("true", "booleans"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo"}, []int64{6, 3}), + asserts.numberArray("numbers", 9, 2, 1, 1, 12, 1, 1.3333333333333333), + asserts.intArray("ints", 9, 102, 101, 101, 912, 101, 101.33333333333333), + asserts.dateArray("dates", 9), + }, + "false": { + asserts.groupedBy("false", "booleans"), + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo"}, []int64{6, 3}), + asserts.numberArray("numbers", 9, 2, 1, 1, 12, 1, 1.3333333333333333), + asserts.intArray("ints", 9, 102, 101, 101, 912, 101, 101.33333333333333), + asserts.dateArray("dates", 9), + }, + } + expectedSomeResultsAssertions := map[string][]assertFunc{ + "true": { + asserts.groupedBy("true", "booleans"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo"}, []int64{3, 1}), + asserts.numberArray("numbers", 4, 2, 1, 1, 5, 1, 1.25), + asserts.intArray("ints", 4, 102, 101, 101, 405, 101, 101.25), + asserts.dateArray("dates", 4), + }, + "false": { + asserts.groupedBy("false", "booleans"), + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo"}, []int64{3, 1}), + asserts.numberArray("numbers", 4, 2, 1, 1, 5, 1, 1.25), + asserts.intArray("ints", 4, 102, 101, 101, 405, 101, 101.25), + asserts.dateArray("dates", 4), + }, + } + expectedNoResultsAssertsions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + 
testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_SomeResults(expectedSomeResultsAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertsions), + } + + for _, tc := range testCases { + query := aggregateDuplicatesClassQuery(tc.filters, "groupBy: [\"booleans\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractDuplicatesClassGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) +} + +func aggregateCityClassWithGroupByTest(t *testing.T) { + t.Run("aggregate City with group by city area", func(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateCityTestCases{} + + expectedAllResultsAssertions := map[string][]assertFunc{ + "891.96": { + asserts.groupedBy("891.96", "cityArea"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.96, 891.96, 891.96, 891.96, 891.96, 891.96), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyBerlin}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 1, []string{"German Historical Museum"}, []int64{1}), + asserts.text("name", 1, []string{"Berlin"}, []int64{1}), + asserts.int("population", 1, 3470000, 3470000, 3470000, 3470000, 3470000, 3470000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "891.95": { + asserts.groupedBy("891.95", "cityArea"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.95, 891.95, 891.95, 891.95, 891.95, 891.95), + 
asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyAmsterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 2, []string{"Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1}), + asserts.text("name", 1, []string{"Amsterdam"}, []int64{1}), + asserts.int("population", 1, 1800000, 1800000, 1800000, 1800000, 1800000, 1800000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "217.22": { + asserts.groupedBy("217.22", "cityArea"), + asserts.meta(1), + asserts.number("cityArea", 1, 217.22, 217.22, 217.22, 217.22, 217.22, 217.22), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyDusseldorf}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Onomato", "Schiffahrt Museum", "Schlossturm"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Dusseldorf"}, []int64{1}), + asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "319.35": { + asserts.groupedBy("319.35", "cityArea"), + asserts.meta(1), + asserts.number("cityArea", 1, 319.35, 319.35, 319.35, 319.35, 319.35, 319.35), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyRotterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Museum Boijmans Van Beuningen", "Wereldmuseum", "Witte de With Center for Contemporary Art"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Rotterdam"}, []int64{1}), + asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithDataAssertions := 
map[string][]assertFunc{ + "891.96": { + asserts.groupedBy("891.96", "cityArea"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.96, 891.96, 891.96, 891.96, 891.96, 891.96), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyBerlin}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 1, []string{"German Historical Museum"}, []int64{1}), + asserts.text("name", 1, []string{"Berlin"}, []int64{1}), + asserts.int("population", 1, 3470000, 3470000, 3470000, 3470000, 3470000, 3470000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "891.95": { + asserts.groupedBy("891.95", "cityArea"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.95, 891.95, 891.95, 891.95, 891.95, 891.95), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyAmsterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 2, []string{"Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1}), + asserts.text("name", 1, []string{"Amsterdam"}, []int64{1}), + asserts.int("population", 1, 1800000, 1800000, 1800000, 1800000, 1800000, 1800000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + 
testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateCityQuery(tc.filters, "groupBy: [\"cityArea\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractCityGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate City with group by history", func(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateCityTestCases{} + + expectedAllResultsAssertions := map[string][]assertFunc{ + historyBerlin: { + asserts.groupedBy(historyBerlin, "history"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.96, 891.96, 891.96, 891.96, 891.96, 891.96), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyBerlin}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 1, []string{"German Historical Museum"}, []int64{1}), + asserts.text("name", 1, []string{"Berlin"}, []int64{1}), + asserts.int("population", 1, 3470000, 3470000, 3470000, 3470000, 3470000, 3470000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + 
asserts.pointingTo("inCountry", "Country"), + }, + historyAmsterdam: { + asserts.groupedBy(historyAmsterdam, "history"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.95, 891.95, 891.95, 891.95, 891.95, 891.95), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyAmsterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 2, []string{"Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1}), + asserts.text("name", 1, []string{"Amsterdam"}, []int64{1}), + asserts.int("population", 1, 1800000, 1800000, 1800000, 1800000, 1800000, 1800000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + historyDusseldorf: { + asserts.groupedBy(historyDusseldorf, "history"), + asserts.meta(1), + asserts.number("cityArea", 1, 217.22, 217.22, 217.22, 217.22, 217.22, 217.22), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyDusseldorf}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Onomato", "Schiffahrt Museum", "Schlossturm"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Dusseldorf"}, []int64{1}), + asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + historyRotterdam: { + asserts.groupedBy(historyRotterdam, "history"), + asserts.meta(1), + asserts.number("cityArea", 1, 319.35, 319.35, 319.35, 319.35, 319.35, 319.35), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyRotterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Museum Boijmans Van Beuningen", "Wereldmuseum", "Witte de With Center for Contemporary Art"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Rotterdam"}, []int64{1}), + 
asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithDataAssertions := map[string][]assertFunc{ + historyBerlin: { + asserts.groupedBy(historyBerlin, "history"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.96, 891.96, 891.96, 891.96, 891.96, 891.96), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyBerlin}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 1, []string{"German Historical Museum"}, []int64{1}), + asserts.text("name", 1, []string{"Berlin"}, []int64{1}), + asserts.int("population", 1, 3470000, 3470000, 3470000, 3470000, 3470000, 3470000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + historyAmsterdam: { + asserts.groupedBy(historyAmsterdam, "history"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.95, 891.95, 891.95, 891.95, 891.95, 891.95), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyAmsterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 2, []string{"Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1}), + asserts.text("name", 1, []string{"Amsterdam"}, []int64{1}), + asserts.int("population", 1, 1800000, 1800000, 1800000, 1800000, 1800000, 1800000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + 
testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateCityQuery(tc.filters, "groupBy: [\"history\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractCityGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate City with group by name", func(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateCityTestCases{} + + expectedAllResultsAssertions := map[string][]assertFunc{ + "Berlin": { + asserts.groupedBy("Berlin", "name"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.96, 891.96, 891.96, 891.96, 891.96, 891.96), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyBerlin}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + 
asserts.textArray("museums", 1, []string{"German Historical Museum"}, []int64{1}), + asserts.text("name", 1, []string{"Berlin"}, []int64{1}), + asserts.int("population", 1, 3470000, 3470000, 3470000, 3470000, 3470000, 3470000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Amsterdam": { + asserts.groupedBy("Amsterdam", "name"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.95, 891.95, 891.95, 891.95, 891.95, 891.95), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyAmsterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 2, []string{"Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1}), + asserts.text("name", 1, []string{"Amsterdam"}, []int64{1}), + asserts.int("population", 1, 1800000, 1800000, 1800000, 1800000, 1800000, 1800000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Dusseldorf": { + asserts.groupedBy("Dusseldorf", "name"), + asserts.meta(1), + asserts.number("cityArea", 1, 217.22, 217.22, 217.22, 217.22, 217.22, 217.22), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyDusseldorf}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Onomato", "Schiffahrt Museum", "Schlossturm"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Dusseldorf"}, []int64{1}), + asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Rotterdam": { + asserts.groupedBy("Rotterdam", "name"), + asserts.meta(1), + asserts.number("cityArea", 1, 319.35, 319.35, 319.35, 319.35, 319.35, 319.35), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyRotterdam}, []int64{1}), + 
asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Museum Boijmans Van Beuningen", "Wereldmuseum", "Witte de With Center for Contemporary Art"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Rotterdam"}, []int64{1}), + asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Missing Island": { + asserts.groupedBy("Missing Island", "name"), + asserts.meta(1), + asserts.number0("cityArea"), + asserts.date0("cityRights"), + asserts.text0("history"), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray0("museums"), + asserts.text("name", 1, []string{"Missing Island"}, []int64{1}), + asserts.int("population", 1, 0, 0, 0, 0, 0, 0), + asserts.textArray0("timezones"), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithDataAssertions := map[string][]assertFunc{ + "Berlin": { + asserts.groupedBy("Berlin", "name"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.96, 891.96, 891.96, 891.96, 891.96, 891.96), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyBerlin}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 1, []string{"German Historical Museum"}, []int64{1}), + asserts.text("name", 1, []string{"Berlin"}, []int64{1}), + asserts.int("population", 1, 3470000, 3470000, 3470000, 3470000, 3470000, 3470000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Amsterdam": { + asserts.groupedBy("Amsterdam", "name"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.95, 891.95, 891.95, 891.95, 891.95, 891.95), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyAmsterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 
2, []string{"Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1}), + asserts.text("name", 1, []string{"Amsterdam"}, []int64{1}), + asserts.int("population", 1, 1800000, 1800000, 1800000, 1800000, 1800000, 1800000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateCityQuery(tc.filters, "groupBy: [\"name\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractCityGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' 
not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate City with group by is capital", func(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateCityTestCases{} + + expectedAllResultsAssertions := map[string][]assertFunc{ + "true": { + asserts.groupedBy("true", "isCapital"), + asserts.meta(2), + asserts.number("cityArea", 2, 891.96, 891.95, 891.95, 1783.91, 891.955, 891.955), + asserts.date("cityRights", 2), + asserts.text("history", 2, []string{historyAmsterdam, historyBerlin}, []int64{1, 1}), + asserts.boolean("isCapital", 2, 0, 2, 0, 1), + asserts.textArray("museums", 3, []string{"German Historical Museum", "Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1, 1}), + asserts.text("name", 2, []string{"Amsterdam", "Berlin"}, []int64{1, 1}), + asserts.int("population", 2, 3470000, 1800000, 1800000, 5270000, 2635000, 2635000), + asserts.textArray("timezones", 4, []string{"CEST", "CET"}, []int64{2, 2}), + asserts.pointingTo("inCountry", "Country"), + }, + "false": { + asserts.groupedBy("false", "isCapital"), + asserts.meta(3), + asserts.number("cityArea", 2, 319.35, 217.22, 217.22, 536.57, 268.285, 268.285), + asserts.date("cityRights", 2), + asserts.text("history", 2, []string{historyRotterdam, historyDusseldorf}, []int64{1, 1}), + asserts.boolean("isCapital", 3, 3, 0, 1, 0), + asserts.textArray("museums", 6, []string{"Museum Boijmans Van Beuningen", "Onomato", "Schiffahrt Museum", "Schlossturm", "Wereldmuseum"}, []int64{1, 1, 1, 1, 1}), + asserts.text("name", 3, []string{"Dusseldorf", "Missing Island", "Rotterdam"}, []int64{1, 1, 1}), + asserts.int("population", 3, 600000, 0, 600000, 1200000, 600000, 400000), + asserts.textArray("timezones", 4, []string{"CEST", "CET"}, []int64{2, 2}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithDataAssertions := map[string][]assertFunc{ + "true": { + asserts.groupedBy("true", "isCapital"), + 
asserts.meta(2), + asserts.number("cityArea", 2, 891.96, 891.95, 891.95, 1783.91, 891.955, 891.955), + asserts.date("cityRights", 2), + asserts.text("history", 2, []string{historyAmsterdam, historyBerlin}, []int64{1, 1}), + asserts.boolean("isCapital", 2, 0, 2, 0, 1), + asserts.textArray("museums", 3, []string{"German Historical Museum", "Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1, 1}), + asserts.text("name", 2, []string{"Amsterdam", "Berlin"}, []int64{1, 1}), + asserts.int("population", 2, 3470000, 1800000, 1800000, 5270000, 2635000, 2635000), + asserts.textArray("timezones", 4, []string{"CEST", "CET"}, []int64{2, 2}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateCityQuery(tc.filters, "groupBy: [\"isCapital\"]") + + t.Run(tc.name, 
func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractCityGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate City with group by city rights", func(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateCityTestCases{} + + expectedAllResultsAssertions := map[string][]assertFunc{ + "1400-01-01T00:00:00+02:00": { + asserts.groupedBy("1400-01-01T00:00:00+02:00", "cityRights"), + asserts.meta(2), + asserts.number("cityArea", 2, 891.96, 891.95, 891.95, 1783.91, 891.955, 891.955), + asserts.date("cityRights", 2), + asserts.text("history", 2, []string{historyAmsterdam, historyBerlin}, []int64{1, 1}), + asserts.boolean("isCapital", 2, 0, 2, 0, 1), + asserts.textArray("museums", 3, []string{"German Historical Museum", "Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1, 1}), + asserts.text("name", 2, []string{"Amsterdam", "Berlin"}, []int64{1, 1}), + asserts.int("population", 2, 3470000, 1800000, 1800000, 5270000, 2635000, 2635000), + asserts.textArray("timezones", 4, []string{"CEST", "CET"}, []int64{2, 2}), + asserts.pointingTo("inCountry", "Country"), + }, + "1135-01-01T00:00:00+02:00": { + asserts.groupedBy("1135-01-01T00:00:00+02:00", "cityRights"), + asserts.meta(1), + asserts.number("cityArea", 1, 217.22, 217.22, 217.22, 217.22, 217.22, 217.22), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyDusseldorf}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Onomato", "Schiffahrt Museum", "Schlossturm"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Dusseldorf"}, []int64{1}), + 
asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "1283-01-01T00:00:00+02:00": { + asserts.groupedBy("1283-01-01T00:00:00+02:00", "cityRights"), + asserts.meta(1), + asserts.number("cityArea", 1, 319.35, 319.35, 319.35, 319.35, 319.35, 319.35), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyRotterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Museum Boijmans Van Beuningen", "Wereldmuseum", "Witte de With Center for Contemporary Art"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Rotterdam"}, []int64{1}), + asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithDataAssertions := map[string][]assertFunc{ + "1400-01-01T00:00:00+02:00": { + asserts.groupedBy("1400-01-01T00:00:00+02:00", "cityRights"), + asserts.meta(2), + asserts.number("cityArea", 2, 891.96, 891.95, 891.95, 1783.91, 891.955, 891.955), + asserts.date("cityRights", 2), + asserts.text("history", 2, []string{historyAmsterdam, historyBerlin}, []int64{1, 1}), + asserts.boolean("isCapital", 2, 0, 2, 0, 1), + asserts.textArray("museums", 3, []string{"German Historical Museum", "Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1, 1}), + asserts.text("name", 2, []string{"Amsterdam", "Berlin"}, []int64{1, 1}), + asserts.int("population", 2, 3470000, 1800000, 1800000, 5270000, 2635000, 2635000), + asserts.textArray("timezones", 4, []string{"CEST", "CET"}, []int64{2, 2}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ 
+ testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateCityQuery(tc.filters, "groupBy: [\"cityRights\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractCityGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate City with group by population", func(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateCityTestCases{} + + expectedAllResultsAssertions := map[string][]assertFunc{ + "600000": { + asserts.groupedBy("600000", "population"), + asserts.meta(2), + asserts.number("cityArea", 2, 319.35, 217.22, 217.22, 536.57, 268.285, 268.285), + 
asserts.date("cityRights", 2), + asserts.text("history", 2, []string{historyRotterdam, historyDusseldorf}, []int64{1, 1}), + asserts.boolean("isCapital", 2, 2, 0, 1, 0), + asserts.textArray("museums", 6, []string{"Museum Boijmans Van Beuningen", "Onomato", "Schiffahrt Museum", "Schlossturm", "Wereldmuseum"}, []int64{1, 1, 1, 1, 1}), + asserts.text("name", 2, []string{"Dusseldorf", "Rotterdam"}, []int64{1, 1}), + asserts.int("population", 2, 600000, 600000, 600000, 1200000, 600000, 600000), + asserts.textArray("timezones", 4, []string{"CEST", "CET"}, []int64{2, 2}), + asserts.pointingTo("inCountry", "Country"), + }, + "3.47e+06": { + asserts.groupedBy("3.47e+06", "population"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.96, 891.96, 891.96, 891.96, 891.96, 891.96), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyBerlin}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 1, []string{"German Historical Museum"}, []int64{1}), + asserts.text("name", 1, []string{"Berlin"}, []int64{1}), + asserts.int("population", 1, 3470000, 3470000, 3470000, 3470000, 3470000, 3470000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "1.8e+06": { + asserts.groupedBy("1.8e+06", "population"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.95, 891.95, 891.95, 891.95, 891.95, 891.95), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyAmsterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 2, []string{"Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1}), + asserts.text("name", 1, []string{"Amsterdam"}, []int64{1}), + asserts.int("population", 1, 1800000, 1800000, 1800000, 1800000, 1800000, 1800000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "0": { + 
asserts.groupedBy("0", "population"), + asserts.meta(1), + asserts.number0("cityArea"), + asserts.date0("cityRights"), + asserts.text0("history"), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray0("museums"), + asserts.text("name", 1, []string{"Missing Island"}, []int64{1}), + asserts.int("population", 1, 0, 0, 0, 0, 0, 0), + asserts.textArray0("timezones"), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithDataAssertions := map[string][]assertFunc{ + "3.47e+06": { + asserts.groupedBy("3.47e+06", "population"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.96, 891.96, 891.96, 891.96, 891.96, 891.96), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyBerlin}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 1, []string{"German Historical Museum"}, []int64{1}), + asserts.text("name", 1, []string{"Berlin"}, []int64{1}), + asserts.int("population", 1, 3470000, 3470000, 3470000, 3470000, 3470000, 3470000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "1.8e+06": { + asserts.groupedBy("1.8e+06", "population"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.95, 891.95, 891.95, 891.95, 891.95, 891.95), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyAmsterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 2, []string{"Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1}), + asserts.text("name", 1, []string{"Amsterdam"}, []int64{1}), + asserts.int("population", 1, 1800000, 1800000, 1800000, 1800000, 1800000, 1800000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := map[string][]assertFunc{} + + testCases := 
[]aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateCityQuery(tc.filters, "groupBy: [\"population\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractCityGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate City with group by timezones", func(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateCityTestCases{} + + expectedAllResultsAssertions := map[string][]assertFunc{ + "CEST": { + asserts.groupedBy("CEST", "timezones"), + asserts.meta(4), + asserts.number("cityArea", 4, 891.96, 217.22, 217.22, 2320.48, 
605.6500000000001, 580.12), + asserts.date("cityRights", 4), + asserts.text("history", 4, []string{historyAmsterdam, historyRotterdam, historyBerlin, historyDusseldorf}, []int64{1, 1, 1, 1}), + asserts.boolean("isCapital", 4, 2, 2, 0.5, 0.5), + asserts.textArray("museums", 9, []string{"German Historical Museum", "Museum Boijmans Van Beuningen", "Onomato", "Rijksmuseum", "Schiffahrt Museum"}, []int64{1, 1, 1, 1, 1}), + asserts.text("name", 4, []string{"Amsterdam", "Berlin", "Dusseldorf", "Rotterdam"}, []int64{1, 1, 1, 1}), + asserts.int("population", 4, 3470000, 600000, 600000, 6470000, 1200000, 1617500), + asserts.textArray("timezones", 8, []string{"CEST", "CET"}, []int64{4, 4}), + asserts.pointingTo("inCountry", "Country"), + }, + "CET": { + asserts.groupedBy("CET", "timezones"), + asserts.meta(4), + asserts.number("cityArea", 4, 891.96, 217.22, 217.22, 2320.48, 605.6500000000001, 580.12), + asserts.date("cityRights", 4), + asserts.text("history", 4, []string{historyAmsterdam, historyRotterdam, historyBerlin, historyDusseldorf}, []int64{1, 1, 1, 1}), + asserts.boolean("isCapital", 4, 2, 2, 0.5, 0.5), + asserts.textArray("museums", 9, []string{"German Historical Museum", "Museum Boijmans Van Beuningen", "Onomato", "Rijksmuseum", "Schiffahrt Museum"}, []int64{1, 1, 1, 1, 1}), + asserts.text("name", 4, []string{"Amsterdam", "Berlin", "Dusseldorf", "Rotterdam"}, []int64{1, 1, 1, 1}), + asserts.int("population", 4, 3470000, 600000, 600000, 6470000, 1200000, 1617500), + asserts.textArray("timezones", 8, []string{"CEST", "CET"}, []int64{4, 4}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithDataAssertions := map[string][]assertFunc{ + "CEST": { + asserts.groupedBy("CEST", "timezones"), + asserts.meta(2), + asserts.number("cityArea", 2, 891.96, 891.95, 891.95, 1783.91, 891.955, 891.955), + asserts.date("cityRights", 2), + asserts.text("history", 2, []string{historyAmsterdam, historyBerlin}, []int64{1, 1}), + asserts.boolean("isCapital", 2, 0, 
2, 0, 1), + asserts.textArray("museums", 3, []string{"German Historical Museum", "Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1, 1}), + asserts.text("name", 2, []string{"Amsterdam", "Berlin"}, []int64{1, 1}), + asserts.int("population", 2, 3470000, 1800000, 1800000, 5270000, 2635000, 2635000), + asserts.textArray("timezones", 4, []string{"CEST", "CET"}, []int64{2, 2}), + asserts.pointingTo("inCountry", "Country"), + }, + "CET": { + asserts.groupedBy("CET", "timezones"), + asserts.meta(2), + asserts.number("cityArea", 2, 891.96, 891.95, 891.95, 1783.91, 891.955, 891.955), + asserts.date("cityRights", 2), + asserts.text("history", 2, []string{historyAmsterdam, historyBerlin}, []int64{1, 1}), + asserts.boolean("isCapital", 2, 0, 2, 0, 1), + asserts.textArray("museums", 3, []string{"German Historical Museum", "Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1, 1}), + asserts.text("name", 2, []string{"Amsterdam", "Berlin"}, []int64{1, 1}), + asserts.int("population", 2, 3470000, 1800000, 1800000, 5270000, 2635000, 2635000), + asserts.textArray("timezones", 4, []string{"CEST", "CET"}, []int64{2, 2}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := map[string][]assertFunc{} + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + 
testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateCityQuery(tc.filters, "groupBy: [\"timezones\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractCityGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) + + t.Run("aggregate City with group by museums", func(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateCityTestCases{} + + expectedAllResultsAssertions := map[string][]assertFunc{ + "German Historical Museum": { + asserts.groupedBy("German Historical Museum", "museums"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.96, 891.96, 891.96, 891.96, 891.96, 891.96), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyBerlin}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 1, []string{"German Historical Museum"}, []int64{1}), + asserts.text("name", 1, []string{"Berlin"}, []int64{1}), + asserts.int("population", 1, 3470000, 3470000, 3470000, 3470000, 3470000, 3470000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Rijksmuseum": { + asserts.groupedBy("Rijksmuseum", "museums"), + asserts.meta(1), + asserts.number("cityArea", 1, 
891.95, 891.95, 891.95, 891.95, 891.95, 891.95), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyAmsterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 2, []string{"Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1}), + asserts.text("name", 1, []string{"Amsterdam"}, []int64{1}), + asserts.int("population", 1, 1800000, 1800000, 1800000, 1800000, 1800000, 1800000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Stedelijk Museum": { + asserts.groupedBy("Stedelijk Museum", "museums"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.95, 891.95, 891.95, 891.95, 891.95, 891.95), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyAmsterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 2, []string{"Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1}), + asserts.text("name", 1, []string{"Amsterdam"}, []int64{1}), + asserts.int("population", 1, 1800000, 1800000, 1800000, 1800000, 1800000, 1800000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Onomato": { + asserts.groupedBy("Onomato", "museums"), + asserts.meta(1), + asserts.number("cityArea", 1, 217.22, 217.22, 217.22, 217.22, 217.22, 217.22), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyDusseldorf}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Onomato", "Schiffahrt Museum", "Schlossturm"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Dusseldorf"}, []int64{1}), + asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Schiffahrt Museum": { + 
asserts.groupedBy("Schiffahrt Museum", "museums"), + asserts.meta(1), + asserts.number("cityArea", 1, 217.22, 217.22, 217.22, 217.22, 217.22, 217.22), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyDusseldorf}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Onomato", "Schiffahrt Museum", "Schlossturm"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Dusseldorf"}, []int64{1}), + asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Schlossturm": { + asserts.groupedBy("Schlossturm", "museums"), + asserts.meta(1), + asserts.number("cityArea", 1, 217.22, 217.22, 217.22, 217.22, 217.22, 217.22), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyDusseldorf}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Onomato", "Schiffahrt Museum", "Schlossturm"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Dusseldorf"}, []int64{1}), + asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Museum Boijmans Van Beuningen": { + asserts.groupedBy("Museum Boijmans Van Beuningen", "museums"), + asserts.meta(1), + asserts.number("cityArea", 1, 319.35, 319.35, 319.35, 319.35, 319.35, 319.35), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyRotterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Museum Boijmans Van Beuningen", "Wereldmuseum", "Witte de With Center for Contemporary Art"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Rotterdam"}, []int64{1}), + asserts.int("population", 1, 600000, 600000, 
600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Wereldmuseum": { + asserts.groupedBy("Wereldmuseum", "museums"), + asserts.meta(1), + asserts.number("cityArea", 1, 319.35, 319.35, 319.35, 319.35, 319.35, 319.35), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyRotterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Museum Boijmans Van Beuningen", "Wereldmuseum", "Witte de With Center for Contemporary Art"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Rotterdam"}, []int64{1}), + asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Witte de With Center for Contemporary Art": { + asserts.groupedBy("Witte de With Center for Contemporary Art", "museums"), + asserts.meta(1), + asserts.number("cityArea", 1, 319.35, 319.35, 319.35, 319.35, 319.35, 319.35), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyRotterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 1, 0, 1, 0), + asserts.textArray("museums", 3, []string{"Museum Boijmans Van Beuningen", "Wereldmuseum", "Witte de With Center for Contemporary Art"}, []int64{1, 1, 1}), + asserts.text("name", 1, []string{"Rotterdam"}, []int64{1}), + asserts.int("population", 1, 600000, 600000, 600000, 600000, 600000, 600000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithDataAssertions := map[string][]assertFunc{ + "German Historical Museum": { + asserts.groupedBy("German Historical Museum", "museums"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.96, 891.96, 891.96, 891.96, 891.96, 891.96), + asserts.date("cityRights", 
1), + asserts.text("history", 1, []string{historyBerlin}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 1, []string{"German Historical Museum"}, []int64{1}), + asserts.text("name", 1, []string{"Berlin"}, []int64{1}), + asserts.int("population", 1, 3470000, 3470000, 3470000, 3470000, 3470000, 3470000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Rijksmuseum": { + asserts.groupedBy("Rijksmuseum", "museums"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.95, 891.95, 891.95, 891.95, 891.95, 891.95), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyAmsterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 2, []string{"Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1}), + asserts.text("name", 1, []string{"Amsterdam"}, []int64{1}), + asserts.int("population", 1, 1800000, 1800000, 1800000, 1800000, 1800000, 1800000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + "Stedelijk Museum": { + asserts.groupedBy("Stedelijk Museum", "museums"), + asserts.meta(1), + asserts.number("cityArea", 1, 891.95, 891.95, 891.95, 891.95, 891.95, 891.95), + asserts.date("cityRights", 1), + asserts.text("history", 1, []string{historyAmsterdam}, []int64{1}), + asserts.boolean("isCapital", 1, 0, 1, 0, 1), + asserts.textArray("museums", 2, []string{"Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1}), + asserts.text("name", 1, []string{"Amsterdam"}, []int64{1}), + asserts.int("population", 1, 1800000, 1800000, 1800000, 1800000, 1800000, 1800000), + asserts.textArray("timezones", 2, []string{"CEST", "CET"}, []int64{1, 1}), + asserts.pointingTo("inCountry", "Country"), + }, + } + expectedResultsWithoutDataAssertions := map[string][]assertFunc{} + expectedNoResultsAssertions := map[string][]assertFunc{} + + 
testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(expectedAllResultsAssertions), + + testCasesGen.WithWhereFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereFilter_NoResults(expectedNoResultsAssertions), + + testCasesGen.WithNearObjectFilter_AllResults(expectedAllResultsAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(expectedResultsWithoutDataAssertions), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(expectedAllResultsAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(expectedResultsWithDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(expectedResultsWithoutDataAssertions), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(expectedNoResultsAssertions), + } + + for _, tc := range testCases { + query := aggregateCityQuery(tc.filters, "groupBy: [\"museums\"]") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractCityGroupByResult(result) + + assert.Len(t, extracted, len(tc.groupedAssertions)) + for groupedBy, groupAssertions := range tc.groupedAssertions { + group := findGroup(groupedBy, extracted) + require.NotNil(t, group, fmt.Sprintf("Group '%s' not found", groupedBy)) + + for _, assertion := range groupAssertions { + assertion(group) + } + } + }) + } + }) +} + +func extractArrayClassGroupByResult(result *graphqlhelper.GraphQLResult) []interface{} { + return extractAggregateResult(result, arrayClassName) +} + +func extractDuplicatesClassGroupByResult(result *graphqlhelper.GraphQLResult) []interface{} { + return extractAggregateResult(result, duplicatesClassName) +} + +func extractCityGroupByResult(result 
*graphqlhelper.GraphQLResult) []interface{} { + return extractAggregateResult(result, cityClassName) +} + +func findGroup(groupedBy string, groups []interface{}) map[string]interface{} { + for i := range groups { + if group, ok := groups[i].(map[string]interface{}); !ok { + continue + } else if gb, exists := group["groupedBy"]; !exists { + continue + } else if gbm, ok := gb.(map[string]interface{}); !ok { + continue + } else if value, exists := gbm["value"]; !exists { + continue + } else if groupedByValue, ok := value.(string); !ok { + continue + } else if groupedByValue == groupedBy { + return group + } + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_matrix_no_groupby_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_matrix_no_groupby_test.go new file mode 100644 index 0000000000000000000000000000000000000000..99bb4557c88de49b8fea10ce9e0e1785a4c4ea13 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_matrix_no_groupby_test.go @@ -0,0 +1,302 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +func aggregateArrayClassWithoutGroupByTest(t *testing.T) { + t.Run("aggregate ArrayClass without group by", func(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateArrayClassTestCases{} + + expectedAllResultsAssertions := []assertFunc{ + asserts.meta(7), + asserts.booleanArray("booleans", 10, 4, 6, 0.4, 0.6), + asserts.textArray("texts", 10, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{4, 3, 2, 1}), + asserts.numberArray("numbers", 10, 4, 1, 1, 20, 2, 2), + asserts.intArray("ints", 10, 104, 101, 101, 1020, 102, 102), + asserts.dateArray("dates", 10), + } + expectedResultsWithDataAssertions := []assertFunc{ + asserts.meta(2), + asserts.booleanArray("booleans", 7, 2, 5, 0.2857142857142857, 0.7142857142857143), + asserts.textArray("texts", 7, []string{"Alpha", "Bravo", "Charlie", "Delta"}, []int64{2, 2, 2, 1}), + asserts.numberArray("numbers", 7, 4, 1, 1, 16, 2, 2.2857142857142856), + asserts.intArray("ints", 7, 104, 101, 101, 716, 102, 102.28571428571429), + asserts.dateArray("dates", 7), + } + expectedResultsWithoutDataAssertions := []assertFunc{ + asserts.meta(3), + asserts.booleanArray0("booleans"), + asserts.textArray0("texts"), + asserts.numberArray0("numbers"), + asserts.intArray0("ints"), + asserts.dateArray0("dates"), + } + expectedNoResultsAssertions := []assertFunc{ + asserts.meta(0), + asserts.booleanArray0("booleans"), + asserts.textArray0("texts"), + asserts.numberArray0("numbers"), + asserts.intArray0("ints"), + asserts.dateArray0("dates"), + } + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(wrapWithMap(expectedAllResultsAssertions)), + + testCasesGen.WithWhereFilter_AllResults(wrapWithMap(expectedAllResultsAssertions)), + 
testCasesGen.WithWhereFilter_ResultsWithData(wrapWithMap(expectedResultsWithDataAssertions)), + testCasesGen.WithWhereFilter_ResultsWithoutData(wrapWithMap(expectedResultsWithoutDataAssertions)), + testCasesGen.WithWhereFilter_NoResults(wrapWithMap(expectedNoResultsAssertions)), + + testCasesGen.WithNearObjectFilter_AllResults(wrapWithMap(expectedAllResultsAssertions)), + testCasesGen.WithNearObjectFilter_ResultsWithData(wrapWithMap(expectedResultsWithDataAssertions)), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(wrapWithMap(expectedResultsWithoutDataAssertions)), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(wrapWithMap(expectedAllResultsAssertions)), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(wrapWithMap(expectedResultsWithDataAssertions)), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(wrapWithMap(expectedResultsWithoutDataAssertions)), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(wrapWithMap(expectedNoResultsAssertions)), + } + + for _, tc := range testCases { + query := aggregateArrayClassQuery(tc.filters, "") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractArrayClassNoGroupByResult(result) + + for _, groupAssertions := range tc.groupedAssertions { + for _, assertion := range groupAssertions { + assertion(extracted) + } + } + }) + } + }) +} + +func aggregateDuplicatesClassWithoutGroupByTest(t *testing.T) { + t.Run("aggregate DuplicatesClass without group by", func(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateDuplicatesClassTestCases{} + + expectedAllResultsAssertions := []assertFunc{ + asserts.meta(3), + asserts.booleanArray("booleans", 9, 3, 6, 0.3333333333333333, 0.6666666666666666), + asserts.textArray("texts", 9, []string{"Alpha", "Bravo"}, []int64{6, 3}), + asserts.numberArray("numbers", 9, 2, 1, 1, 12, 1, 1.3333333333333333), + asserts.intArray("ints", 9, 102, 101, 
101, 912, 101, 101.33333333333333), + asserts.dateArray("dates", 9), + } + expectedSomeResultsAssertions := []assertFunc{ + asserts.meta(1), + asserts.booleanArray("booleans", 4, 1, 3, 0.25, 0.75), + asserts.textArray("texts", 4, []string{"Alpha", "Bravo"}, []int64{3, 1}), + asserts.numberArray("numbers", 4, 2, 1, 1, 5, 1, 1.25), + asserts.intArray("ints", 4, 102, 101, 101, 405, 101, 101.25), + asserts.dateArray("dates", 4), + } + expectedNoResultsAssertions := []assertFunc{ + asserts.meta(0), + asserts.booleanArray0("booleans"), + asserts.textArray0("texts"), + asserts.numberArray0("numbers"), + asserts.intArray0("ints"), + asserts.dateArray0("dates"), + } + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(wrapWithMap(expectedAllResultsAssertions)), + + testCasesGen.WithWhereFilter_AllResults(wrapWithMap(expectedAllResultsAssertions)), + testCasesGen.WithWhereFilter_SomeResults(wrapWithMap(expectedSomeResultsAssertions)), + testCasesGen.WithWhereFilter_NoResults(wrapWithMap(expectedNoResultsAssertions)), + } + + for _, tc := range testCases { + query := aggregateDuplicatesClassQuery(tc.filters, "") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractDuplicatesClassNoGroupByResult(result) + + for _, groupAssertions := range tc.groupedAssertions { + for _, assertion := range groupAssertions { + assertion(extracted) + } + } + }) + } + }) +} + +func aggregateNoPropsClassWithoutGroupByTest(t *testing.T) { + t.Run("aggregate NoPropsClass without group by", func(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateNoPropsClassTestCases{} + + expectedAllResultsAssertions := []assertFunc{ + asserts.meta(2), + } + expectedSomeResultsAssertions := []assertFunc{ + asserts.meta(1), + } + expectedNoResultsAssertions := []assertFunc{ + asserts.meta(0), + } + + testCases := []aggregateTestCase{ + 
testCasesGen.WithoutFilters(wrapWithMap(expectedAllResultsAssertions)), + + testCasesGen.WithWhereFilter_AllResults(wrapWithMap(expectedAllResultsAssertions)), + testCasesGen.WithWhereFilter_SomeResults(wrapWithMap(expectedSomeResultsAssertions)), + testCasesGen.WithWhereFilter_NoResults(wrapWithMap(expectedNoResultsAssertions)), + + testCasesGen.WithNearObjectFilter_AllResults(wrapWithMap(expectedAllResultsAssertions)), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(wrapWithMap(expectedAllResultsAssertions)), + testCasesGen.WithWhereAndNearObjectFilters_SomeResults(wrapWithMap(expectedSomeResultsAssertions)), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(wrapWithMap(expectedNoResultsAssertions)), + } + + for _, tc := range testCases { + query := aggregateNoPropsQuery(tc.filters) + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractNoPropsClassNoGroupByResult(result) + + for _, groupAssertions := range tc.groupedAssertions { + for _, assertion := range groupAssertions { + assertion(extracted) + } + } + }) + } + }) +} + +func aggregateCityClassWithoutGroupByTest(t *testing.T) { + t.Run("aggregate City without group by", func(t *testing.T) { + asserts := newAggregateResponseAssert(t) + testCasesGen := &aggregateCityTestCases{} + + expectedAllResultsAssertions := []assertFunc{ + asserts.meta(6), + asserts.number("cityArea", 4, 891.96, 217.22, 217.22, 2320.48, 605.6500000000001, 580.12), + asserts.date("cityRights", 4), + asserts.text("history", 4, []string{historyAmsterdam, historyRotterdam, historyBerlin, historyDusseldorf}, []int64{1, 1, 1, 1}), + asserts.boolean("isCapital", 5, 3, 2, 0.6, 0.4), + asserts.textArray("museums", 9, []string{"German Historical Museum", "Museum Boijmans Van Beuningen", "Onomato", "Rijksmuseum", "Schiffahrt Museum"}, []int64{1, 1, 1, 1, 1}), + asserts.text("name", 5, []string{"Amsterdam", "Berlin", "Dusseldorf", "Missing Island", "Rotterdam"}, 
[]int64{1, 1, 1, 1, 1}), + asserts.int("population", 5, 3470000, 0, 600000, 6470000, 600000, 1294000), + asserts.textArray("timezones", 8, []string{"CEST", "CET"}, []int64{4, 4}), + asserts.pointingTo("inCountry", "Country"), + } + expectedResultsWithDataAssertions := []assertFunc{ + asserts.meta(2), + asserts.number("cityArea", 2, 891.96, 891.95, 891.95, 1783.91, 891.955, 891.955), + asserts.date("cityRights", 2), + asserts.text("history", 2, []string{historyAmsterdam, historyBerlin}, []int64{1, 1}), + asserts.boolean("isCapital", 2, 0, 2, 0, 1), + asserts.textArray("museums", 3, []string{"German Historical Museum", "Rijksmuseum", "Stedelijk Museum"}, []int64{1, 1, 1}), + asserts.text("name", 2, []string{"Amsterdam", "Berlin"}, []int64{1, 1}), + asserts.int("population", 2, 3470000, 1800000, 1800000, 5270000, 2635000, 2635000), + asserts.textArray("timezones", 4, []string{"CEST", "CET"}, []int64{2, 2}), + asserts.pointingTo("inCountry", "Country"), + } + expectedResultsWithoutDataAssertions := []assertFunc{ + asserts.meta(1), + asserts.number0("cityArea"), + asserts.date0("cityRights"), + asserts.text0("history"), + asserts.boolean0("isCapital"), + asserts.textArray0("museums"), + asserts.text0("name"), + asserts.int0("population"), + asserts.textArray0("timezones"), + asserts.pointingTo("inCountry", "Country"), + } + expectedNoResultsAssertions := []assertFunc{ + asserts.meta(0), + asserts.number0("cityArea"), + asserts.date0("cityRights"), + asserts.text0("history"), + asserts.boolean0("isCapital"), + asserts.textArray0("museums"), + asserts.text0("name"), + asserts.int0("population"), + asserts.textArray0("timezones"), + asserts.pointingTo("inCountry", "Country"), + } + + testCases := []aggregateTestCase{ + testCasesGen.WithoutFilters(wrapWithMap(expectedAllResultsAssertions)), + + testCasesGen.WithWhereFilter_AllResults(wrapWithMap(expectedAllResultsAssertions)), + testCasesGen.WithWhereFilter_ResultsWithData(wrapWithMap(expectedResultsWithDataAssertions)), + 
testCasesGen.WithWhereFilter_ResultsWithoutData(wrapWithMap(expectedResultsWithoutDataAssertions)), + testCasesGen.WithWhereFilter_NoResults(wrapWithMap(expectedNoResultsAssertions)), + + testCasesGen.WithNearObjectFilter_AllResults(wrapWithMap(expectedAllResultsAssertions)), + testCasesGen.WithNearObjectFilter_ResultsWithData(wrapWithMap(expectedResultsWithDataAssertions)), + testCasesGen.WithNearObjectFilter_ResultsWithoutData(wrapWithMap(expectedResultsWithoutDataAssertions)), + + testCasesGen.WithWhereAndNearObjectFilters_AllResults(wrapWithMap(expectedAllResultsAssertions)), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithData(wrapWithMap(expectedResultsWithDataAssertions)), + testCasesGen.WithWhereAndNearObjectFilters_ResultsWithoutData(wrapWithMap(expectedResultsWithoutDataAssertions)), + testCasesGen.WithWhereAndNearObjectFilters_NoResults(wrapWithMap(expectedNoResultsAssertions)), + } + + for _, tc := range testCases { + query := aggregateCityQuery(tc.filters, "") + + t.Run(tc.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + extracted := extractCityNoGroupByResult(result) + + for _, groupAssertions := range tc.groupedAssertions { + for _, assertion := range groupAssertions { + assertion(extracted) + } + } + }) + } + }) +} + +func extractArrayClassNoGroupByResult(result *graphqlhelper.GraphQLResult) map[string]interface{} { + return extractAggregateResult(result, arrayClassName)[0].(map[string]interface{}) +} + +func extractDuplicatesClassNoGroupByResult(result *graphqlhelper.GraphQLResult) map[string]interface{} { + return extractAggregateResult(result, duplicatesClassName)[0].(map[string]interface{}) +} + +func extractNoPropsClassNoGroupByResult(result *graphqlhelper.GraphQLResult) map[string]interface{} { + return extractAggregateResult(result, noPropsClassName)[0].(map[string]interface{}) +} + +func extractCityNoGroupByResult(result *graphqlhelper.GraphQLResult) map[string]interface{} { + return 
extractAggregateResult(result, cityClassName)[0].(map[string]interface{}) +} + +func wrapWithMap(assertFuncs []assertFunc) map[string][]assertFunc { + return map[string][]assertFunc{"": assertFuncs} +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_matrix_setup_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_matrix_setup_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e8f99dcac0c1680723bae9e7e4063acc3e9608dd --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_matrix_setup_test.go @@ -0,0 +1,1104 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "time" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + "github.com/weaviate/weaviate/usecases/config" +) + +const notExistingObjectId = "cfa3b21e-ca5f-4db7-a412-ffffffffffff" + +const ( + arrayClassName = "ArrayClass" + + objectArrayClassID1_4el = "cfa3b21e-ca5f-4db7-a412-5fc6a23c534a" + objectArrayClassID2_3el = "cfa3b21e-ca5f-4db7-a412-5fc6a23c534b" + objectArrayClassID3_2el = "cfa3b21e-ca5f-4db7-a412-5fc6a23c535a" + objectArrayClassID4_1el = "cfa3b21e-ca5f-4db7-a412-5fc6a23c535b" + objectArrayClassID5_0el = "cfa3b21e-ca5f-4db7-a412-5fc6a23c536a" + objectArrayClassID6_nils = "cfa3b21e-ca5f-4db7-a412-5fc6a23c536b" + objectArrayClassID7_empty = "cfa3b21e-ca5f-4db7-a412-5fc6a23c536c" +) + +const ( + noPropsClassName = "ClassWithoutProperties" + + objectNoPropsClassID1 = "dfa3b21e-ca5f-4db7-a412-5fc6a23c5301" + objectNoPropsClassID2 = 
"dfa3b21e-ca5f-4db7-a412-5fc6a23c5311" +) + +const ( + cityClassName = "City" +) + +const ( + duplicatesClassName = "DuplicatesClass" + + objectDuplicatesClassID1_4el = "a8076f34-ec16-4333-a963-00c89c5ba001" + objectDuplicatesClassID2_3el = "a8076f34-ec16-4333-a963-00c89c5ba002" + objectDuplicatesClassID3_2el = "a8076f34-ec16-4333-a963-00c89c5ba003" +) + +func arrayClassSchema() *models.Class { + return &models.Class{ + Class: arrayClassName, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + InvertedIndexConfig: &models.InvertedIndexConfig{IndexPropertyLength: true, IndexNullState: true, UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND}, + Properties: []*models.Property{ + { + Name: "texts", + DataType: schema.DataTypeTextArray.PropString(), + Tokenization: models.PropertyTokenizationWord, + }, + { + Name: "numbers", + DataType: []string{"number[]"}, + }, + { + Name: "ints", + DataType: []string{"int[]"}, + }, + { + Name: "booleans", + DataType: []string{"boolean[]"}, + }, + { + Name: "dates", + DataType: []string{"date[]"}, + }, + }, + } +} + +func arrayClassObjects() []*models.Object { + return []*models.Object{ + objectArrayClass4el(), + objectArrayClass3el(), + objectArrayClass2el(), + objectArrayClass1el(), + objectArrayClass0el(), + objectArrayClassNils(), + objectArrayClassEmpty(), + } +} + +func objectArrayClass4el() *models.Object { + return &models.Object{ + Class: arrayClassName, + ID: objectArrayClassID1_4el, + Properties: map[string]interface{}{ + "texts": []string{"Alpha", "Bravo", "Charlie", "Delta"}, + "numbers": []float64{1.0, 2.0, 3.0, 4.0}, + "ints": []int{101, 102, 103, 104}, + "booleans": []bool{true, true, true, false}, + "dates": []string{ + "2021-06-01T22:18:59.640162Z", + "2022-06-02T22:18:59.640162Z", + "2023-06-03T22:18:59.640162Z", + "2024-06-04T22:18:59.640162Z", + }, + }, + } +} + +func objectArrayClass3el() *models.Object { + return &models.Object{ + 
Class: arrayClassName, + ID: objectArrayClassID2_3el, + Properties: map[string]interface{}{ + "texts": []string{"Alpha", "Bravo", "Charlie"}, + "numbers": []float64{1.0, 2.0, 3.0}, + "ints": []int{101, 102, 103}, + "booleans": []bool{true, true, false}, + "dates": []string{ + "2021-06-01T22:18:59.640162Z", + "2022-06-02T22:18:59.640162Z", + "2023-06-03T22:18:59.640162Z", + }, + }, + } +} + +func objectArrayClass2el() *models.Object { + return &models.Object{ + Class: arrayClassName, + ID: objectArrayClassID3_2el, + Properties: map[string]interface{}{ + "texts": []string{"Alpha", "Bravo"}, + "numbers": []float64{1.0, 2.0}, + "ints": []int{101, 102}, + "booleans": []bool{true, false}, + "dates": []string{ + "2021-06-01T22:18:59.640162Z", + "2022-06-02T22:18:59.640162Z", + }, + }, + } +} + +func objectArrayClass1el() *models.Object { + return &models.Object{ + Class: arrayClassName, + ID: objectArrayClassID4_1el, + Properties: map[string]interface{}{ + "texts": []string{"Alpha"}, + "numbers": []float64{1.0}, + "ints": []int{101}, + "booleans": []bool{false}, + "dates": []string{ + "2021-06-01T22:18:59.640162Z", + }, + }, + } +} + +func objectArrayClass0el() *models.Object { + return &models.Object{ + Class: arrayClassName, + ID: objectArrayClassID5_0el, + Properties: map[string]interface{}{ + "texts": []string{}, + "numbers": []float64{}, + "ints": []int{}, + "booleans": []bool{}, + "dates": []time.Time{}, + }, + } +} + +func objectArrayClassNils() *models.Object { + return &models.Object{ + Class: arrayClassName, + ID: objectArrayClassID6_nils, + Properties: map[string]interface{}{ + "texts": nil, + "numbers": nil, + "ints": nil, + "booleans": nil, + "dates": nil, + }, + } +} + +func objectArrayClassEmpty() *models.Object { + return &models.Object{ + Class: arrayClassName, + ID: objectArrayClassID7_empty, + } +} + +func aggregateArrayClassQuery(filters, groupBy string) string { + query := `{ + Aggregate { + %s + %s + { + meta{ + count + } + booleans{ + count + type + 
totalTrue + totalFalse + percentageTrue + percentageFalse + } + texts{ + count + type + topOccurrences { + value + occurs + } + } + numbers{ + count + type + minimum + maximum + mean + median + mode + sum + } + ints{ + count + type + minimum + maximum + mean + median + mode + sum + } + dates{ + count + } + %s + } + } + }` + + params := "" + if filters != "" || groupBy != "" { + params = fmt.Sprintf( + `( + %s + %s + )`, filters, groupBy) + } + groupedBy := "" + if groupBy != "" { + groupedBy = `groupedBy{ + value + path + }` + } + + return fmt.Sprintf(query, arrayClassName, params, groupedBy) +} + +func extractAggregateResult(result *graphqlhelper.GraphQLResult, className string) []interface{} { + return result.Get("Aggregate", className).AsSlice() +} + +func noPropsClassSchema() *models.Class { + return &models.Class{ + Class: noPropsClassName, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + } +} + +func noPropsClassObjects() []*models.Object { + return []*models.Object{ + objectNoPropsClass1(), + objectNoPropsClass2(), + } +} + +func objectNoPropsClass1() *models.Object { + return &models.Object{ + Class: noPropsClassName, + ID: objectNoPropsClassID1, + } +} + +func objectNoPropsClass2() *models.Object { + return &models.Object{ + Class: noPropsClassName, + ID: objectNoPropsClassID2, + } +} + +func aggregateNoPropsQuery(filters string) string { + query := ` + { + Aggregate { + %s + %s + { + meta{ + count + } + } + } + } + ` + + params := "" + if filters != "" { + params = fmt.Sprintf( + `( + %s + )`, filters) + } + + return fmt.Sprintf(query, noPropsClassName, params) +} + +func aggregateCityQuery(filters, groupBy string) string { + query := `{ + Aggregate { + %s + %s + { + meta { + count + } + name { + count + type + topOccurrences { + value + occurs + } + } + cityArea { + count + type + minimum + maximum + mean + median + mode + sum + } + isCapital { + count + type + totalTrue 
+ totalFalse + percentageTrue + percentageFalse + } + population { + count + type + minimum + maximum + mean + median + mode + sum + } + cityRights { + count + } + history { + count + type + topOccurrences { + value + occurs + } + } + museums { + count + type + topOccurrences { + value + occurs + } + } + timezones { + count + type + topOccurrences { + value + occurs + } + } + inCountry { + pointingTo + type + } + %s + } + } + }` + + params := "" + if filters != "" || groupBy != "" { + params = fmt.Sprintf( + `( + %s + %s + )`, filters, groupBy) + } + groupedBy := "" + if groupBy != "" { + groupedBy = `groupedBy{ + value + path + }` + } + + return fmt.Sprintf(query, cityClassName, params, groupedBy) +} + +func duplicatesClassSchema() *models.Class { + return &models.Class{ + Class: duplicatesClassName, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "texts", + DataType: schema.DataTypeTextArray.PropString(), + Tokenization: models.PropertyTokenizationWord, + }, + { + Name: "numbers", + DataType: []string{"number[]"}, + }, + { + Name: "ints", + DataType: []string{"int[]"}, + }, + { + Name: "booleans", + DataType: []string{"boolean[]"}, + }, + { + Name: "dates", + DataType: []string{"date[]"}, + }, + }, + } +} + +func duplicatesClassObjects() []*models.Object { + return []*models.Object{ + objectDuplicatesClass4el(), + objectDuplicatesClass3el(), + objectDuplicatesClass2el(), + } +} + +func objectDuplicatesClass4el() *models.Object { + return &models.Object{ + Class: duplicatesClassName, + ID: objectDuplicatesClassID1_4el, + Properties: map[string]interface{}{ + "texts": []string{"Alpha", "Alpha", "Alpha", "Bravo"}, + "numbers": []float64{1.0, 1.0, 1.0, 2.0}, + "ints": []int{101, 101, 101, 102}, + "booleans": []bool{true, true, true, false}, + "dates": []string{ + "2021-06-01T22:18:59.640162Z", + "2021-06-01T22:18:59.640162Z", + 
"2021-06-01T22:18:59.640162Z", + "2022-06-02T22:18:59.640162Z", + }, + }, + } +} + +func objectDuplicatesClass3el() *models.Object { + return &models.Object{ + Class: duplicatesClassName, + ID: objectDuplicatesClassID2_3el, + Properties: map[string]interface{}{ + "texts": []string{"Alpha", "Alpha", "Bravo"}, + "numbers": []float64{1.0, 1.0, 2.0}, + "ints": []int{101, 101, 102}, + "booleans": []bool{true, true, false}, + "dates": []string{ + "2021-06-01T22:18:59.640162Z", + "2021-06-01T22:18:59.640162Z", + "2022-06-02T22:18:59.640162Z", + }, + }, + } +} + +func objectDuplicatesClass2el() *models.Object { + return &models.Object{ + Class: duplicatesClassName, + ID: objectDuplicatesClassID3_2el, + Properties: map[string]interface{}{ + "texts": []string{"Alpha", "Bravo"}, + "numbers": []float64{1.0, 2.0}, + "ints": []int{101, 102}, + "booleans": []bool{true, false}, + "dates": []string{ + "2021-06-01T22:18:59.640162Z", + "2022-06-02T22:18:59.640162Z", + }, + }, + } +} + +func aggregateDuplicatesClassQuery(filters, groupBy string) string { + query := `{ + Aggregate { + %s + %s + { + meta{ + count + } + booleans{ + count + type + totalTrue + totalFalse + percentageTrue + percentageFalse + } + texts{ + count + type + topOccurrences { + value + occurs + } + } + numbers{ + count + type + minimum + maximum + mean + median + mode + sum + } + ints{ + count + type + minimum + maximum + mean + median + mode + sum + } + dates{ + count + } + %s + } + } + }` + + params := "" + if filters != "" || groupBy != "" { + params = fmt.Sprintf( + `( + %s + %s + )`, filters, groupBy) + } + groupedBy := "" + if groupBy != "" { + groupedBy = `groupedBy{ + value + path + }` + } + + return fmt.Sprintf(query, duplicatesClassName, params, groupedBy) +} + +type aggregateTestCase struct { + name string + filters string + groupedAssertions map[string][]assertFunc // map[groupedBy]assertionsForGroup +} + +type aggregateArrayClassTestCases struct{} + +func (tc *aggregateArrayClassTestCases) 
WithoutFilters(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "without filters", + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateArrayClassTestCases) WithWhereFilter_AllResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (all results)", + filters: ` + where: { + operator: Like + path: ["id"] + valueText: "*" + }`, + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateArrayClassTestCases) WithWhereFilter_ResultsWithData(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (results with data)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + }`, objectArrayClassID1_4el[:35]+"?"), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateArrayClassTestCases) WithWhereFilter_ResultsWithoutData(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (results without data)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + }`, objectArrayClassID5_0el[:35]+"?"), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateArrayClassTestCases) WithWhereFilter_NoResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (no results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + }`, notExistingObjectId), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateArrayClassTestCases) WithNearObjectFilter_AllResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with nearObject filter (all results)", + filters: fmt.Sprintf(` + nearObject: { + id: "%s" + certainty: 0.7 + }`, objectArrayClassID1_4el), + groupedAssertions: 
groupedAssertions, + } +} + +func (tc *aggregateArrayClassTestCases) WithNearObjectFilter_ResultsWithData(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with nearObject filter (results with data)", + filters: fmt.Sprintf(` + nearObject: { + id: "%s" + certainty: 0.97 + }`, objectArrayClassID1_4el), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateArrayClassTestCases) WithNearObjectFilter_ResultsWithoutData(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with nearObject filter (results without data)", + filters: fmt.Sprintf(` + nearObject: { + id: "%s" + certainty: 1 + }`, objectArrayClassID5_0el), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateArrayClassTestCases) WithWhereAndNearObjectFilters_AllResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where & nearObject filters (all results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "*" + } + nearObject: { + id: "%s" + certainty: 0.7 + }`, objectArrayClassID1_4el), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateArrayClassTestCases) WithWhereAndNearObjectFilters_ResultsWithData(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where & nearObject filters (results with data)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + } + nearObject: { + id: "%s" + certainty: 0.97 + }`, objectArrayClassID1_4el[:35]+"?", objectArrayClassID1_4el), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateArrayClassTestCases) WithWhereAndNearObjectFilters_ResultsWithoutData(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where & nearObject filters (results without data)", + filters: fmt.Sprintf(` + where: { + 
operator: Like + path: ["id"] + valueText: "%s" + } + nearObject: { + id: "%s" + certainty: 1 + }`, objectArrayClassID5_0el[:35]+"?", objectArrayClassID5_0el), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateArrayClassTestCases) WithWhereAndNearObjectFilters_NoResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where & nearObject filters (no results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + } + nearObject: { + id: "%s" + certainty: 0.8 + }`, notExistingObjectId, objectArrayClassID1_4el), + groupedAssertions: groupedAssertions, + } +} + +type aggregateNoPropsClassTestCases struct{} + +func (tc *aggregateNoPropsClassTestCases) WithoutFilters(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "without filters", + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateNoPropsClassTestCases) WithWhereFilter_AllResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (all results)", + filters: ` + where: { + operator: Like + path: ["id"] + valueText: "*" + }`, + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateNoPropsClassTestCases) WithWhereFilter_SomeResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (some results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + }`, objectNoPropsClassID1[:35]+"?"), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateNoPropsClassTestCases) WithWhereFilter_NoResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (no results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + }`, notExistingObjectId), + groupedAssertions: 
groupedAssertions, + } +} + +func (tc *aggregateNoPropsClassTestCases) WithNearObjectFilter_AllResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with nearObject filter (all results)", + filters: fmt.Sprintf(` + nearObject: { + id: "%s" + certainty: 0.1 + }`, objectNoPropsClassID1), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateNoPropsClassTestCases) WithWhereAndNearObjectFilters_AllResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where & nearObject filters (all results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "*" + } + nearObject: { + id: "%s" + certainty: 0.1 + }`, objectNoPropsClassID1), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateNoPropsClassTestCases) WithWhereAndNearObjectFilters_SomeResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where & nearObject filters (some results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + } + nearObject: { + id: "%s" + certainty: 1 + }`, objectNoPropsClassID1[:35]+"?", objectNoPropsClassID1), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateNoPropsClassTestCases) WithWhereAndNearObjectFilters_NoResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where & nearObject filters (no results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + } + nearObject: { + id: "%s" + certainty: 0.1 + }`, notExistingObjectId, objectNoPropsClassID1), + groupedAssertions: groupedAssertions, + } +} + +type aggregateCityTestCases struct{} + +func (tc *aggregateCityTestCases) WithoutFilters(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "without filters", + 
groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateCityTestCases) WithWhereFilter_AllResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (all results)", + filters: ` + where: { + operator: Like + path: ["id"] + valueText: "*" + }`, + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateCityTestCases) WithWhereFilter_ResultsWithData(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (results with data)", + filters: ` + where: { + operator: Equal, + path: ["isCapital"], + valueBoolean: true + }`, + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateCityTestCases) WithWhereFilter_ResultsWithoutData(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (results without data)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + }`, nullisland), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateCityTestCases) WithWhereFilter_NoResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (no results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + }`, notExistingObjectId), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateCityTestCases) WithNearObjectFilter_AllResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with nearObject filter (all results)", + filters: fmt.Sprintf(` + nearObject: { + id: "%s" + certainty: 0.1 + }`, berlin), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateCityTestCases) WithNearObjectFilter_ResultsWithData(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with nearObject filter (results with data)", 
+ filters: fmt.Sprintf(` + nearObject: { + id: "%s" + certainty: 0.81 + }`, berlin), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateCityTestCases) WithNearObjectFilter_ResultsWithoutData(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with nearObject filter (results without data)", + filters: fmt.Sprintf(` + nearObject: { + id: "%s" + certainty: 0.9 + }`, nullisland), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateCityTestCases) WithWhereAndNearObjectFilters_AllResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where & nearObject filters (all results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "*" + } + nearObject: { + id: "%s" + certainty: 0.1 + }`, berlin), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateCityTestCases) WithWhereAndNearObjectFilters_ResultsWithData(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where & nearObject filters (results with data)", + filters: fmt.Sprintf(` + where: { + operator: Equal, + path: ["isCapital"], + valueBoolean: true + } + nearObject: { + id: "%s" + certainty: 0.81 + }`, berlin), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateCityTestCases) WithWhereAndNearObjectFilters_ResultsWithoutData(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where & nearObject filters (results without data)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + } + nearObject: { + id: "%s" + certainty: 0.9 + }`, nullisland, nullisland), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateCityTestCases) WithWhereAndNearObjectFilters_NoResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with 
where & nearObject filters (no results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + } + nearObject: { + id: "%s" + certainty: 0.1 + }`, notExistingObjectId, berlin), + groupedAssertions: groupedAssertions, + } +} + +type aggregateDuplicatesClassTestCases struct{} + +func (tc *aggregateDuplicatesClassTestCases) WithoutFilters(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "without filters", + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateDuplicatesClassTestCases) WithWhereFilter_AllResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (all results)", + filters: ` + where: { + operator: Like + path: ["id"] + valueText: "*" + }`, + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateDuplicatesClassTestCases) WithWhereFilter_SomeResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (some results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + }`, objectDuplicatesClassID1_4el), + groupedAssertions: groupedAssertions, + } +} + +func (tc *aggregateDuplicatesClassTestCases) WithWhereFilter_NoResults(groupedAssertions map[string][]assertFunc) aggregateTestCase { + return aggregateTestCase{ + name: "with where filter (no results)", + filters: fmt.Sprintf(` + where: { + operator: Like + path: ["id"] + valueText: "%s" + }`, notExistingObjectId), + groupedAssertions: groupedAssertions, + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_test.go new file mode 100644 index 0000000000000000000000000000000000000000..f2e1ad86ff7cc3923cf385c434ed3076bdba760c --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_test.go @@ -0,0 +1,1696 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "fmt" + "strconv" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + schemaclient "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +// This test prevents a regression on the fix for +// https://github.com/weaviate/weaviate/issues/824 +func localMeta_StringPropsNotSetEverywhere(t *testing.T) { + graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate { + City { + name { + topOccurrences { + occurs + value + } + } + } + } + } + `) +} + +func localMetaWithWhereAndNearTextFilters(t *testing.T) { + t.Run("with distance", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + City (where: { + valueBoolean: true, + operator: Equal, + path: ["isCapital"] + } + nearText: { + concepts: ["Amsterdam"] + distance: 0.2 + } + ){ + meta { + count + } + isCapital { + count + percentageFalse + percentageTrue + totalFalse + totalTrue + type + } + population { + mean + count + maximum + minimum + sum + type + } + inCountry { + pointingTo + type + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + t.Run("meta count", func(t *testing.T) { + meta := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + expected := 
json.Number("1") + assert.Equal(t, expected, count) + }) + + t.Run("boolean props", func(t *testing.T) { + isCapital := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["isCapital"] + expected := map[string]interface{}{ + "count": json.Number("1"), + "percentageTrue": json.Number("1"), + "percentageFalse": json.Number("0"), + "totalTrue": json.Number("1"), + "totalFalse": json.Number("0"), + "type": "boolean", + } + assert.Equal(t, expected, isCapital) + }) + + t.Run("int/number props", func(t *testing.T) { + population := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["population"] + expected := map[string]interface{}{ + "mean": json.Number("1800000"), + "count": json.Number("1"), + "maximum": json.Number("1800000"), + "minimum": json.Number("1800000"), + "sum": json.Number("1800000"), + "type": "int", + } + assert.Equal(t, expected, population) + }) + + t.Run("ref prop", func(t *testing.T) { + inCountry := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["inCountry"] + expected := map[string]interface{}{ + "pointingTo": []interface{}{"Country"}, + "type": "cref", + } + assert.Equal(t, expected, inCountry) + }) + + t.Run("string prop", func(t *testing.T) { + name := result.Get("Aggregate", "City"). 
+ AsSlice()[0].(map[string]interface{})["name"].(map[string]interface{}) + typeField := name["type"] + topOccurrences := name["topOccurrences"] + + assert.Equal(t, schema.DataTypeText.String(), typeField) + + expectedTopOccurrences := []interface{}{ + map[string]interface{}{ + "value": "Amsterdam", + "occurs": json.Number("1"), + }, + } + assert.ElementsMatch(t, expectedTopOccurrences, topOccurrences) + }) + }) + + t.Run("with certainty", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + City (where: { + valueBoolean: true, + operator: Equal, + path: ["isCapital"] + } + nearText: { + concepts: ["Amsterdam"] + certainty: 0.9 + } + ){ + meta { + count + } + isCapital { + count + percentageFalse + percentageTrue + totalFalse + totalTrue + type + } + population { + mean + count + maximum + minimum + sum + type + } + inCountry { + pointingTo + type + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + }`) + + t.Run("meta count", func(t *testing.T) { + meta := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + expected := json.Number("1") + assert.Equal(t, expected, count) + }) + + t.Run("boolean props", func(t *testing.T) { + isCapital := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["isCapital"] + expected := map[string]interface{}{ + "count": json.Number("1"), + "percentageTrue": json.Number("1"), + "percentageFalse": json.Number("0"), + "totalTrue": json.Number("1"), + "totalFalse": json.Number("0"), + "type": "boolean", + } + assert.Equal(t, expected, isCapital) + }) + + t.Run("int/number props", func(t *testing.T) { + population := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["population"] + expected := map[string]interface{}{ + "mean": json.Number("1800000"), + "count": json.Number("1"), + "maximum": json.Number("1800000"), + "minimum": json.Number("1800000"), 
+ "sum": json.Number("1800000"), + "type": "int", + } + assert.Equal(t, expected, population) + }) + + t.Run("ref prop", func(t *testing.T) { + inCountry := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["inCountry"] + expected := map[string]interface{}{ + "pointingTo": []interface{}{"Country"}, + "type": "cref", + } + assert.Equal(t, expected, inCountry) + }) + + t.Run("string prop", func(t *testing.T) { + name := result.Get("Aggregate", "City"). + AsSlice()[0].(map[string]interface{})["name"].(map[string]interface{}) + typeField := name["type"] + topOccurrences := name["topOccurrences"] + + assert.Equal(t, schema.DataTypeText.String(), typeField) + + expectedTopOccurrences := []interface{}{ + map[string]interface{}{ + "value": "Amsterdam", + "occurs": json.Number("1"), + }, + } + assert.ElementsMatch(t, expectedTopOccurrences, topOccurrences) + }) + }) +} + +func localMetaWithWhereAndNearObjectFilters(t *testing.T) { + t.Run("with distance", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + City (where: { + valueBoolean: true, + operator: Equal, + path: ["isCapital"] + } + nearObject: { + id: "9b9cbea5-e87e-4cd0-89af-e2f424fd52d6" + distance: 0.2 + } + ){ + meta { + count + } + isCapital { + count + percentageFalse + percentageTrue + totalFalse + totalTrue + type + } + population { + mean + count + maximum + minimum + sum + type + } + inCountry { + pointingTo + type + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + }`) + + t.Run("meta count", func(t *testing.T) { + meta := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + expected := json.Number("1") + assert.Equal(t, expected, count) + }) + + t.Run("boolean props", func(t *testing.T) { + isCapital := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["isCapital"] + expected := map[string]interface{}{ + 
"count": json.Number("1"), + "percentageTrue": json.Number("1"), + "percentageFalse": json.Number("0"), + "totalTrue": json.Number("1"), + "totalFalse": json.Number("0"), + "type": "boolean", + } + assert.Equal(t, expected, isCapital) + }) + + t.Run("int/number props", func(t *testing.T) { + population := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["population"] + expected := map[string]interface{}{ + "mean": json.Number("3470000"), + "count": json.Number("1"), + "maximum": json.Number("3470000"), + "minimum": json.Number("3470000"), + "sum": json.Number("3470000"), + "type": "int", + } + assert.Equal(t, expected, population) + }) + + t.Run("ref prop", func(t *testing.T) { + inCountry := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["inCountry"] + expected := map[string]interface{}{ + "pointingTo": []interface{}{"Country"}, + "type": "cref", + } + assert.Equal(t, expected, inCountry) + }) + + t.Run("string prop", func(t *testing.T) { + name := result.Get("Aggregate", "City"). 
+ AsSlice()[0].(map[string]interface{})["name"].(map[string]interface{}) + typeField := name["type"] + topOccurrences := name["topOccurrences"] + + assert.Equal(t, schema.DataTypeText.String(), typeField) + + expectedTopOccurrences := []interface{}{ + map[string]interface{}{ + "value": "Berlin", + "occurs": json.Number("1"), + }, + } + assert.ElementsMatch(t, expectedTopOccurrences, topOccurrences) + }) + }) + + t.Run("with certainty", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + City (where: { + valueBoolean: true, + operator: Equal, + path: ["isCapital"] + } + nearObject: { + id: "9b9cbea5-e87e-4cd0-89af-e2f424fd52d6" + certainty: 0.9 + } + ){ + meta { + count + } + isCapital { + count + percentageFalse + percentageTrue + totalFalse + totalTrue + type + } + population { + mean + count + maximum + minimum + sum + type + } + inCountry { + pointingTo + type + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + }`) + + t.Run("meta count", func(t *testing.T) { + meta := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + expected := json.Number("1") + assert.Equal(t, expected, count) + }) + + t.Run("boolean props", func(t *testing.T) { + isCapital := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["isCapital"] + expected := map[string]interface{}{ + "count": json.Number("1"), + "percentageTrue": json.Number("1"), + "percentageFalse": json.Number("0"), + "totalTrue": json.Number("1"), + "totalFalse": json.Number("0"), + "type": "boolean", + } + assert.Equal(t, expected, isCapital) + }) + + t.Run("int/number props", func(t *testing.T) { + population := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["population"] + expected := map[string]interface{}{ + "mean": json.Number("3470000"), + "count": json.Number("1"), + "maximum": json.Number("3470000"), + "minimum": 
json.Number("3470000"), + "sum": json.Number("3470000"), + "type": "int", + } + assert.Equal(t, expected, population) + }) + + t.Run("ref prop", func(t *testing.T) { + inCountry := result.Get("Aggregate", "City").AsSlice()[0].(map[string]interface{})["inCountry"] + expected := map[string]interface{}{ + "pointingTo": []interface{}{"Country"}, + "type": "cref", + } + assert.Equal(t, expected, inCountry) + }) + + t.Run("string prop", func(t *testing.T) { + name := result.Get("Aggregate", "City"). + AsSlice()[0].(map[string]interface{})["name"].(map[string]interface{}) + typeField := name["type"] + topOccurrences := name["topOccurrences"] + + assert.Equal(t, schema.DataTypeText.String(), typeField) + + expectedTopOccurrences := []interface{}{ + map[string]interface{}{ + "value": "Berlin", + "occurs": json.Number("1"), + }, + } + assert.ElementsMatch(t, expectedTopOccurrences, topOccurrences) + }) + }) +} + +func localMetaWithNearVectorFilter(t *testing.T) { + t.Run("with distance", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + CustomVectorClass( + nearVector: { + vector: [1,0,0] + distance: 0.0002 + } + ){ + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + }`) + + t.Run("meta count", func(t *testing.T) { + meta := result.Get("Aggregate", "CustomVectorClass").AsSlice()[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + expected := json.Number("1") + assert.Equal(t, expected, count) + }) + + t.Run("string prop", func(t *testing.T) { + name := result.Get("Aggregate", "CustomVectorClass"). 
+ AsSlice()[0].(map[string]interface{})["name"].(map[string]interface{}) + typeField := name["type"] + topOccurrences := name["topOccurrences"] + + assert.Equal(t, schema.DataTypeText.String(), typeField) + + expectedTopOccurrences := []interface{}{ + map[string]interface{}{ + "value": "Mercedes", + "occurs": json.Number("1"), + }, + } + assert.ElementsMatch(t, expectedTopOccurrences, topOccurrences) + }) + }) + + t.Run("with certainty", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + CustomVectorClass( + nearVector: { + vector: [1,0,0] + certainty: 0.9999 + } + ){ + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + }`) + + t.Run("meta count", func(t *testing.T) { + meta := result.Get("Aggregate", "CustomVectorClass").AsSlice()[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + expected := json.Number("1") + assert.Equal(t, expected, count) + }) + + t.Run("string prop", func(t *testing.T) { + name := result.Get("Aggregate", "CustomVectorClass"). 
+ AsSlice()[0].(map[string]interface{})["name"].(map[string]interface{}) + typeField := name["type"] + topOccurrences := name["topOccurrences"] + + assert.Equal(t, schema.DataTypeText.String(), typeField) + + expectedTopOccurrences := []interface{}{ + map[string]interface{}{ + "value": "Mercedes", + "occurs": json.Number("1"), + }, + } + assert.ElementsMatch(t, expectedTopOccurrences, topOccurrences) + }) + }) +} + +func localMetaWithWhereAndNearVectorFilters(t *testing.T) { + t.Run("with distance", func(t *testing.T) { + t.Run("with expected results, low certainty", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate { + CustomVectorClass( + where: { + valueText: "Ford" + operator: Equal + path: ["name"] + } + nearVector: { + vector: [1,0,0] + distance: 0.6 + } + ) { + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + require.NotNil(t, result) + + agg := result.Result.(map[string]interface{})["Aggregate"].(map[string]interface{}) + cls := agg["CustomVectorClass"].([]interface{}) + require.Len(t, cls, 1) + name := cls[0].(map[string]interface{})["name"].(map[string]interface{}) + topOcc := name["topOccurrences"].([]interface{}) + require.Len(t, topOcc, 1) + val := topOcc[0].(map[string]interface{})["value"] + assert.Equal(t, "Ford", val) + }) + + t.Run("with no expected results, low distance", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate { + CustomVectorClass( + where: { + valueText: "Ford" + operator: Equal + path: ["name"] + } + nearVector: { + vector: [1,0,0] + distance: 0.2 + } + ) { + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + require.NotNil(t, result) + + agg := result.Result.(map[string]interface{})["Aggregate"].(map[string]interface{}) + cls := agg["CustomVectorClass"].([]interface{}) + require.Len(t, cls, 1) + name := 
cls[0].(map[string]interface{})["name"].(map[string]interface{}) + topOcc := name["topOccurrences"].([]interface{}) + require.Len(t, topOcc, 0) + }) + + t.Run("with expected results, low distance", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate { + CustomVectorClass( + where: { + valueText: "Mercedes" + operator: Equal + path: ["name"] + } + nearVector: { + vector: [1,0,0] + distance: 0.1 + } + ) { + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + }`) + + require.NotNil(t, result) + + agg := result.Result.(map[string]interface{})["Aggregate"].(map[string]interface{}) + cls := agg["CustomVectorClass"].([]interface{}) + require.Len(t, cls, 1) + name := cls[0].(map[string]interface{})["name"].(map[string]interface{}) + topOcc := name["topOccurrences"].([]interface{}) + require.Len(t, topOcc, 1) + val := topOcc[0].(map[string]interface{})["value"] + assert.Equal(t, "Mercedes", val) + }) + }) + + t.Run("with certainty", func(t *testing.T) { + t.Run("with expected results, low certainty", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate { + CustomVectorClass( + where: { + valueText: "Ford" + operator: Equal + path: ["name"] + } + nearVector: { + vector: [1,0,0] + certainty: 0.7 + } + ) { + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + require.NotNil(t, result) + + agg := result.Result.(map[string]interface{})["Aggregate"].(map[string]interface{}) + cls := agg["CustomVectorClass"].([]interface{}) + require.Len(t, cls, 1) + name := cls[0].(map[string]interface{})["name"].(map[string]interface{}) + topOcc := name["topOccurrences"].([]interface{}) + require.Len(t, topOcc, 1) + val := topOcc[0].(map[string]interface{})["value"] + assert.Equal(t, "Ford", val) + }) + + t.Run("with no expected results, high certainty", func(t *testing.T) { + result := 
graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate { + CustomVectorClass( + where: { + valueText: "Ford" + operator: Equal + path: ["name"] + } + nearVector: { + vector: [1,0,0] + certainty: 0.9 + } + ) { + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + require.NotNil(t, result) + + agg := result.Result.(map[string]interface{})["Aggregate"].(map[string]interface{}) + cls := agg["CustomVectorClass"].([]interface{}) + require.Len(t, cls, 1) + name := cls[0].(map[string]interface{})["name"].(map[string]interface{}) + topOcc := name["topOccurrences"].([]interface{}) + require.Len(t, topOcc, 0) + }) + + t.Run("with expected results, high certainty", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate { + CustomVectorClass( + where: { + valueText: "Mercedes" + operator: Equal + path: ["name"] + } + nearVector: { + vector: [1,0,0] + certainty: 0.9 + } + ) { + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + require.NotNil(t, result) + + agg := result.Result.(map[string]interface{})["Aggregate"].(map[string]interface{}) + cls := agg["CustomVectorClass"].([]interface{}) + require.Len(t, cls, 1) + name := cls[0].(map[string]interface{})["name"].(map[string]interface{}) + topOcc := name["topOccurrences"].([]interface{}) + require.Len(t, topOcc, 1) + val := topOcc[0].(map[string]interface{})["value"] + assert.Equal(t, "Mercedes", val) + }) + }) +} + +func localMetaWithWhereGroupByNearMediaFilters(t *testing.T) { + t.Run("with nearObject", func(t *testing.T) { + query := ` + { + Aggregate { + Company + ( + groupBy: "name" + nearObject: {id: "cfa3b21e-ca4f-4db7-a432-7fc6a23c534d", certainty: 0.99} + ) + { + groupedBy { + value + } + meta { + count + } + } + } + }` + + expected := map[string]interface{}{ + "Aggregate": map[string]interface{}{ + "Company": []interface{}{ + map[string]interface{}{ + 
"groupedBy": map[string]interface{}{ + "value": "Microsoft Inc.", + }, + "meta": map[string]interface{}{ + "count": json.Number("1"), + }, + }, + }, + }, + } + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Result + assert.EqualValues(t, expected, result) + }) + + t.Run("with nearText", func(t *testing.T) { + t.Run("with distance", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + City ( + groupBy: "population" + where: { + valueBoolean: true, + operator: Equal, + path: ["isCapital"] + } + nearText: { + concepts: ["Amsterdam"] + distance: 0.2 + } + ){ + meta { + count + } + groupedBy { + value + } + } + } + } + `) + + expected := map[string]interface{}{ + "Aggregate": map[string]interface{}{ + "City": []interface{}{ + map[string]interface{}{ + "groupedBy": map[string]interface{}{ + "value": "1.8e+06", + }, + "meta": map[string]interface{}{ + "count": json.Number("1"), + }, + }, + }, + }, + } + + assert.EqualValues(t, expected, result.Result) + }) + + t.Run("with certainty", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + City ( + groupBy: "population" + where: { + valueBoolean: true, + operator: Equal, + path: ["isCapital"] + } + nearText: { + concepts: ["Amsterdam"] + certainty: 0.9 + } + ){ + meta { + count + } + groupedBy { + value + } + } + } + } + `) + + expected := map[string]interface{}{ + "Aggregate": map[string]interface{}{ + "City": []interface{}{ + map[string]interface{}{ + "groupedBy": map[string]interface{}{ + "value": "1.8e+06", + }, + "meta": map[string]interface{}{ + "count": json.Number("1"), + }, + }, + }, + }, + } + + assert.EqualValues(t, expected, result.Result) + }) + }) + + t.Run("with nearVector", func(t *testing.T) { + getQuery := ` + { + Get { + Company(where: { + path: ["name"] + operator: Equal + valueText: "Google Inc." 
+ }) + { + _additional { + vector + } + } + } + }` + + vectorResult := graphqlhelper.AssertGraphQL(t, helper.RootAuth, getQuery). + Get("Get", "Company"). + AsSlice()[0].(map[string]interface{})["_additional"].(map[string]interface{})["vector"].([]interface{}) + + vector := make([]float32, len(vectorResult)) + for i, ifc := range vectorResult { + val, err := strconv.ParseFloat(ifc.(json.Number).String(), 32) + require.Nil(t, err) + vector[i] = float32(val) + } + + aggQuery := fmt.Sprintf(` + { + Aggregate { + Company + ( + groupBy: "name" + nearVector: {vector: %+v, certainty: 0.99} + ) + { + groupedBy { + value + } + meta { + count + } + } + } + } + `, vector) + + aggResult := graphqlhelper.AssertGraphQL(t, helper.RootAuth, aggQuery).Result + + expected := map[string]interface{}{ + "Aggregate": map[string]interface{}{ + "Company": []interface{}{ + map[string]interface{}{ + "groupedBy": map[string]interface{}{ + "value": "Google Inc.", + }, + "meta": map[string]interface{}{ + "count": json.Number("1"), + }, + }, + }, + }, + } + + assert.EqualValues(t, expected, aggResult) + }) +} + +func localMetaWithObjectLimit(t *testing.T) { + t.Run("with nearText and no distance/certainty, where filter and groupBy", func(t *testing.T) { + objectLimit := 4 + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(` + { + Aggregate { + Company ( + groupBy: ["name"] + where: { + valueText: "Apple*", + operator: Like, + path: ["name"] + } + objectLimit: %d + nearText: { + concepts: ["Apple"] + certainty: 0.5 + } + ){ + meta { + count + } + groupedBy { + value + } + } + } + } + `, objectLimit)) + + expected := []interface{}{ + map[string]interface{}{ + "groupedBy": map[string]interface{}{ + "value": "Apple Incorporated", + }, + "meta": map[string]interface{}{ + "count": json.Number("1"), + }, + }, + map[string]interface{}{ + "groupedBy": map[string]interface{}{ + "value": "Apple Inc.", + }, + "meta": map[string]interface{}{ + "count": json.Number("1"), + }, + }, + 
map[string]interface{}{ + "groupedBy": map[string]interface{}{ + "value": "Apple", + }, + "meta": map[string]interface{}{ + "count": json.Number("1"), + }, + }, + } + + companies := result.Get("Aggregate", "Company").Result.([]interface{}) + for _, company := range companies { + assert.Contains(t, expected, company) + } + }) + + t.Run("with nearObject and distance", func(t *testing.T) { + objectLimit := 1 + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(` + { + Aggregate{ + City ( + objectLimit: %d + nearObject: { + id: "9b9cbea5-e87e-4cd0-89af-e2f424fd52d6" + distance: 0.3 + } + ){ + meta { + count + } + } + } + } + `, objectLimit)) + + t.Run("validate objectLimit functions as expected", func(t *testing.T) { + res := result.Get("Aggregate", "City").AsSlice() + require.Len(t, res, 1) + meta := res[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + assert.Equal(t, json.Number(fmt.Sprint(objectLimit)), count) + }) + }) + + t.Run("with nearObject and certainty", func(t *testing.T) { + objectLimit := 1 + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(` + { + Aggregate{ + City ( + objectLimit: %d + nearObject: { + id: "9b9cbea5-e87e-4cd0-89af-e2f424fd52d6" + certainty: 0.7 + } + ){ + meta { + count + } + } + } + } + `, objectLimit)) + + t.Run("validate objectLimit functions as expected", func(t *testing.T) { + res := result.Get("Aggregate", "City").AsSlice() + require.Len(t, res, 1) + meta := res[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + assert.Equal(t, json.Number(fmt.Sprint(objectLimit)), count) + }) + }) + + t.Run("with nearObject and no certainty", func(t *testing.T) { + objectLimit := 2 + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(` + { + Aggregate{ + City ( + objectLimit: %d + nearObject: { + id: "9b9cbea5-e87e-4cd0-89af-e2f424fd52d6" + } + ){ + meta { + count + } + } + } + } + `, objectLimit)) + + 
t.Run("validate objectLimit functions as expected", func(t *testing.T) { + res := result.Get("Aggregate", "City").AsSlice() + require.Len(t, res, 1) + meta := res[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + assert.Equal(t, json.Number(fmt.Sprint(objectLimit)), count) + }) + }) + + t.Run("with nearObject and very high distance, no objectLimit", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate { + RansomNote( + nearText: { + concepts: ["abc"] + distance: 1.9998 + } + ) { + meta { + count + } + } + } + } + `) + + t.Run("validate nearMedia runs unlimited without objectLimit", func(t *testing.T) { + res := result.Get("Aggregate", "RansomNote").AsSlice() + require.Len(t, res, 1) + meta := res[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + countInt, err := count.(json.Number).Int64() + require.Nil(t, err) + assert.InDelta(t, 500, countInt, 1) + }) + }) + + t.Run("with nearObject and very low certainty, no objectLimit", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate { + RansomNote( + nearText: { + concepts: ["abc"] + certainty: 0.0001 + } + ) { + meta { + count + } + } + } + } + `) + + t.Run("validate nearMedia runs unlimited without objectLimit", func(t *testing.T) { + res := result.Get("Aggregate", "RansomNote").AsSlice() + require.Len(t, res, 1) + meta := res[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + countInt, err := count.(json.Number).Int64() + require.Nil(t, err) + assert.InDelta(t, 500, countInt, 1) + }) + }) + + t.Run("with nearObject and low distance (few results), high objectLimit", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate { + RansomNote( + nearText: { + concepts: ["abc"] + distance: 0.6 # should return about 6 elements + } + objectLimit:100, + ) { + meta { + count + } + } + } 
+ } + `) + + t.Run("validate fewer than objectLimit elements are returned", func(t *testing.T) { + res := result.Get("Aggregate", "RansomNote").AsSlice() + require.Len(t, res, 1) + meta := res[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + countParsed, err := count.(json.Number).Int64() + require.Nil(t, err) + assert.Less(t, countParsed, int64(100)) + }) + }) + + t.Run("with nearObject and high certainty (few results), high objectLimit", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate { + RansomNote( + nearText: { + concepts: ["abc"] + certainty: 0.7 # should return about 6 elements + } + objectLimit:100, + ) { + meta { + count + } + } + } + } + `) + + t.Run("validate fewer than objectLimit elements are returned", func(t *testing.T) { + res := result.Get("Aggregate", "RansomNote").AsSlice() + require.Len(t, res, 1) + meta := res[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + countParsed, err := count.(json.Number).Int64() + require.Nil(t, err) + assert.Less(t, countParsed, int64(100)) + }) + }) + + t.Run("with nearText and no distance/certainty, where filter and groupBy", func(t *testing.T) { + objectLimit := 4 + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(` + { + Aggregate { + Company ( + groupBy: ["name"] + where: { + valueText: "Apple*", + operator: Like, + path: ["name"] + } + objectLimit: %d + nearText: { + concepts: ["Apple"] + certainty: 0.5 + } + ){ + meta { + count + } + groupedBy { + value + } + } + } + } + `, objectLimit)) + + expected := []interface{}{ + map[string]interface{}{ + "groupedBy": map[string]interface{}{ + "value": "Apple Incorporated", + }, + "meta": map[string]interface{}{ + "count": json.Number("1"), + }, + }, + map[string]interface{}{ + "groupedBy": map[string]interface{}{ + "value": "Apple Inc.", + }, + "meta": map[string]interface{}{ + "count": json.Number("1"), + }, + }, + 
map[string]interface{}{ + "groupedBy": map[string]interface{}{ + "value": "Apple", + }, + "meta": map[string]interface{}{ + "count": json.Number("1"), + }, + }, + } + + companies := result.Get("Aggregate", "Company").Result.([]interface{}) + for _, company := range companies { + assert.Contains(t, expected, company) + } + }) + + t.Run("with nearObject and certainty, where filter", func(t *testing.T) { + objectLimit := 1 + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(` + { + Aggregate{ + City ( + where: { + valueBoolean: true, + operator: Equal, + path: ["isCapital"] + } + objectLimit: %d + nearObject: { + id: "9b9cbea5-e87e-4cd0-89af-e2f424fd52d6" + } + ){ + meta { + count + } + } + } + } + `, objectLimit)) + + t.Run("validate objectLimit functions as expected", func(t *testing.T) { + res := result.Get("Aggregate", "City").AsSlice() + require.Len(t, res, 1) + meta := res[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + assert.Equal(t, json.Number(fmt.Sprint(objectLimit)), count) + }) + }) +} + +func aggregatesOnDateFields(t *testing.T) { + t.Run("without grouping", func(t *testing.T) { + query := ` + { + Aggregate { + HasDateField { + timestamp { + count + minimum + maximum + median + } + } + } + }` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Aggregate", "HasDateField").AsSlice() + assert.Len(t, result, 1) + + expected := []interface{}{ + map[string]interface{}{ + "timestamp": map[string]interface{}{ + "count": json.Number("10"), + "maximum": "2022-06-16T22:19:11.837473Z", + "median": "2022-06-16T22:19:06.1449075Z", + "minimum": "2022-06-16T22:18:59.640162Z", + }, + }, + } + assert.Equal(t, expected, result) + }) + + t.Run("with grouping on a unique field", func(t *testing.T) { + query := ` + { + Aggregate { + HasDateField + ( + groupBy: "unique" + ) + { + timestamp { + count + minimum + maximum + median + mode + } + } + } + }` + + result := graphqlhelper.AssertGraphQL(t, 
helper.RootAuth, query).Get("Aggregate", "HasDateField").AsSlice() + assert.Len(t, result, 10) + + expected := []interface{}{ + map[string]interface{}{ + "timestamp": map[string]interface{}{ + "count": json.Number("1"), + "maximum": "2022-06-16T22:19:05.894857Z", + "median": "2022-06-16T22:19:05.894857Z", + "minimum": "2022-06-16T22:19:05.894857Z", + "mode": "2022-06-16T22:19:05.894857Z", + }, + }, + map[string]interface{}{ + "timestamp": map[string]interface{}{ + "count": json.Number("1"), + "maximum": "2022-06-16T22:19:08.112395Z", + "median": "2022-06-16T22:19:08.112395Z", + "minimum": "2022-06-16T22:19:08.112395Z", + "mode": "2022-06-16T22:19:08.112395Z", + }, + }, + map[string]interface{}{ + "timestamp": map[string]interface{}{ + "count": json.Number("1"), + "maximum": "2022-06-16T22:19:03.495596Z", + "median": "2022-06-16T22:19:03.495596Z", + "minimum": "2022-06-16T22:19:03.495596Z", + "mode": "2022-06-16T22:19:03.495596Z", + }, + }, + map[string]interface{}{ + "timestamp": map[string]interface{}{ + "count": json.Number("1"), + "maximum": "2022-06-16T22:19:07.589828Z", + "median": "2022-06-16T22:19:07.589828Z", + "minimum": "2022-06-16T22:19:07.589828Z", + "mode": "2022-06-16T22:19:07.589828Z", + }, + }, + map[string]interface{}{ + "timestamp": map[string]interface{}{ + "count": json.Number("1"), + "maximum": "2022-06-16T22:19:06.394958Z", + "median": "2022-06-16T22:19:06.394958Z", + "minimum": "2022-06-16T22:19:06.394958Z", + "mode": "2022-06-16T22:19:06.394958Z", + }, + }, + map[string]interface{}{ + "timestamp": map[string]interface{}{ + "count": json.Number("1"), + "maximum": "2022-06-16T22:19:11.837473Z", + "median": "2022-06-16T22:19:11.837473Z", + "minimum": "2022-06-16T22:19:11.837473Z", + "mode": "2022-06-16T22:19:11.837473Z", + }, + }, + map[string]interface{}{ + "timestamp": map[string]interface{}{ + "count": json.Number("1"), + "maximum": "2022-06-16T22:18:59.640162Z", + "median": "2022-06-16T22:18:59.640162Z", + "minimum": 
"2022-06-16T22:18:59.640162Z", + "mode": "2022-06-16T22:18:59.640162Z", + }, + }, + map[string]interface{}{ + "timestamp": map[string]interface{}{ + "count": json.Number("1"), + "maximum": "2022-06-16T22:19:01.495967Z", + "median": "2022-06-16T22:19:01.495967Z", + "minimum": "2022-06-16T22:19:01.495967Z", + "mode": "2022-06-16T22:19:01.495967Z", + }, + }, + map[string]interface{}{ + "timestamp": map[string]interface{}{ + "count": json.Number("1"), + "maximum": "2022-06-16T22:19:10.339493Z", + "median": "2022-06-16T22:19:10.339493Z", + "minimum": "2022-06-16T22:19:10.339493Z", + "mode": "2022-06-16T22:19:10.339493Z", + }, + }, + map[string]interface{}{ + "timestamp": map[string]interface{}{ + "count": json.Number("1"), + "maximum": "2022-06-16T22:19:04.3828349Z", + "median": "2022-06-16T22:19:04.3828349Z", + "minimum": "2022-06-16T22:19:04.3828349Z", + "mode": "2022-06-16T22:19:04.3828349Z", + }, + }, + } + + for _, res := range result { + assert.Contains(t, expected, res) + } + }) + + t.Run("group on identical field", func(t *testing.T) { + query := ` + { + Aggregate { + HasDateField + ( + groupBy: "identical" + ) + { + timestamp { + count + minimum + maximum + median + } + } + } + }` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Aggregate", "HasDateField").AsSlice() + + expected := []interface{}{ + map[string]interface{}{ + "timestamp": map[string]interface{}{ + "count": json.Number("10"), + "maximum": "2022-06-16T22:19:11.837473Z", + "median": "2022-06-16T22:19:06.1449075Z", + "minimum": "2022-06-16T22:18:59.640162Z", + }, + }, + } + + assert.Equal(t, expected, result) + }) +} + +func aggregatesUsingAlias(t *testing.T) { + client := helper.Client(t) + params := schemaclient.NewAliasesCreateParams().WithBody(&models.Alias{ + Alias: "CustomVectorClassAlias", + Class: "CustomVectorClass", + }) + _, err := client.Schema.AliasesCreate(params, nil) + defer func(t *testing.T) { + params := 
schemaclient.NewAliasesDeleteParams().WithAliasName("CustomVectorClassAlias") + _, err := client.Schema.AliasesDelete(params, nil) + if err != nil { + t.Logf("Error deleting aliases: %v", err) + } + }(t) + require.Nil(t, err) + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + CustomVectorClassAlias( + nearVector: { + vector: [1,0,0] + distance: 0.0002 + } + ){ + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + }`) + + t.Run("meta count", func(t *testing.T) { + meta := result.Get("Aggregate", "CustomVectorClassAlias").AsSlice()[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + expected := json.Number("1") + assert.Equal(t, expected, count) + }) + + t.Run("string prop", func(t *testing.T) { + name := result.Get("Aggregate", "CustomVectorClassAlias"). + AsSlice()[0].(map[string]interface{})["name"].(map[string]interface{}) + typeField := name["type"] + topOccurrences := name["topOccurrences"] + + assert.Equal(t, schema.DataTypeText.String(), typeField) + + expectedTopOccurrences := []interface{}{ + map[string]interface{}{ + "value": "Mercedes", + "occurs": json.Number("1"), + }, + } + assert.ElementsMatch(t, expectedTopOccurrences, topOccurrences) + }) + + t.Run("assert alias no longer in schema after deletion", func(t *testing.T) { + params := schemaclient.NewAliasesDeleteParams().WithAliasName("CustomVectorClassAlias") + _, err := client.Schema.AliasesDelete(params, nil) + require.Nil(t, err) + + _ = graphqlhelper.ErrorGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + CustomVectorClassAlias( + nearVector: { + vector: [1,0,0] + distance: 0.0002 + } + ){ + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_with_expected_failures.go 
b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_with_expected_failures.go new file mode 100644 index 0000000000000000000000000000000000000000..541d61b6b0b2c9242d2ddcec30e8275b558cdf4e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_aggregate_with_expected_failures.go @@ -0,0 +1,353 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +func aggregatesWithExpectedFailures(t *testing.T) { + t.Run("with nearVector, no certainty", func(t *testing.T) { + result := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + CustomVectorClass( + nearVector: { + vector: [1,0,0] + } + ){ + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + require.NotEmpty(t, result) + require.Len(t, result, 1) + assert.True(t, strings.Contains(result[0].Message, + "must provide certainty or objectLimit with vector search"), + "unexpected error message: %s", result[0].Message) + }) + + t.Run("with nearObject, no certainty", func(t *testing.T) { + result := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + City( + nearObject: { + id: "9b9cbea5-e87e-4cd0-89af-e2f424fd52d6" + } + ){ + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + require.NotEmpty(t, result) + require.Len(t, result, 1) + 
assert.True(t, strings.Contains(result[0].Message, + "must provide certainty or objectLimit with vector search"), + "unexpected error message: %s", result[0].Message) + }) + + t.Run("with nearText, no certainty", func(t *testing.T) { + result := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + City( + nearText: { + concepts: ["Amsterdam"] + } + ){ + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + require.NotEmpty(t, result) + require.Len(t, result, 1) + assert.True(t, strings.Contains(result[0].Message, + "must provide certainty or objectLimit with vector search"), + "unexpected error message: %s", result[0].Message) + }) + + t.Run("with nearVector, where filter, no certainty", func(t *testing.T) { + result := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + CustomVectorClass( + where: { + valueText: "Mercedes", + operator: Equal, + path: ["name"] + } + nearVector: { + vector: [1,0,0] + } + ){ + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + require.NotEmpty(t, result) + require.Len(t, result, 1) + assert.True(t, strings.Contains(result[0].Message, + "must provide certainty or objectLimit with vector search"), + "unexpected error message: %s", result[0].Message) + }) + + t.Run("with nearObject, where filter, no certainty", func(t *testing.T) { + result := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + City (where: { + valueBoolean: true, + operator: Equal, + path: ["isCapital"] + } + nearObject: { + id: "9b9cbea5-e87e-4cd0-89af-e2f424fd52d6" + } + ){ + meta { + count + } + isCapital { + count + percentageFalse + percentageTrue + totalFalse + totalTrue + type + } + population { + mean + count + maximum + minimum + sum + type + } + inCountry { + pointingTo + type + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + require.NotEmpty(t, result) + 
require.Len(t, result, 1) + assert.True(t, strings.Contains(result[0].Message, + "must provide certainty or objectLimit with vector search"), + "unexpected error message: %s", result[0].Message) + }) + + t.Run("with nearText, where filter, no certainty", func(t *testing.T) { + result := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + City (where: { + valueBoolean: true, + operator: Equal, + path: ["isCapital"] + } + nearText: { + concepts: ["Amsterdam"] + } + ){ + meta { + count + } + isCapital { + count + percentageFalse + percentageTrue + totalFalse + totalTrue + type + } + population { + mean + count + maximum + minimum + sum + type + } + inCountry { + pointingTo + type + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + require.NotEmpty(t, result) + require.Len(t, result, 1) + assert.True(t, strings.Contains(result[0].Message, + "must provide certainty or objectLimit with vector search"), + "unexpected error message: %s", result[0].Message) + }) + + t.Run("objectLimit passed with no nearMedia", func(t *testing.T) { + result := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, ` + { + Aggregate{ + CustomVectorClass(objectLimit: 1){ + meta { + count + } + name { + topOccurrences { + occurs + value + } + type + count + } + } + } + } + `) + + require.NotEmpty(t, result) + require.Len(t, result, 1) + assert.True(t, strings.Contains(result[0].Message, "objectLimit can only be used with a near or hybrid filter")) + }) +} + +func exploreWithExpectedFailures(t *testing.T) { + t.Run("Explore called when classes have different distance configs", func(t *testing.T) { + className := "L2DistanceClass" + defer deleteObjectClass(t, className) + + t.Run("create class configured with non-default distance type", func(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: className, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + 
VectorIndexConfig: map[string]interface{}{ + "distance": "l2-squared", + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + }) + + t.Run("assert failure to Explore with mismatched distance types", func(t *testing.T) { + query := ` + { + Explore(nearVector: {vector:[1,1,1]}) { + distance + } + }` + + result := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, query) + assert.Len(t, result, 1) + + errMsg := result[0].Message + assert.Contains(t, errMsg, "vector search across classes not possible") + assert.Contains(t, errMsg, "found different distance metrics") + assert.Contains(t, errMsg, "class 'L2DistanceClass' uses distance metric 'l2-squared'") + assert.Contains(t, errMsg, "class 'Airport' uses distance metric 'cosine'") + assert.Contains(t, errMsg, "class 'Person' uses distance metric 'cosine'") + assert.Contains(t, errMsg, "class 'ArrayClass' uses distance metric 'cosine'") + assert.Contains(t, errMsg, "class 'HasDateField' uses distance metric 'cosine'") + assert.Contains(t, errMsg, "class 'CustomVectorClass' uses distance metric 'cosine'") + assert.Contains(t, errMsg, "class 'RansomNote' uses distance metric 'cosine'") + assert.Contains(t, errMsg, "class 'MultiShard' uses distance metric 'cosine'") + assert.Contains(t, errMsg, "class 'Country' uses distance metric 'cosine'") + assert.Contains(t, errMsg, "class 'City' uses distance metric 'cosine'") + assert.Contains(t, errMsg, "class 'Company' uses distance metric 'cosine'") + assert.Contains(t, errMsg, "class 'Pizza' uses distance metric 'cosine'") + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_cursor_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_cursor_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9c7257d56a920ce280936d106048898c66233dee --- /dev/null 
+++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_cursor_test.go @@ -0,0 +1,182 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + "github.com/weaviate/weaviate/test/helper/sample-schema/multishard" +) + +func getWithCursorSearch(t *testing.T) { + t.Run("listing objects using cursor api", func(t *testing.T) { + tests := []struct { + name string + className string + after string + limit int + filter string + expectedIDs []strfmt.UUID + expectedErrorMsg string + }{ + { + name: `cursor with after: "" limit: 2`, + className: "CursorClass", + after: "", + limit: 2, + expectedIDs: []strfmt.UUID{ + cursorClassID1, + cursorClassID2, + cursorClassID3, + cursorClassID4, + cursorClassID5, + cursorClassID6, + cursorClassID7, + }, + }, + { + name: fmt.Sprintf("cursor with after: \"%s\" limit: 1", cursorClassID4), + className: "CursorClass", + after: cursorClassID4.String(), + limit: 1, + expectedIDs: []strfmt.UUID{ + cursorClassID5, + cursorClassID6, + cursorClassID7, + }, + }, + { + name: "error with offset", + className: "CursorClass", + filter: `limit: 1 after: "" offset: 1`, + expectedErrorMsg: "cursor api: invalid 'after' parameter: offset cannot be set with after and limit parameters", + }, + { + name: "error with nearObject", + className: "CursorClass", + filter: fmt.Sprintf("limit: 1 after: \"\" nearObject:{id:\"%s\"}", cursorClassID1), + expectedErrorMsg: "cursor api: invalid 'after' parameter: other params cannot be set with 
after and limit parameters", + }, + { + name: "error with nearVector", + className: "CursorClass", + filter: `limit: 1 after: "" nearVector:{vector:[0.1, 0.2]}`, + expectedErrorMsg: "cursor api: invalid 'after' parameter: other params cannot be set with after and limit parameters", + }, + { + name: "error with hybrid", + className: "CursorClass", + filter: `limit: 1 after: "" hybrid:{query:"cursor api"}`, + expectedErrorMsg: "cursor api: invalid 'after' parameter: other params cannot be set with after and limit parameters", + }, + { + name: "error with bm25", + className: "CursorClass", + filter: `limit: 1 after: "" bm25:{query:"cursor api"}`, + expectedErrorMsg: "cursor api: invalid 'after' parameter: other params cannot be set with after and limit parameters", + }, + { + name: "error with sort", + className: "CursorClass", + filter: `limit: 1 after: "" sort:{path:"name"}`, + expectedErrorMsg: "cursor api: invalid 'after' parameter: sort cannot be set with after and limit parameters", + }, + { + name: "error with where", + className: "CursorClass", + filter: `limit: 1 after: "" where:{path:"id" operator:Like valueText:"*"}`, + expectedErrorMsg: "cursor api: invalid 'after' parameter: where cannot be set with after and limit parameters", + }, + { + name: "error with bm25, hybrid and offset", + className: "CursorClass", + filter: `limit: 1 after: "" bm25:{query:"cursor api"} hybrid:{query:"cursor api"} offset:1`, + expectedErrorMsg: "cursor api: invalid 'after' parameter: other params cannot be set with after and limit parameters", + }, + { + name: "error with no limit set", + className: "CursorClass", + filter: `after:"00000000-0000-0000-0000-000000000000"`, + expectedErrorMsg: "cursor api: invalid 'after' parameter: limit parameter must be set", + }, + // multi shard + { + name: `multi shard cursor with after: "" limit: 1`, + className: "MultiShard", + after: "", + limit: 1, + expectedIDs: []strfmt.UUID{ + multishard.MultiShardID1, + multishard.MultiShardID2, + 
multishard.MultiShardID3, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + query := "{ Get { " + tt.className + " %s { _additional { id } } } }" + if len(tt.expectedErrorMsg) > 0 { + errQuery := fmt.Sprintf(query, fmt.Sprintf("(%s)", tt.filter)) + result := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, errQuery) + assert.Len(t, result, 1) + + errMsg := result[0].Message + assert.Equal(t, tt.expectedErrorMsg, errMsg) + } else { + parseResults := func(t *testing.T, cities []interface{}) []strfmt.UUID { + var ids []strfmt.UUID + for _, city := range cities { + id, ok := city.(map[string]interface{})["_additional"].(map[string]interface{})["id"] + require.True(t, ok) + + idString, ok := id.(string) + require.True(t, ok) + + ids = append(ids, strfmt.UUID(idString)) + } + return ids + } + // use cursor api + cursorSearch := func(t *testing.T, className, after string, limit int) []strfmt.UUID { + cursor := fmt.Sprintf(`(limit: %v after: "%s")`, limit, after) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(query, cursor)) + cities := result.Get("Get", className).AsSlice() + return parseResults(t, cities) + } + + var cursorIDs []strfmt.UUID + after, limit := tt.after, tt.limit + for { + result := cursorSearch(t, tt.className, after, limit) + cursorIDs = append(cursorIDs, result...) 
+ if len(result) == 0 { + break + } + after = result[len(result)-1].String() + } + + assert.ElementsMatch(t, tt.expectedIDs, cursorIDs) + require.Equal(t, len(tt.expectedIDs), len(cursorIDs)) + for i := range tt.expectedIDs { + assert.Equal(t, tt.expectedIDs[i], cursorIDs[i]) + } + } + }) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_hybrid_search_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_hybrid_search_test.go new file mode 100644 index 0000000000000000000000000000000000000000..7fd256d0961825e78b5fe9bfa8675a32b96b848a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_hybrid_search_test.go @@ -0,0 +1,257 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +// appleVec is the t2v-contextionary representation of "Apple Inc." 
+var appleVec = []float32{ + 0.1156649, -0.3561866, 0.4718789, 0.37318036, 0.39549947, 0.019409189, -0.5052104, -0.49448758, + 0.34452468, 0.46354344, 0.1932035, 0.51334095, 0.06032639, 0.022086846, 0.20391269, 0.3013975, + 0.18838425, -0.2362212, -0.25797912, -0.11189923, -0.14507815, 0.3113891, -0.90078014, 0.027230136, + -0.5541761, -0.33453932, 0.9467, 0.39270592, 0.0775289, -0.14601035, -0.5497628, 0.34385568, + 0.5363504, 0.03164669, 0.03510879, -0.37564012, 0.22805381, -0.66345274, -0.92397606, 0.85855925, + -0.5637805, 0.035184387, 0.23299722, -0.042199645, -0.52195567, -0.17418303, -0.029039165, 0.4399605, + 0.36524323, 0.21769615, -0.1977588, -0.17114285, 0.30731055, -0.6743735, 0.25451374, 0.41582933, + 0.61602086, 0.3382223, 0.39701316, -0.54065305, -0.16107371, -0.80420196, -0.42476287, 0.40522298, + -0.24763498, -0.7224363, -0.5512907, -0.0400732, -0.09994836, -0.2354202, 0.2904534, -0.12089672, + -0.07095274, -0.8213324, 0.3695029, 0.27129403, 0.28678897, -0.108535565, -0.30699188, 0.10705576, + 0.08372605, -0.64183795, 0.34861454, -0.30277634, 0.21602349, -0.23038381, -0.10144254, -0.47548878, + 0.3525676, 0.3357812, 0.031383604, -0.32346088, -0.7515443, -0.14595662, -0.1425658, 0.54312915, + -0.60661954, 0.10959545, -0.17200017, 0.60667217, -0.22193804, 0.5861486, 0.4714104, -0.4168524, + -0.23929326, 0.47505698, -0.5256647, 0.23308091, 0.16735256, -0.021147087, -0.6238067, -0.065388694, + 0.38134024, 0.17625189, -0.048189547, -0.40676376, -0.20627557, -0.6200684, 0.24607961, -0.7479579, + 0.36243674, -0.41451588, -0.3258561, 0.07216902, 0.15214325, 0.2363326, 1.7854439, 0.2354896, + -0.80430084, 0.39550564, 0.06727363, 0.45679152, 0.09223966, -0.17635022, 0.065364204, -0.6799169, + 0.46794528, -0.6863512, -0.007789179, 0.0216118, 0.3218315, -0.329095, -0.15101263, 0.054294955, + 0.35598493, 0.8095643, 0.4240984, 0.107904576, -0.65505075, -0.25601476, -0.040415946, 0.57646215, + -0.14216466, -0.5626221, 0.21731018, 0.25857863, 0.029463748, 
-0.043640777, -0.86262965, 0.0075217593, + -0.65511745, 0.30682194, 0.36109644, -0.34552526, -0.57620883, -0.111058705, 0.42360848, 0.22977945, + 0.058191486, -0.6967789, -0.083894424, 0.21894856, -0.15210733, 0.2840013, -0.66721946, -0.12251554, + -0.55239767, -0.06489324, -0.17015795, -0.15400846, 0.14791602, -0.76380575, 0.27046034, -0.47688308, + 0.25788718, -0.074898824, 0.181136, 0.6860475, -0.14676934, 0.13610536, 0.74407804, -0.26433572, + -0.09919782, -0.26012585, -0.18844572, 0.8116442, 0.24614683, 0.076953486, 0.41485175, -0.64702696, + -0.5514351, -0.44831908, 0.7871427, 0.1256176, -0.37650946, 0.26002303, 0.55952126, -0.5275842, + 0.7185946, 0.09147637, -0.3937243, 0.10171145, -0.6451931, 0.8872601, 0.011252741, 1.1493335, + 0.7991122, -0.16108659, -0.7322848, 0.5237607, -0.50677204, 0.12007416, -0.6966177, -0.5039344, + 0.020131318, 0.15328859, -1.0066653, 0.32302102, -0.36504102, 0.37823763, -0.19183074, -0.4154492, + 0.14257756, 0.6225165, -0.24297066, 0.014472419, 0.8159169, 1.2461865, 0.07883369, -0.35416773, + -0.06593153, -0.81301326, 0.17566697, -0.04062626, -0.112336636, -0.22738501, -0.42422646, 0.458409, + 0.79599, 0.33880755, 0.39182758, 0.054381482, 0.5805471, 0.25382927, -0.16633242, 0.08435115, + 0.53753984, -0.16825016, -0.69669664, 0.21506411, -0.35470957, 0.25212923, 0.20211501, 0.6161077, + -0.077442676, -0.024064686, -0.18163882, 0.6834761, -1.0793741, 0.25927436, -0.69374615, -0.025031673, + -0.1307808, -0.5026866, -0.14586367, -0.41198593, -0.4018977, 0.10252101, -0.22274522, 0.9635526, + -0.17163973, 0.1639396, 0.66181034, -0.42865846, -0.18711954, -0.23968346, -0.09696686, 0.38911402, + 0.0962325, 0.46173036, 0.10814153, 1.0249863, -0.2061986, 0.6657442, -0.3277397, 0.26586995, + -0.12981872, 0.40097368, -0.49962977, -0.61136127, +} + +func getWithHybridSearch(t *testing.T) { + t.Run("without references", func(t *testing.T) { + query := ` + { + Get { + Airport + ( + hybrid: { + alpha: 0 + query: "10000" + } + ) + { + code + } + 
} + }` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Get", "Airport").AsSlice() + require.Len(t, result, 1) + assert.EqualValues(t, map[string]interface{}{"code": "10000"}, result[0]) + }) + + t.Run("with limit and vector", func(t *testing.T) { + limit := 2 + query := fmt.Sprintf(` + { + Get { + Company( + limit: %d + hybrid: { + query: "Apple", + alpha: 0.5, + vector: %s + } + ) { + name + } + } + }`, limit, graphqlhelper.Vec2String(appleVec)) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Get", "Company").AsSlice() + require.Len(t, result, limit) + assert.Contains(t, result, map[string]interface{}{ + "name": "Apple", + }) + assert.Contains(t, result, map[string]interface{}{ + "name": "Apple Inc.", + }) + }) + + t.Run("with limit and no vector", func(t *testing.T) { + limit := 2 + query := fmt.Sprintf(` + { + Get { + Company( + limit: %d + hybrid: { + query: "Apple", + alpha: 0.5, + } + ) { + name + } + } + }`, limit) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Get", "Company").AsSlice() + require.Len(t, result, limit) + assert.Contains(t, result, map[string]interface{}{ + "name": "Apple", + }) + assert.Contains(t, result, map[string]interface{}{ + "name": "Apple Inc.", + }) + }) + + t.Run("with no limit and vector", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + Company( + hybrid: { + query: "Apple", + alpha: 0.5, + vector: %s + } + ) { + name + } + } + }`, graphqlhelper.Vec2String(appleVec)) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Get", "Company").AsSlice() + require.Len(t, result, 9) + }) + + t.Run("with no limit and no vector", func(t *testing.T) { + query := ` + { + Get { + Company( + hybrid: { + query: "Apple", + alpha: 0.5, + } + ) { + name + } + } + }` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Get", "Company").AsSlice() + require.Len(t, result, 9) + }) + + t.Run("with _additional{vector}", func(t 
*testing.T) { + query := ` + { + Get { + Company( + hybrid: { + query: "Apple", + alpha: 0.5, + } + ) { + _additional { + vector + } + } + } + }` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Get", "Company").AsSlice() + require.Len(t, result, 9) + for _, res := range result { + company := res.(map[string]interface{}) + addl := company["_additional"].(map[string]interface{}) + vec, found := addl["vector"] + assert.True(t, found) + assert.Len(t, vec, 300) + } + }) + + t.Run("with references", func(t *testing.T) { + query := ` + { + Get { + Airport + ( + hybrid: { + alpha: 0.5 + query: "1000" + } + ) + { + code + inCity { + ... on City { + name + } + } + } + } + }` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Get("Get", "Airport").AsSlice() + require.Len(t, result, 4) + assert.Contains(t, result, + map[string]interface{}{ + "code": "10000", + "inCity": []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + }, + }) + assert.Contains(t, result, + map[string]interface{}{ + "code": "20000", + "inCity": []interface{}{ + map[string]interface{}{"name": "Rotterdam"}, + }, + }) + assert.Contains(t, result, + map[string]interface{}{ + "code": "30000", + "inCity": []interface{}{ + map[string]interface{}{"name": "Dusseldorf"}, + }, + }) + assert.Contains(t, result, + map[string]interface{}{ + "code": "40000", + "inCity": []interface{}{ + map[string]interface{}{"name": "Berlin"}, + }, + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_shadow_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_shadow_test.go new file mode 100644 index 0000000000000000000000000000000000000000..59a459d270e860b9256601d95defbe7d55255b68 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_shadow_test.go @@ -0,0 +1,140 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | 
__/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +// run by setup_test.go +func runningGetNearObjectWithShadowedObjects(t *testing.T) { + t.Run("running Get nearObject against shadow class", func(t *testing.T) { + query := ` + { + Get { + NearObjectSearch ( + nearObject: { + id : "aa44bbee-ca5f-4db7-a412-5fc6a2300001" + certainty: 0.98 + } + ) { + name + } + } + } + ` + + for i := 0; i < 50; i++ { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + objs := result.Get("Get", "NearObjectSearch").AsSlice() + + expected := []interface{}{ + map[string]interface{}{"name": "Mount Everest"}, + } + + assert.Len(t, objs, 1) + assert.ElementsMatch(t, expected, objs) + } + }) +} + +func runningAggregateNearObjectWithShadowedObjects(t *testing.T) { + t.Run("running Aggregate nearObject against shadow class", func(t *testing.T) { + query := ` + { + Aggregate { + NearObjectSearch ( + nearObject: { + id : "aa44bbee-ca5f-4db7-a412-5fc6a2300001" + certainty: 0.98 + } + ) { + meta { + count + } + } + } + } + ` + + for i := 0; i < 50; i++ { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + meta := result.Get("Aggregate", "NearObjectSearch").AsSlice()[0].(map[string]interface{})["meta"] + count := meta.(map[string]interface{})["count"] + expected := json.Number("1") + assert.Equal(t, expected, count) + } + }) +} + +func runningExploreNearObjectWithShadowedObjects(t *testing.T) { + t.Run("running Explore nearObject against shadow class with same contents", func(t *testing.T) { + query := ` + { + Explore ( + nearObject: { + id : "aa44bbee-ca5f-4db7-a412-5fc6a2300011" + certainty: 0.98 + } + 
) { + beacon + } + } + ` + + for i := 0; i < 50; i++ { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + objs := result.Get("Explore").AsSlice() + + expected := []interface{}{ + map[string]interface{}{"beacon": "weaviate://localhost/NearObjectSearch/aa44bbee-ca5f-4db7-a412-5fc6a2300011"}, + map[string]interface{}{"beacon": "weaviate://localhost/NearObjectSearchShadow/aa44bbee-ca5f-4db7-a412-5fc6a2300011"}, + } + + assert.Len(t, objs, 2) + assert.ElementsMatch(t, expected, objs) + } + }) + + t.Run("running Explore nearObject against shadow class with different contents", func(t *testing.T) { + query := ` + { + Explore ( + nearObject: { + id : "aa44bbee-ca5f-4db7-a412-5fc6a2300001" + certainty: 0.98 + } + ) { + beacon + } + } + ` + + for i := 0; i < 50; i++ { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + objs := result.Get("Explore").AsSlice() + + expected := []interface{}{ + map[string]interface{}{"beacon": "weaviate://localhost/NearObjectSearch/aa44bbee-ca5f-4db7-a412-5fc6a2300001"}, + map[string]interface{}{"beacon": "weaviate://localhost/NearObjectSearchShadow/aa44bbee-ca5f-4db7-a412-5fc6a2300001"}, + } + + assert.Len(t, objs, 2) + assert.ElementsMatch(t, expected, objs) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_test.go new file mode 100644 index 0000000000000000000000000000000000000000..744bd9676cf114ec0862476f57d4d8118e8d1d46 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_test.go @@ -0,0 +1,179 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "bytes" + "encoding/json" + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +// run by setup_test.go +func gettingObjects(t *testing.T) { + t.Run("listing cities without references", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, "{ Get { City { name } } }") + cities := result.Get("Get", "City").AsSlice() + + expected := []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + } + + assert.ElementsMatch(t, expected, cities) + }) + + t.Run("listing with top-level aliases", func(t *testing.T) { + client := helper.Client(t) + params := schema.NewAliasesCreateParams().WithBody(&models.Alias{ + Alias: "CityAlias", + Class: "City", + }) + _, err := client.Schema.AliasesCreate(params, nil) + defer func(t *testing.T) { + params := schema.NewAliasesDeleteParams().WithAliasName("CityAlias") + _, err := client.Schema.AliasesDelete(params, nil) + if err != nil { + t.Logf("Error deleting aliases: %v", err) + } + }(t) + require.Nil(t, err) + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, "{ Get { CityAlias { name } } }") + cities := result.Get("Get", "CityAlias").AsSlice() + + expected := []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + 
map[string]interface{}{"name": nil}, + } + + assert.ElementsMatch(t, expected, cities) + + t.Run("assert alias no longer in schema after deletion", func(t *testing.T) { + params := schema.NewAliasesDeleteParams().WithAliasName("CityAlias") + _, err := client.Schema.AliasesDelete(params, nil) + require.Nil(t, err) + + _ = graphqlhelper.ErrorGraphQL(t, helper.RootAuth, "{ Get { CityAlias { name } } }") + }) + }) + + t.Run("listing cities with relations", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, "{ Get { City { name, inCountry { ... on Country { name } } } } }") + cities := result.Get("Get", "City").AsSlice() + + expected := parseJSONSlice(`[ + { "name": "Amsterdam", "inCountry": [{ "name": "Netherlands" }] }, + { "name": "Rotterdam", "inCountry": [{ "name": "Netherlands" }] }, + { "name": "Berlin", "inCountry": [{ "name": "Germany" }] }, + { "name": "Dusseldorf", "inCountry": [{ "name": "Germany" }] }, + { "name": "Missing Island", "inCountry": null }, + { "name": null, "inCountry": null } + ]`) + + assert.ElementsMatch(t, expected, cities) + }) + + t.Run("make sure raw response contains no error key", func(t *testing.T) { + // This test prevents a regression on gh-1535 + + query := []byte(`{"query":"{ Get { City { name } } }"}`) + res, err := http.Post(fmt.Sprintf("%s%s", helper.GetWeaviateURL(), "/v1/graphql"), + "application/json", bytes.NewReader(query)) + require.Nil(t, err) + + defer res.Body.Close() + var body map[string]interface{} + err = json.NewDecoder(res.Body).Decode(&body) + require.Nil(t, err) + + _, ok := body["errors"] + assert.False(t, ok) + + cities := body["data"].(map[string]interface{})["Get"].(map[string]interface{})["City"].([]interface{}) + assert.Greater(t, len(cities), 0) + }) + + t.Run("listing cities with limit", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, "{ Get { City(limit: 2) { name } } }") + cities := result.Get("Get", "City").AsSlice() + + expected := 
[]interface{}{ + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + } + + assert.ElementsMatch(t, expected, cities) + }) + + t.Run("listing cities with offset and limit", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, "{ Get { City(offset: 2 limit: 2) { name } } }") + cities := result.Get("Get", "City").AsSlice() + + expected := []interface{}{ + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + } + + assert.ElementsMatch(t, expected, cities) + }) + + t.Run("listing cities with offset", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, "{ Get { City(offset: 2) { name } } }") + cities := result.Get("Get", "City").AsSlice() + + expected := []interface{}{ + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + } + + assert.ElementsMatch(t, expected, cities) + }) + + t.Run("listing cities with offset and limit beyond results size", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, "{ Get { City(offset: 5 limit: 10) { name } } }") + cities := result.Get("Get", "City").AsSlice() + + expected := []interface{}{ + map[string]interface{}{"name": "Berlin"}, + } + + assert.ElementsMatch(t, expected, cities) + }) + + t.Run("listing cities with offset beyond results size", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, "{ Get { City(offset: 6) { name } } }") + cities := result.Get("Get", "City").AsSlice() + + expected := []interface{}{} + + assert.ElementsMatch(t, expected, cities) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_additional_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_additional_test.go new file mode 100644 index 
0000000000000000000000000000000000000000..8b04061424f7950cfcf238230781249f71165a3f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_additional_test.go @@ -0,0 +1,347 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "testing" + + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/test/helper" +) + +func gettingObjectsWithAdditionalProps(t *testing.T) { + t.Run("with vector set", func(t *testing.T) { + query := ` + { + Get { + Company { + _additional { + vector + } + name + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + companies := result.Get("Get", "Company").AsSlice() + + require.Greater(t, len(companies), 0) + for _, comp := range companies { + vec, ok := comp.(map[string]interface{})["_additional"].(map[string]interface{})["vector"] + require.True(t, ok) + + vecSlice, ok := vec.([]interface{}) + require.True(t, ok) + require.Greater(t, len(vecSlice), 0) + + asFloat, err := vecSlice[0].(json.Number).Float64() + require.Nil(t, err) + assert.True(t, asFloat >= -1) + assert.True(t, asFloat <= 1) + } + }) + + t.Run("with interpretation set", func(t *testing.T) { + query := ` + { + Get { + Company { + _additional { + interpretation{ + source { + concept + } + } + } + name + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + companies := result.Get("Get", "Company").AsSlice() + + expected := []interface{}{ + map[string]interface{}{ + "name": "Microsoft Inc.", + "_additional": map[string]interface{}{ + "interpretation": 
map[string]interface{}{ + "source": []interface{}{ + map[string]interface{}{ + "concept": "microsoft", + }, + map[string]interface{}{ + "concept": "inc", + }, + }, + }, + }, + }, + map[string]interface{}{ + "name": "Microsoft Incorporated", + "_additional": map[string]interface{}{ + "interpretation": map[string]interface{}{ + "source": []interface{}{ + map[string]interface{}{ + "concept": "microsoft", + }, + map[string]interface{}{ + "concept": "incorporated", + }, + }, + }, + }, + }, + map[string]interface{}{ + "name": "Microsoft", + "_additional": map[string]interface{}{ + "interpretation": map[string]interface{}{ + "source": []interface{}{ + map[string]interface{}{ + "concept": "microsoft", + }, + }, + }, + }, + }, + map[string]interface{}{ + "name": "Apple Inc.", + "_additional": map[string]interface{}{ + "interpretation": map[string]interface{}{ + "source": []interface{}{ + map[string]interface{}{ + "concept": "apple", + }, + map[string]interface{}{ + "concept": "inc", + }, + }, + }, + }, + }, + map[string]interface{}{ + "name": "Apple Incorporated", + "_additional": map[string]interface{}{ + "interpretation": map[string]interface{}{ + "source": []interface{}{ + map[string]interface{}{ + "concept": "apple", + }, + map[string]interface{}{ + "concept": "incorporated", + }, + }, + }, + }, + }, + map[string]interface{}{ + "name": "Apple", + "_additional": map[string]interface{}{ + "interpretation": map[string]interface{}{ + "source": []interface{}{ + map[string]interface{}{ + "concept": "apple", + }, + }, + }, + }, + }, + map[string]interface{}{ + "name": "Google Inc.", + "_additional": map[string]interface{}{ + "interpretation": map[string]interface{}{ + "source": []interface{}{ + map[string]interface{}{ + "concept": "google", + }, + map[string]interface{}{ + "concept": "inc", + }, + }, + }, + }, + }, + map[string]interface{}{ + "name": "Google Incorporated", + "_additional": map[string]interface{}{ + "interpretation": map[string]interface{}{ + "source": 
[]interface{}{ + map[string]interface{}{ + "concept": "google", + }, + map[string]interface{}{ + "concept": "incorporated", + }, + }, + }, + }, + }, + map[string]interface{}{ + "name": "Google", + "_additional": map[string]interface{}{ + "interpretation": map[string]interface{}{ + "source": []interface{}{ + map[string]interface{}{ + "concept": "google", + }, + }, + }, + }, + }, + } + + assert.ElementsMatch(t, expected, companies) + }) + + t.Run("with _additional nearestNeighbors set", func(t *testing.T) { + query := ` + { + Get { + Company { + _additional { + nearestNeighbors{ + neighbors { + concept + distance + } + } + } + name + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + companies := result.Get("Get", "Company").AsSlice() + + extractNeighbors := func(in interface{}) []interface{} { + return in.(map[string]interface{})["_additional"].(map[string]interface{})["nearestNeighbors"].(map[string]interface{})["neighbors"].([]interface{}) + } + + neighbors0 := extractNeighbors(companies[0]) + neighbors1 := extractNeighbors(companies[1]) + neighbors2 := extractNeighbors(companies[2]) + + validateNeighbors(t, neighbors0, neighbors1, neighbors2) + }) + + t.Run("with _additional featureProjection set", func(t *testing.T) { + query := ` + { + Get { + Company { + _additional { + featureProjection(dimensions:3){ + vector + } + } + name + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + companies := result.Get("Get", "Company").AsSlice() + + extractProjections := func(in interface{}) []interface{} { + return in.(map[string]interface{})["_additional"].(map[string]interface{})["featureProjection"].(map[string]interface{})["vector"].([]interface{}) + } + + projections0 := extractProjections(companies[0]) + projections1 := extractProjections(companies[1]) + projections2 := extractProjections(companies[2]) + + validateProjections(t, 3, projections0, projections1, projections2) + }) + + t.Run("with _additional 
vector set in reference", func(t *testing.T) { + query := ` + { + Get { + City { + _additional { + vector + } + inCountry { + ... on Country { + _additional { + vector + } + } + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + cities := result.Get("Get", "City").AsSlice() + + vector := cities[0].(map[string]interface{})["inCountry"].([]interface{})[0].(map[string]interface{})["_additional"].(map[string]interface{})["vector"] + + assert.NotNil(t, vector) + }) + + t.Run("with _additional creationTimeUnix and lastUpdateTimeUnix set in reference", func(t *testing.T) { + query := ` + { + Get { + City { + inCountry { + ... on Country { + _additional { + creationTimeUnix + lastUpdateTimeUnix + } + } + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + cities := result.Get("Get", "City").AsSlice() + + created := cities[0].(map[string]interface{})["inCountry"].([]interface{})[0].(map[string]interface{})["_additional"].(map[string]interface{})["creationTimeUnix"] + updated := cities[0].(map[string]interface{})["inCountry"].([]interface{})[0].(map[string]interface{})["_additional"].(map[string]interface{})["lastUpdateTimeUnix"] + + assert.NotNil(t, created) + assert.NotNil(t, updated) + }) +} + +func validateNeighbors(t *testing.T, neighborsGroups ...[]interface{}) { + for i, group := range neighborsGroups { + if len(group) == 0 { + t.Fatalf("group %d: length of neighbors is 0", i) + } + + for j, neighbor := range group { + asMap := neighbor.(map[string]interface{}) + if len(asMap["concept"].(string)) == 0 { + t.Fatalf("group %d: element %d: concept has length 0", i, j) + } + } + } +} + +func validateProjections(t *testing.T, dims int, vectors ...[]interface{}) { + for i := range vectors { + if len(vectors[i]) != dims { + t.Fatalf("expected feature projection vector to have length 3, got: %d", len(vectors[i])) + } + } +} diff --git 
a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_custom_vectors_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_custom_vectors_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c121ad10b0e1519c65431b57a900d5f3c371f423 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_custom_vectors_test.go @@ -0,0 +1,70 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "testing" + + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/test/helper" +) + +func gettingObjectsWithCustomVectors(t *testing.T) { + t.Run("through Get {}", func(t *testing.T) { + query := ` + { + Get { + CustomVectorClass(nearVector:{vector:[1,1,1]}) { + _additional { + id + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + results := result.Get("Get", "CustomVectorClass").AsSlice() + + expected := []interface{}{ + map[string]interface{}{"_additional": map[string]interface{}{"id": string(cvc1)}}, + map[string]interface{}{"_additional": map[string]interface{}{"id": string(cvc2)}}, + map[string]interface{}{"_additional": map[string]interface{}{"id": string(cvc3)}}, + } + + assert.Equal(t, expected, results) + }) +} + +func exploreObjectsWithCustomVectors(t *testing.T) { + t.Run("through Explore {}", func(t *testing.T) { + query := ` + { + Explore(nearVector: {vector:[1,1,1]}) { + beacon + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + results := result.Get("Explore").AsSlice() + + expected := []interface{}{ + 
map[string]interface{}{"beacon": fmt.Sprintf("weaviate://localhost/CustomVectorClass/%s", cvc1)}, + map[string]interface{}{"beacon": fmt.Sprintf("weaviate://localhost/CustomVectorClass/%s", cvc2)}, + map[string]interface{}{"beacon": fmt.Sprintf("weaviate://localhost/CustomVectorClass/%s", cvc3)}, + } + + assert.Equal(t, expected, results) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_expected_failures.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_expected_failures.go new file mode 100644 index 0000000000000000000000000000000000000000..080c8416a0ccfda4ca0f564b2fa8404291c542c2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_expected_failures.go @@ -0,0 +1,111 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func getsWithExpectedFailures(t *testing.T) { + t.Run("get with certainty on l2-squared distancer", func(t *testing.T) { + className := "L2DistanceClass" + defer deleteObjectClass(t, className) + + t.Run("create class configured with distance type l2-squared", func(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: className, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + VectorIndexConfig: map[string]interface{}{ + "distance": "l2-squared", + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + }) + + t.Run("assert failure to get", func(t *testing.T) { + query := ` + { + Get { + L2DistanceClass(nearVector: {vector:[1,1,1], certainty: 0.8}) { + name + } + } + }` + + result := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, query) + assert.Len(t, result, 1) + + errMsg := result[0].Message + assert.Equal(t, "can't compute and return certainty when vector index is configured with l2-squared distance", errMsg) + }) + }) + + t.Run("get with certainty on dot distancer", func(t *testing.T) { + className := "DotDistanceClass" + defer deleteObjectClass(t, className) + + t.Run("create class configured with distance type dot", func(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: className, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + VectorIndexConfig: map[string]interface{}{ + "distance": "dot", + }, + Properties: 
[]*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + }) + + t.Run("assert failure to get", func(t *testing.T) { + query := ` + { + Get { + DotDistanceClass(nearVector: {vector:[1,1,1], certainty: 0.8}) { + name + } + } + }` + + result := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, query) + assert.Len(t, result, 1) + + errMsg := result[0].Message + assert.Equal(t, "can't compute and return certainty when vector index is configured with dot distance", errMsg) + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_filter_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_filter_test.go new file mode 100644 index 0000000000000000000000000000000000000000..2fe6e1169f3f11832ca69116e2fe3534336d87d7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_filter_test.go @@ -0,0 +1,729 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "fmt" + "strconv" + "testing" + "time" + + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func gettingObjectsWithFilters(t *testing.T) { + t.Run("without filters <- this is the control", func(t *testing.T) { + query := ` + { + Get { + Airport { + code + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + airports := result.Get("Get", "Airport").AsSlice() + + expected := []interface{}{ + map[string]interface{}{"code": "10000"}, + map[string]interface{}{"code": "20000"}, + map[string]interface{}{"code": "30000"}, + map[string]interface{}{"code": "40000"}, + } + + assert.ElementsMatch(t, expected, airports) + }) + + t.Run("nearText with prop length", func(t *testing.T) { + query := ` + { + Get { + City ( + nearText: { + concepts: ["hi"], + distance: 0.9 + }, + where: { + path: "len(name)" + operator: GreaterThanEqual + valueInt: 0 + } + ) { + name + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + cities := result.Get("Get", "City").AsSlice() + assert.Len(t, cities, 5) + }) + + t.Run("nearText with null filter", func(t *testing.T) { + query := ` + { + Get { + City ( + nearText: { + concepts: ["hi"], + distance: 0.9 + }, + where: { + path: "name" + operator: IsNull + valueBoolean: true + } + ) { + name + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + cities := result.Get("Get", "City").AsSlice() + assert.Len(t, cities, 1) + }) + + t.Run("with filters applied", func(t *testing.T) { + query := ` + { + Get { + Airport(where:{ + operator:And + operands: [ + { + operator: GreaterThan, + valueInt: 600000, + 
path:["inCity", "City", "population"] + } + { + operator: Equal, + valueText:"Germany" + path:["inCity", "City", "inCountry", "Country", "name"] + } + ] + }){ + code + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + airports := result.Get("Get", "Airport").AsSlice() + + expected := []interface{}{ + map[string]interface{}{"code": "40000"}, + } + + assert.ElementsMatch(t, expected, airports) + }) + + t.Run("with or filters applied", func(t *testing.T) { + // this test was added to prevent a regression on the bugfix for gh-758 + + query := ` + { + Aggregate { + City(where:{ + operator:Or + operands:[{ + valueText:"Amsterdam", + operator:Equal, + path:["name"] + }, { + valueText:"Berlin", + operator:Equal, + path:["name"] + }] + }) { + __typename + name { + __typename + count + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + cityMeta := result.Get("Aggregate", "City").AsSlice()[0] + + expected := map[string]interface{}{ + "__typename": "AggregateCity", + "name": map[string]interface{}{ + "__typename": "AggregateCitynameObj", + "count": json.Number("2"), + }, + } + + assert.Equal(t, expected, cityMeta) + }) + + t.Run("with filters and ref showing a phone number", func(t *testing.T) { + // this is the journey test for gh-1088 + + query := ` + { + Get { + Airport(where:{ + valueText:"Amsterdam", + operator:Equal, + path:["inCity", "City", "name"] + }) { + phone { + internationalFormatted + countryCode + nationalFormatted + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + airport := result.Get("Get", "Airport").AsSlice()[0] + + expected := map[string]interface{}{ + "phone": map[string]interface{}{ + "internationalFormatted": "+31 1234567", + "countryCode": json.Number("31"), + "nationalFormatted": "1234567", + }, + } + + assert.Equal(t, expected, airport) + }) + + t.Run("with uuid filters applied", func(t *testing.T) { + query := ` + { + Get { + Airport(where:{ + 
operator:And + operands: [ + { + operator: GreaterThan, + valueText: "00000000-0000-0000-0000-000000010000", + path:["airportId"] + }, + { + operator: LessThan, + valueText: "00000000-0000-0000-0000-000000030000", + path:["airportId"] + }, + { + operator: NotEqual, + valueText: "00000000-0000-0000-0000-000000040000", + path:["airportId"] + } + ] + }){ + code + airportId + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + airports := result.Get("Get", "Airport").AsSlice() + + expected := []interface{}{ + map[string]interface{}{ + "code": "20000", + "airportId": "00000000-0000-0000-0000-000000020000", + }, + } + + assert.ElementsMatch(t, expected, airports) + }) + + t.Run("filtering for ref counts", func(t *testing.T) { + // this is the journey test for gh-1101 + + query := func(op string, count int) string { + return fmt.Sprintf(` + { + Get { + Person(where:{ + valueInt: %d + operator:%s, + path:["livesIn"] + }) { + name + } + } + } + `, count, op) + } + + t.Run("no refs", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("Equal", 0)) + // Alice should be the only person that has zero places she lives in + require.Len(t, result.Get("Get", "Person").AsSlice(), 1) + name := result.Get("Get", "Person").AsSlice()[0].(map[string]interface{})["name"] + assert.Equal(t, "Alice", name) + }) + + t.Run("exactly one", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("Equal", 1)) + // bob should be the only person that has zero places she lives in + require.Len(t, result.Get("Get", "Person").AsSlice(), 1) + name := result.Get("Get", "Person").AsSlice()[0].(map[string]interface{})["name"] + assert.Equal(t, "Bob", name) + }) + + t.Run("2 or more", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("GreaterThanEqual", 2)) + // both john(2) and petra(3) should match + require.Len(t, result.Get("Get", "Person").AsSlice(), 2) + name1 := 
result.Get("Get", "Person").AsSlice()[0].(map[string]interface{})["name"] + name2 := result.Get("Get", "Person").AsSlice()[1].(map[string]interface{})["name"] + assert.ElementsMatch(t, []string{"John", "Petra"}, []string{name1.(string), name2.(string)}) + }) + }) + + t.Run("filtering by property len", func(t *testing.T) { + query := `{ + Get { + ArrayClass(where:{ + valueInt: 4, + operator:Equal, + path:["len(texts)"] + }) { + texts + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + require.Len(t, result.Get("Get", "ArrayClass").AsSlice(), 1) + }) + + t.Run("filtering by null property", func(t *testing.T) { + query := `{ + Get { + ArrayClass(where:{ + valueBoolean: true, + operator:IsNull, + path:["texts"] + }) { + texts + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + require.Len(t, result.Get("Get", "ArrayClass").AsSlice(), 3) // empty, nil and len==0 objects + }) + + t.Run("filtering by property with field tokenization", func(t *testing.T) { + // tests gh-1821 feature + + query := func(value string) string { + return fmt.Sprintf(` + { + Get { + Person(where:{ + valueText: "%s" + operator:Equal, + path:["profession"] + }) { + name + } + } + } + `, value) + } + + t.Run("noone", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("Quality")) + // Quality is not full field for anyone, therefore noone should be returned + require.Len(t, result.Get("Get", "Person").AsSlice(), 0) + }) + + t.Run("just one is Mechanical Engineer", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("Mechanical Engineer")) + // Bob is Mechanical Engineer, though John is Senior + require.Len(t, result.Get("Get", "Person").AsSlice(), 1) + name := result.Get("Get", "Person").AsSlice()[0].(map[string]interface{})["name"] + assert.Equal(t, "Bob", name) + }) + + t.Run("just one is Senior Mechanical Engineer", func(t *testing.T) { + result := 
graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("Senior Mechanical Engineer")) + // so to get John, his full profession name has to be used + require.Len(t, result.Get("Get", "Person").AsSlice(), 1) + name := result.Get("Get", "Person").AsSlice()[0].(map[string]interface{})["name"] + assert.Equal(t, "John", name) + }) + + t.Run("just one is Quality Assurance Manager", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("Quality Assurance Manager")) + // petra is Quality Assurance Manager + require.Len(t, result.Get("Get", "Person").AsSlice(), 1) + name := result.Get("Get", "Person").AsSlice()[0].(map[string]interface{})["name"] + assert.Equal(t, "Petra", name) + }) + }) + + t.Run("filtering by array property with field tokenization", func(t *testing.T) { + // tests gh-1821 feature + + query := func(value string) string { + return fmt.Sprintf(` + { + Get { + Person(where:{ + valueText: "%s" + operator:Equal, + path:["about"] + }) { + name + } + } + } + `, value) + } + + t.Run("noone", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("swimming")) + // swimming is not full field for anyone, therefore noone should be returned + require.Len(t, result.Get("Get", "Person").AsSlice(), 0) + }) + + t.Run("just one hates swimming", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("hates swimming")) + // but only john hates swimming + require.Len(t, result.Get("Get", "Person").AsSlice(), 1) + name := result.Get("Get", "Person").AsSlice()[0].(map[string]interface{})["name"].(string) + assert.Equal(t, "John", name) + }) + + t.Run("exactly 2 loves travelling", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("loves travelling")) + // bob and john loves travelling, alice loves traveling very much + require.Len(t, result.Get("Get", "Person").AsSlice(), 2) + name1 := result.Get("Get", 
"Person").AsSlice()[0].(map[string]interface{})["name"].(string) + name2 := result.Get("Get", "Person").AsSlice()[1].(map[string]interface{})["name"].(string) + assert.ElementsMatch(t, []string{"Bob", "John"}, []string{name1, name2}) + }) + + t.Run("only one likes cooking for family", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("likes cooking for family")) + // petra likes cooking for family, john simply likes cooking + require.Len(t, result.Get("Get", "Person").AsSlice(), 1) + name := result.Get("Get", "Person").AsSlice()[0].(map[string]interface{})["name"] + assert.Equal(t, "Petra", name) + }) + }) + + t.Run("filtering by stopwords", func(t *testing.T) { + query := func(value string) string { + return fmt.Sprintf(` + { + Get { + Pizza(where:{ + valueText: "%s" + operator:Equal, + path:["description"] + }) { + name + _additional{ + id + } + } + } + } + `, value) + } + + t.Run("2 results by partial description", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("italian")) + pizzas := result.Get("Get", "Pizza").AsSlice() + require.Len(t, pizzas, 2) + id1 := pizzas[0].(map[string]interface{})["_additional"].(map[string]interface{})["id"] + id2 := pizzas[1].(map[string]interface{})["_additional"].(map[string]interface{})["id"] + assert.Equal(t, quattroFormaggi.String(), id1) + assert.Equal(t, fruttiDiMare.String(), id2) + }) + + t.Run("1 result by full description containing stopwords", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query("Universally accepted to be the best pizza ever created.")) + pizzas := result.Get("Get", "Pizza").AsSlice() + require.Len(t, pizzas, 1) + id1 := pizzas[0].(map[string]interface{})["_additional"].(map[string]interface{})["id"] + assert.Equal(t, hawaii.String(), id1) + }) + + t.Run("error by description containing just stopwords", func(t *testing.T) { + errors := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, query("to be 
or not to be")) + require.Len(t, errors, 1) + assert.Contains(t, errors[0].Message, "invalid search term, only stopwords provided. Stopwords can be configured in class.invertedIndexConfig.stopwords") + }) + }) + + t.Run("with filtering by id", func(t *testing.T) { + // this is the journey test for gh-1088 + + query := ` + { + Get { + Airport(where:{ + valueText:"4770bb19-20fd-406e-ac64-9dac54c27a0f", + operator:Equal, + path:["id"] + }) { + phone { + internationalFormatted + countryCode + nationalFormatted + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + airport := result.Get("Get", "Airport").AsSlice()[0] + + expected := map[string]interface{}{ + "phone": map[string]interface{}{ + "internationalFormatted": "+31 1234567", + "countryCode": json.Number("31"), + "nationalFormatted": "1234567", + }, + } + + assert.Equal(t, expected, airport) + }) + + t.Run("with filtering by timestamps", func(t *testing.T) { + query := ` + { + Get { + Airport { + _additional { + id + creationTimeUnix + lastUpdateTimeUnix + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + airport := result.Get("Get", "Airport").AsSlice()[0] + additional := airport.(map[string]interface{})["_additional"] + targetID := additional.(map[string]interface{})["id"].(string) + targetCreationTime := additional.(map[string]interface{})["creationTimeUnix"].(string) + targetUpdateTime := additional.(map[string]interface{})["lastUpdateTimeUnix"].(string) + + creationTimestamp, err := strconv.ParseInt(targetCreationTime, 10, 64) + assert.Nil(t, err) + creationDate := time.UnixMilli(creationTimestamp).Format(time.RFC3339) + updateTimestamp, err := strconv.ParseInt(targetUpdateTime, 10, 64) + assert.Nil(t, err) + updateDate := time.UnixMilli(updateTimestamp).Format(time.RFC3339) + + t.Run("creationTimeUnix as timestamp", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + Airport( + where: { + path: ["_creationTimeUnix"] + operator: 
Equal + valueText: "%s" + } + ) + { + _additional { + id + } + } + } + } + `, targetCreationTime) + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + airport := result.Get("Get", "Airport").AsSlice()[0] + additional := airport.(map[string]interface{})["_additional"] + resultID := additional.(map[string]interface{})["id"].(string) + assert.Equal(t, targetID, resultID) + }) + + t.Run("creationTimeUnix as date", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + Airport( + where: { + path: ["_creationTimeUnix"] + operator: GreaterThanEqual + valueDate: "%s" + } + ) + { + _additional { + id + } + } + } + } + `, creationDate) + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + airport := result.Get("Get", "Airport").AsSlice()[0] + additional := airport.(map[string]interface{})["_additional"] + resultID := additional.(map[string]interface{})["id"].(string) + assert.Equal(t, targetID, resultID) + }) + + t.Run("lastUpdateTimeUnix as timestamp", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + Airport( + where: { + path: ["_lastUpdateTimeUnix"] + operator: Equal + valueText: "%s" + } + ) + { + _additional { + id + } + } + } + } + `, targetUpdateTime) + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + airport := result.Get("Get", "Airport").AsSlice()[0] + additional := airport.(map[string]interface{})["_additional"] + resultID := additional.(map[string]interface{})["id"].(string) + assert.Equal(t, targetID, resultID) + }) + + t.Run("lastUpdateTimeUnix as date", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + Airport( + where: { + path: ["_lastUpdateTimeUnix"] + operator: GreaterThanEqual + valueDate: "%s" + } + ) + { + _additional { + id + } + } + } + } + `, updateDate) + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + airport := result.Get("Get", "Airport").AsSlice()[0] + additional := airport.(map[string]interface{})["_additional"] + resultID := 
additional.(map[string]interface{})["id"].(string) + assert.Equal(t, targetID, resultID) + }) + }) + + t.Run("with id filter on object with no props", func(t *testing.T) { + id := strfmt.UUID("f0ea8fb8-5a1f-449d-aed5-d68dc65cd644") + defer deleteObjectClass(t, "NoProps") + + t.Run("setup test class and obj", func(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: "NoProps", Properties: []*models.Property{ + {Name: "unused", DataType: schema.DataTypeText.PropString(), Tokenization: models.PropertyTokenizationWhitespace}, + }, + }) + + createObject(t, &models.Object{Class: "NoProps", ID: id}) + }) + + t.Run("do query", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + NoProps(where:{operator:Equal path:["_id"] valueText:"%s"}) + { + _additional {id} + } + } + } + `, id) + response := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + result := response.Get("Get", "NoProps").AsSlice() + require.Len(t, result, 1) + additional := result[0].(map[string]interface{})["_additional"] + resultID := additional.(map[string]interface{})["id"].(string) + assert.Equal(t, id.String(), resultID) + }) + }) + + t.Run("with nul filter", func(t *testing.T) { + tests := []struct { + name string + value bool + Results []interface{} + }{ + { + name: "Null values", + value: true, + Results: []interface{}{"Missing Island", nil}, // one entry with null history has no name + }, + { + name: "Non-null values", + value: false, + Results: []interface{}{"Amsterdam", "Rotterdam", "Berlin", "Dusseldorf"}, + }, + } + query := ` + { + Get { + City(where:{ + valueBoolean: %v, + operator:IsNull, + path:["history"] + }) { + name + } + } + } + ` + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(query, tt.value)) + cities := result.Get("Get", "City").AsSlice() + require.Len(t, cities, len(tt.Results)) + for _, city := range cities { + cityMap := city.(map[string]interface{}) + 
require.Contains(t, tt.Results, cityMap["name"]) + } + }) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_geo_range_filter_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_geo_range_filter_test.go new file mode 100644 index 0000000000000000000000000000000000000000..26943bb61392ef0a2d824bb27c6ece496c6db35f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_geo_range_filter_test.go @@ -0,0 +1,134 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "testing" + + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/test/helper" +) + +func gettingObjectsWithGeoFilters(t *testing.T) { + t.Run("Only Dusseldorf should be within 100km of Dusseldorf", func(t *testing.T) { + query := ` + { + Get { + City(where:{ + operator: WithinGeoRange + path: ["location"] + valueGeoRange: { geoCoordinates: {latitude: 51.225556, longitude: 6.782778} distance: { max: 100000 } } + }){ + name + location { + latitude + longitude + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + cities := result.Get("Get", "City").AsSlice() + + expectedResults := []interface{}{ + map[string]interface{}{ + "name": "Dusseldorf", + "location": map[string]interface{}{ + "latitude": json.Number("51.225555"), + "longitude": json.Number("6.782778"), + }, + }, + } + + assert.Equal(t, expectedResults, cities) + }) + + t.Run("Dusseldorf and Amsterdam should be within 200km of Dusseldorf", func(t *testing.T) { + query := ` + { + Get { + City(where:{ + 
operator: WithinGeoRange + path: ["location"] + valueGeoRange: { geoCoordinates: {latitude: 51.225556, longitude: 6.782778} distance: { max: 200000 } } + }){ + name + location { + latitude + longitude + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + cities := result.Get("Get", "City").AsSlice() + + expectedResults := []interface{}{ + map[string]interface{}{ + "name": "Dusseldorf", + "location": map[string]interface{}{ + "latitude": json.Number("51.225555"), + "longitude": json.Number("6.782778"), + }, + }, + map[string]interface{}{ + "name": "Amsterdam", + "location": map[string]interface{}{ + "latitude": json.Number("52.36667"), + "longitude": json.Number("4.9"), + }, + }, + } + + assert.ElementsMatch(t, expectedResults, cities) + }) + + // This test prevents a regression on gh-825 + t.Run("Missing island is displayed correctly", func(t *testing.T) { + query := ` + { + Get { + City(where:{ + operator: WithinGeoRange + path: ["location"] + valueGeoRange: { geoCoordinates: {latitude: 0, longitude: 0} distance: { max: 20 } } + }){ + name + location { + latitude + longitude + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + cities := result.Get("Get", "City").AsSlice() + + expectedResults := []interface{}{ + map[string]interface{}{ + "name": "Missing Island", + "location": map[string]interface{}{ + "latitude": json.Number("0"), + "longitude": json.Number("0"), + }, + }, + } + + assert.ElementsMatch(t, expectedResults, cities) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_group_by_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_group_by_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9c19e43d4d0c1314519cecc261d944c304f27dd8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_group_by_test.go @@ -0,0 
+1,371 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + "github.com/weaviate/weaviate/test/helper/journey" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/test/helper" +) + +func groupByObjects(t *testing.T) { + t.Run("group by: people by city", func(t *testing.T) { + getGroup := func(value interface{}) map[string]interface{} { + group := value.(map[string]interface{})["_additional"].(map[string]interface{})["group"].(map[string]interface{}) + return group + } + getGroupHits := func(group map[string]interface{}) (string, []string) { + result := []string{} + hits := group["hits"].([]interface{}) + for _, hit := range hits { + additional := hit.(map[string]interface{})["_additional"].(map[string]interface{}) + result = append(result, additional["id"].(string)) + } + groupedBy := group["groupedBy"].(map[string]interface{}) + groupedByValue := groupedBy["value"].(string) + return groupedByValue, result + } + query := ` + { + Get{ + Person( + nearObject:{ + id: "8615585a-2960-482d-b19d-8bee98ade52c" + } + groupBy:{ + path:["livesIn"] + groups:4 + objectsPerGroup: 10 + } + ){ + _additional{ + id + group{ + groupedBy{value} + count + maxDistance + minDistance + hits { + _additional { + id + distance + } + } + } + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + groups := result.Get("Get", "Person").AsSlice() + + require.Len(t, groups, 4) + + expectedResults := map[string][]string{} + + groupedBy1 := `weaviate://localhost/City/8f5f8e44-d348-459c-88b1-c1a44bb8f8be` + expectedGroup1 := []string{ + 
"8615585a-2960-482d-b19d-8bee98ade52c", + "3ef44474-b5e5-455d-91dc-d917b5b76165", + "15d222c9-8c36-464b-bedb-113faa1c1e4c", + } + expectedResults[groupedBy1] = expectedGroup1 + + groupedBy2 := `weaviate://localhost/City/9b9cbea5-e87e-4cd0-89af-e2f424fd52d6` + expectedGroup2 := []string{ + "3ef44474-b5e5-455d-91dc-d917b5b76165", + "15d222c9-8c36-464b-bedb-113faa1c1e4c", + } + expectedResults[groupedBy2] = expectedGroup2 + + groupedBy3 := `weaviate://localhost/City/6ffb03f8-a853-4ec5-a5d8-302e45aaaf13` + expectedGroup3 := []string{ + "15d222c9-8c36-464b-bedb-113faa1c1e4c", + } + expectedResults[groupedBy3] = expectedGroup3 + + groupedBy4 := "" + expectedGroup4 := []string{ + "5d0fa6ee-21c4-4b46-a735-f0208717837d", + } + expectedResults[groupedBy4] = expectedGroup4 + + groupsOrder := []string{groupedBy1, groupedBy2, groupedBy4, groupedBy3} + for i, current := range groups { + group := getGroup(current) + groupedBy, ids := getGroupHits(group) + assert.Equal(t, groupsOrder[i], groupedBy) + assert.ElementsMatch(t, expectedResults[groupedBy], ids) + } + }) + + t.Run("group by: passages by documents", func(t *testing.T) { + journey.GroupBySingleAndMultiShardTests(t, "") + }) +} + +func groupByBm25(t *testing.T) { + t.Run("group by: companies by city bm25", func(t *testing.T) { + getGroup := func(value interface{}) map[string]interface{} { + group := value.(map[string]interface{})["_additional"].(map[string]interface{})["group"].(map[string]interface{}) + return group + } + getGroupHits := func(group map[string]interface{}) (string, []string) { + result := []string{} + hits := group["hits"].([]interface{}) + for _, hit := range hits { + additional := hit.(map[string]interface{})["_additional"].(map[string]interface{}) + result = append(result, additional["id"].(string)) + } + groupedBy := group["groupedBy"].(map[string]interface{}) + groupedByValue := groupedBy["value"].(string) + return groupedByValue, result + } + + query := ` + { + Get{ + CompanyGroup( + bm25:{ + 
query:"Inc Apple Microsoft" + } + groupBy:{ + path:["city"] + groups:4 + objectsPerGroup: 10 + } + ){ + _additional{ + group{ + id + groupedBy{value path} + count + maxDistance + minDistance + hits { + name city + _additional { + id + distance + } + } + } + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + groups := result.Get("Get", "CompanyGroup").AsSlice() + + require.Len(t, groups, 3) + + group1 := getGroup(groups[0]) + groupby, hits := getGroupHits(group1) + + t.Logf("groupby: %s, hits: %+v\n", groupby, hits) + require.Equal(t, "dusseldorf", groupby) + require.Len(t, hits, 2) + require.Equal(t, hits[0], "1fa3b21e-ca4f-4db7-a432-7fc6a23c534d") + require.Equal(t, hits[1], "1b2cfdba-d4ba-4cf8-abda-e719ef35ac33") + + group2 := getGroup(groups[1]) + groupby, hits = getGroupHits(group2) + t.Logf("groupby: %s, hits: %+v\n", groupby, hits) + require.Equal(t, "berlin", groupby) + require.Len(t, hits, 2) + require.Equal(t, hits[0], "177fec91-1292-4928-8f53-f0ff49c76900") + require.Equal(t, hits[1], "1343f51d-7e05-4084-bd66-d504db3b6bec") + + group3 := getGroup(groups[2]) + groupby, hits = getGroupHits(group3) + t.Logf("groupby: %s, hits: %+v\n", groupby, hits) + require.Equal(t, "amsterdam", groupby) + require.Len(t, hits, 3) + require.Equal(t, hits[0], "171d2b4c-3da1-4684-9c5e-aabd2a4f2998") + require.Equal(t, hits[1], "1f75ed97-39dd-4294-bff7-ecabd7923062") + require.Equal(t, hits[2], "1c2e21fc-46fe-4999-b41c-a800595129af") + }) +} + +func groupByHybridNearVector(t *testing.T) { + defaultLimit := 100 + t.Run("nearVector only hybrid search", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + hybrid: { + searches: { + nearVector: { + distance: 1.8 + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484, -0.0011832615, 0.23041481, -0.091878965, 0.1184808, 0.060692377, 0.1748896, 0.53659165, 0.12019079, 0.54373807, -0.43369776, 0.1843199, -0.19319294, 0.122559674, -0.09465141, -0.14069664, 0.031092037, -0.1763922, 
0.0074394196, -0.2586067, 0.10990611, -0.18623954, -0.038631044, -0.22795723, 0.09296776, -0.31110525, -0.37963995, -0.19045947, 0.48089907, 0.46725857, 0.28541213, 0.08294283, -0.18865398, 0.09647029, 0.2321466, -0.03435125, -0.09602424, -0.3831683, -0.027315892, 0.4215511, -0.35865632, 0.41955224, 0.090477064, 0.29026023, -0.48485047, -0.24656451, -0.06478625, 0.07755969, -0.049564634, 0.026147474, -0.028342195, -0.035627227, 0.49309397, 0.3705331, 0.04615483, 0.14789912, -0.01220134, 0.300666, -0.246646, 0.0038986988, 0.16730541, 0.46581128, -0.04931062, 0.040290095, 0.32867354, -0.18300997, 0.30411696, -0.1969807, 0.4727539, -0.31915516, -0.32722718, 0.12694982, 0.22583495, -0.014532595, -0.14432396, 0.2469766, 0.14872919, -0.06750808, 0.06351308, -0.287756, -0.32118404, 0.25326216, 0.45288888, -0.36307186, 0.05369787, -0.3283361, 0.07754738, 0.38473788, -0.5120014, -0.3344492, -0.1102767, -0.16755687, -0.3575448, -0.2555015, -0.42976367, -0.2695758, 0.04068499, 0.591914, -0.008395256, 0.2613976, -0.51722556, -0.22581989, 0.036452737, 0.42190477, -0.256124, 0.25849363, -0.073515825, -0.08690646, 0.013338611, 0.14928678, 0.16371651, 0.111465625, -0.117571846, -0.44434816, 0.07603647, 0.4188736, -0.16967061, 0.040313836, 0.41924894, -0.36998197, 0.23204626, -0.23309743, -0.18061559, 1.0674918, -0.51468146, -0.37230963, 0.02214618, -0.5616187, -0.07473461, -0.3314831, -0.24691144, -0.34061712, -0.1511554, 0.33711013, 0.1055847, -0.047220375, -0.06317049, -0.22572862, -0.21646689, 0.090705894, 0.018562902, 0.020744732, -0.5342965, -0.23402104, -0.17680043, 0.1363935, -0.17916845, 0.37825805, -0.07233101, -0.28244817, 0.4055966, 0.19701958, 0.6236174, 0.078134544, 0.46439224, -0.60451704, 0.16722181, -0.20011653, 0.36931068, -0.39967215, 0.21178648, 0.47920865, -0.033521328, 0.57077545, -0.8003054, -0.4028354, 0.27799648, -0.23070334, 0.57747835, 0.49984616, -0.12409506, -0.26694623, -0.20168623, -0.19415514, -0.4626071, 0.10374411, 0.24878122, 0.47352287, 
-0.6494472, -0.26087105, 0.418008, -0.2789803, -0.60986733, -0.54914564, 0.4734504, 0.04347568, -0.13549352, 0.1530609, 0.085039385, -0.014595425, -0.1106091, 0.014441653, 0.14899726, -0.107090004, 0.03979257, 0.20897605, -0.040235993, 0.1928343, -0.048328623, 0.5435577, -0.1704212, -0.016530415, 0.11402996, 0.24666561, -0.62601864, 0.6729872, -0.21594357, -0.3161654, 0.2899072, -0.05281632, 0.026857251, 0.13927892, 0.26362655, 0.37995058, -0.056429606, 0.27310744, -0.34237143, -0.6419976, -0.02513231, -0.18217334, 0.021232722, -0.35155025, 0.055071075, -0.22192729, 0.4597671, 0.09872845, -0.41803727, -0.08897542, -0.63276047, 0.38059604, 0.45347637, 0.52723855, 0.25096536, -0.3165448, 0.43803728, 0.02419832, 0.317004, -0.059602205, 0.15561013, 0.11867607, 0.7157601, 0.08024589, -0.013107148, 0.3127224, -0.08844044, 0.5374578, 0.39421698, -0.054171022, 0.0913302, -0.081881694, 0.24596375, -0.2841653, -0.5482517, -0.5673938, 0.05889957, -0.1146344, 0.39452744, -0.03414711, 0.32027423, 0.2599335, -0.31470263, 0.45967287, -0.5710101, -0.21222454, 0.38154987, -0.21218868, -0.4366558, 0.13715877, 0.23925674, 0.34832072, -0.03769251, 0.25530148, 0.10662722, -0.5269836, 0.32952255, 0.46165445, 0.3794754, -0.061259665, 0.02883365, -0.3199015, 0.40625557, -0.3794913, 0.42420092, 0.4631467, 0.54236996, 0.031472385, 0.2635622, -0.25566247, -0.040713936, 0.48734123, 0.2742017, -0.15524681, 0.025654443, 0.056942068, -0.48883253, 0.60433495, 0.03514151] + } + } + } + ) { + _additional { + vector + } + contents + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Equal(t, len(notes), defaultLimit) + }) +} + +func conflictingSubSearches(t *testing.T) { + t.Run("conflicting subsearches disallowed", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + hybrid: { + searches: { + nearText: { + concepts: ["apple", "banana"] + } + nearVector: { + distance: 
1.8 + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484, -0.0011832615, 0.23041481, -0.091878965, 0.1184808, 0.060692377, 0.1748896, 0.53659165, 0.12019079, 0.54373807, -0.43369776, 0.1843199, -0.19319294, 0.122559674, -0.09465141, -0.14069664, 0.031092037, -0.1763922, 0.0074394196, -0.2586067, 0.10990611, -0.18623954, -0.038631044, -0.22795723, 0.09296776, -0.31110525, -0.37963995, -0.19045947, 0.48089907, 0.46725857, 0.28541213, 0.08294283, -0.18865398, 0.09647029, 0.2321466, -0.03435125, -0.09602424, -0.3831683, -0.027315892, 0.4215511, -0.35865632, 0.41955224, 0.090477064, 0.29026023, -0.48485047, -0.24656451, -0.06478625, 0.07755969, -0.049564634, 0.026147474, -0.028342195, -0.035627227, 0.49309397, 0.3705331, 0.04615483, 0.14789912, -0.01220134, 0.300666, -0.246646, 0.0038986988, 0.16730541, 0.46581128, -0.04931062, 0.040290095, 0.32867354, -0.18300997, 0.30411696, -0.1969807, 0.4727539, -0.31915516, -0.32722718, 0.12694982, 0.22583495, -0.014532595, -0.14432396, 0.2469766, 0.14872919, -0.06750808, 0.06351308, -0.287756, -0.32118404, 0.25326216, 0.45288888, -0.36307186, 0.05369787, -0.3283361, 0.07754738, 0.38473788, -0.5120014, -0.3344492, -0.1102767, -0.16755687, -0.3575448, -0.2555015, -0.42976367, -0.2695758, 0.04068499, 0.591914, -0.008395256, 0.2613976, -0.51722556, -0.22581989, 0.036452737, 0.42190477, -0.256124, 0.25849363, -0.073515825, -0.08690646, 0.013338611, 0.14928678, 0.16371651, 0.111465625, -0.117571846, -0.44434816, 0.07603647, 0.4188736, -0.16967061, 0.040313836, 0.41924894, -0.36998197, 0.23204626, -0.23309743, -0.18061559, 1.0674918, -0.51468146, -0.37230963, 0.02214618, -0.5616187, -0.07473461, -0.3314831, -0.24691144, -0.34061712, -0.1511554, 0.33711013, 0.1055847, -0.047220375, -0.06317049, -0.22572862, -0.21646689, 0.090705894, 0.018562902, 0.020744732, -0.5342965, -0.23402104, -0.17680043, 0.1363935, -0.17916845, 0.37825805, -0.07233101, -0.28244817, 0.4055966, 0.19701958, 0.6236174, 0.078134544, 0.46439224, -0.60451704, 
0.16722181, -0.20011653, 0.36931068, -0.39967215, 0.21178648, 0.47920865, -0.033521328, 0.57077545, -0.8003054, -0.4028354, 0.27799648, -0.23070334, 0.57747835, 0.49984616, -0.12409506, -0.26694623, -0.20168623, -0.19415514, -0.4626071, 0.10374411, 0.24878122, 0.47352287, -0.6494472, -0.26087105, 0.418008, -0.2789803, -0.60986733, -0.54914564, 0.4734504, 0.04347568, -0.13549352, 0.1530609, 0.085039385, -0.014595425, -0.1106091, 0.014441653, 0.14899726, -0.107090004, 0.03979257, 0.20897605, -0.040235993, 0.1928343, -0.048328623, 0.5435577, -0.1704212, -0.016530415, 0.11402996, 0.24666561, -0.62601864, 0.6729872, -0.21594357, -0.3161654, 0.2899072, -0.05281632, 0.026857251, 0.13927892, 0.26362655, 0.37995058, -0.056429606, 0.27310744, -0.34237143, -0.6419976, -0.02513231, -0.18217334, 0.021232722, -0.35155025, 0.055071075, -0.22192729, 0.4597671, 0.09872845, -0.41803727, -0.08897542, -0.63276047, 0.38059604, 0.45347637, 0.52723855, 0.25096536, -0.3165448, 0.43803728, 0.02419832, 0.317004, -0.059602205, 0.15561013, 0.11867607, 0.7157601, 0.08024589, -0.013107148, 0.3127224, -0.08844044, 0.5374578, 0.39421698, -0.054171022, 0.0913302, -0.081881694, 0.24596375, -0.2841653, -0.5482517, -0.5673938, 0.05889957, -0.1146344, 0.39452744, -0.03414711, 0.32027423, 0.2599335, -0.31470263, 0.45967287, -0.5710101, -0.21222454, 0.38154987, -0.21218868, -0.4366558, 0.13715877, 0.23925674, 0.34832072, -0.03769251, 0.25530148, 0.10662722, -0.5269836, 0.32952255, 0.46165445, 0.3794754, -0.061259665, 0.02883365, -0.3199015, 0.40625557, -0.3794913, 0.42420092, 0.4631467, 0.54236996, 0.031472385, 0.2635622, -0.25566247, -0.040713936, 0.48734123, 0.2742017, -0.15524681, 0.025654443, 0.056942068, -0.48883253, 0.60433495, 0.03514151] + } + } + } + ) { + _additional { + vector + } + contents + } + } + } + ` + errors := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, query) + require.Len(t, errors, 1) + require.Contains(t, errors[0].Message, "hybrid search cannot have both nearText and 
nearVector parameters") + }) +} + +func twoVector(t *testing.T) { + t.Run("vectors in subsearch and default fields", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + hybrid: { + alpha: 0.0 + query: "" + searches: { + nearVector: { + distance: 1.8 + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484, -0.0011832615, 0.23041481, -0.091878965, 0.1184808, 0.060692377, 0.1748896, 0.53659165, 0.12019079, 0.54373807, -0.43369776, 0.1843199, -0.19319294, 0.122559674, -0.09465141, -0.14069664, 0.031092037, -0.1763922, 0.0074394196, -0.2586067, 0.10990611, -0.18623954, -0.038631044, -0.22795723, 0.09296776, -0.31110525, -0.37963995, -0.19045947, 0.48089907, 0.46725857, 0.28541213, 0.08294283, -0.18865398, 0.09647029, 0.2321466, -0.03435125, -0.09602424, -0.3831683, -0.027315892, 0.4215511, -0.35865632, 0.41955224, 0.090477064, 0.29026023, -0.48485047, -0.24656451, -0.06478625, 0.07755969, -0.049564634, 0.026147474, -0.028342195, -0.035627227, 0.49309397, 0.3705331, 0.04615483, 0.14789912, -0.01220134, 0.300666, -0.246646, 0.0038986988, 0.16730541, 0.46581128, -0.04931062, 0.040290095, 0.32867354, -0.18300997, 0.30411696, -0.1969807, 0.4727539, -0.31915516, -0.32722718, 0.12694982, 0.22583495, -0.014532595, -0.14432396, 0.2469766, 0.14872919, -0.06750808, 0.06351308, -0.287756, -0.32118404, 0.25326216, 0.45288888, -0.36307186, 0.05369787, -0.3283361, 0.07754738, 0.38473788, -0.5120014, -0.3344492, -0.1102767, -0.16755687, -0.3575448, -0.2555015, -0.42976367, -0.2695758, 0.04068499, 0.591914, -0.008395256, 0.2613976, -0.51722556, -0.22581989, 0.036452737, 0.42190477, -0.256124, 0.25849363, -0.073515825, -0.08690646, 0.013338611, 0.14928678, 0.16371651, 0.111465625, -0.117571846, -0.44434816, 0.07603647, 0.4188736, -0.16967061, 0.040313836, 0.41924894, -0.36998197, 0.23204626, -0.23309743, -0.18061559, 1.0674918, -0.51468146, -0.37230963, 0.02214618, -0.5616187, -0.07473461, -0.3314831, -0.24691144, -0.34061712, -0.1511554, 0.33711013, 0.1055847, 
-0.047220375, -0.06317049, -0.22572862, -0.21646689, 0.090705894, 0.018562902, 0.020744732, -0.5342965, -0.23402104, -0.17680043, 0.1363935, -0.17916845, 0.37825805, -0.07233101, -0.28244817, 0.4055966, 0.19701958, 0.6236174, 0.078134544, 0.46439224, -0.60451704, 0.16722181, -0.20011653, 0.36931068, -0.39967215, 0.21178648, 0.47920865, -0.033521328, 0.57077545, -0.8003054, -0.4028354, 0.27799648, -0.23070334, 0.57747835, 0.49984616, -0.12409506, -0.26694623, -0.20168623, -0.19415514, -0.4626071, 0.10374411, 0.24878122, 0.47352287, -0.6494472, -0.26087105, 0.418008, -0.2789803, -0.60986733, -0.54914564, 0.4734504, 0.04347568, -0.13549352, 0.1530609, 0.085039385, -0.014595425, -0.1106091, 0.014441653, 0.14899726, -0.107090004, 0.03979257, 0.20897605, -0.040235993, 0.1928343, -0.048328623, 0.5435577, -0.1704212, -0.016530415, 0.11402996, 0.24666561, -0.62601864, 0.6729872, -0.21594357, -0.3161654, 0.2899072, -0.05281632, 0.026857251, 0.13927892, 0.26362655, 0.37995058, -0.056429606, 0.27310744, -0.34237143, -0.6419976, -0.02513231, -0.18217334, 0.021232722, -0.35155025, 0.055071075, -0.22192729, 0.4597671, 0.09872845, -0.41803727, -0.08897542, -0.63276047, 0.38059604, 0.45347637, 0.52723855, 0.25096536, -0.3165448, 0.43803728, 0.02419832, 0.317004, -0.059602205, 0.15561013, 0.11867607, 0.7157601, 0.08024589, -0.013107148, 0.3127224, -0.08844044, 0.5374578, 0.39421698, -0.054171022, 0.0913302, -0.081881694, 0.24596375, -0.2841653, -0.5482517, -0.5673938, 0.05889957, -0.1146344, 0.39452744, -0.03414711, 0.32027423, 0.2599335, -0.31470263, 0.45967287, -0.5710101, -0.21222454, 0.38154987, -0.21218868, -0.4366558, 0.13715877, 0.23925674, 0.34832072, -0.03769251, 0.25530148, 0.10662722, -0.5269836, 0.32952255, 0.46165445, 0.3794754, -0.061259665, 0.02883365, -0.3199015, 0.40625557, -0.3794913, 0.42420092, 0.4631467, 0.54236996, 0.031472385, 0.2635622, -0.25566247, -0.040713936, 0.48734123, 0.2742017, -0.15524681, 0.025654443, 0.056942068, -0.48883253, 0.60433495, 
0.03514151] + } + } + + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484, -0.0011832615, 0.23041481, -0.091878965, 0.1184808, 0.060692377, 0.1748896, 0.53659165, 0.12019079, 0.54373807, -0.43369776, 0.1843199, -0.19319294, 0.122559674, -0.09465141, -0.14069664, 0.031092037, -0.1763922, 0.0074394196, -0.2586067, 0.10990611, -0.18623954, -0.038631044, -0.22795723, 0.09296776, -0.31110525, -0.37963995, -0.19045947, 0.48089907, 0.46725857, 0.28541213, 0.08294283, -0.18865398, 0.09647029, 0.2321466, -0.03435125, -0.09602424, -0.3831683, -0.027315892, 0.4215511, -0.35865632, 0.41955224, 0.090477064, 0.29026023, -0.48485047, -0.24656451, -0.06478625, 0.07755969, -0.049564634, 0.026147474, -0.028342195, -0.035627227, 0.49309397, 0.3705331, 0.04615483, 0.14789912, -0.01220134, 0.300666, -0.246646, 0.0038986988, 0.16730541, 0.46581128, -0.04931062, 0.040290095, 0.32867354, -0.18300997, 0.30411696, -0.1969807, 0.4727539, -0.31915516, -0.32722718, 0.12694982, 0.22583495, -0.014532595, -0.14432396, 0.2469766, 0.14872919, -0.06750808, 0.06351308, -0.287756, -0.32118404, 0.25326216, 0.45288888, -0.36307186, 0.05369787, -0.3283361, 0.07754738, 0.38473788, -0.5120014, -0.3344492, -0.1102767, -0.16755687, -0.3575448, -0.2555015, -0.42976367, -0.2695758, 0.04068499, 0.591914, -0.008395256, 0.2613976, -0.51722556, -0.22581989, 0.036452737, 0.42190477, -0.256124, 0.25849363, -0.073515825, -0.08690646, 0.013338611, 0.14928678, 0.16371651, 0.111465625, -0.117571846, -0.44434816, 0.07603647, 0.4188736, -0.16967061, 0.040313836, 0.41924894, -0.36998197, 0.23204626, -0.23309743, -0.18061559, 1.0674918, -0.51468146, -0.37230963, 0.02214618, -0.5616187, -0.07473461, -0.3314831, -0.24691144, -0.34061712, -0.1511554, 0.33711013, 0.1055847, -0.047220375, -0.06317049, -0.22572862, -0.21646689, 0.090705894, 0.018562902, 0.020744732, -0.5342965, -0.23402104, -0.17680043, 0.1363935, -0.17916845, 0.37825805, -0.07233101, -0.28244817, 0.4055966, 0.19701958, 0.6236174, 0.078134544, 0.46439224, 
-0.60451704, 0.16722181, -0.20011653, 0.36931068, -0.39967215, 0.21178648, 0.47920865, -0.033521328, 0.57077545, -0.8003054, -0.4028354, 0.27799648, -0.23070334, 0.57747835, 0.49984616, -0.12409506, -0.26694623, -0.20168623, -0.19415514, -0.4626071, 0.10374411, 0.24878122, 0.47352287, -0.6494472, -0.26087105, 0.418008, -0.2789803, -0.60986733, -0.54914564, 0.4734504, 0.04347568, -0.13549352, 0.1530609, 0.085039385, -0.014595425, -0.1106091, 0.014441653, 0.14899726, -0.107090004, 0.03979257, 0.20897605, -0.040235993, 0.1928343, -0.048328623, 0.5435577, -0.1704212, -0.016530415, 0.11402996, 0.24666561, -0.62601864, 0.6729872, -0.21594357, -0.3161654, 0.2899072, -0.05281632, 0.026857251, 0.13927892, 0.26362655, 0.37995058, -0.056429606, 0.27310744, -0.34237143, -0.6419976, -0.02513231, -0.18217334, 0.021232722, -0.35155025, 0.055071075, -0.22192729, 0.4597671, 0.09872845, -0.41803727, -0.08897542, -0.63276047, 0.38059604, 0.45347637, 0.52723855, 0.25096536, -0.3165448, 0.43803728, 0.02419832, 0.317004, -0.059602205, 0.15561013, 0.11867607, 0.7157601, 0.08024589, -0.013107148, 0.3127224, -0.08844044, 0.5374578, 0.39421698, -0.054171022, 0.0913302, -0.081881694, 0.24596375, -0.2841653, -0.5482517, -0.5673938, 0.05889957, -0.1146344, 0.39452744, -0.03414711, 0.32027423, 0.2599335, -0.31470263, 0.45967287, -0.5710101, -0.21222454, 0.38154987, -0.21218868, -0.4366558, 0.13715877, 0.23925674, 0.34832072, -0.03769251, 0.25530148, 0.10662722, -0.5269836, 0.32952255, 0.46165445, 0.3794754, -0.061259665, 0.02883365, -0.3199015, 0.40625557, -0.3794913, 0.42420092, 0.4631467, 0.54236996, 0.031472385, 0.2635622, -0.25566247, -0.040713936, 0.48734123, 0.2742017, -0.15524681, 0.025654443, 0.056942068, -0.48883253, 0.60433495, 0.03514151] + + } + ) { + _additional { + vector + } + contents + } + } + } + ` + errors := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, query) + require.Len(t, errors, 1) + require.Contains(t, errors[0].Message, "cannot have both vector and 
nearVectorParams") + }) +} + +func vectorNearText(t *testing.T) { + t.Run("default vector and nearText subsearch", func(t *testing.T) { + query := ` +{ + Get { + RansomNote( + hybrid: { + query: "banana" + searches: { + nearText: { + concepts: ["apple", "banana", "cactus"] + } + } + + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484] + } + ) + { + _additional { + vector + } + contents + } + } +} +` + errors := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, query) + require.Len(t, errors, 1) + require.Contains(t, errors[0].Message, "cannot have both vector and nearTextParams") + }) +} + +func aggregateHybridGroupBy(t *testing.T) { + t.Run("aggregate groupby", func(t *testing.T) { + query := ` + + { + Aggregate { + CompanyGroup ( + objectLimit: 30 + hybrid: { + alpha: 0.5 + query: "Apple" + + searches:{ + nearText: { + concepts: ["Apple"] + } + } + } + groupBy:["city"]) { + meta { + count + } + + groupedBy { + value + path + } + } + } + } +` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + groups := result.Get("Aggregate", "CompanyGroup").AsSlice() + + require.Len(t, groups, 3) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_grouping_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_grouping_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c6044ba60fffaded336e3b221463dddcf3435b58 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_grouping_test.go @@ -0,0 +1,353 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "strings" + "testing" + + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/test/helper" +) + +func gettingObjectsWithGrouping(t *testing.T) { + t.Run("without grouping <- this is the control", func(t *testing.T) { + query := ` + { + Get { + Company { + name + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + companies := result.Get("Get", "Company").AsSlice() + + expected := []interface{}{ + map[string]interface{}{"name": "Microsoft Inc."}, + map[string]interface{}{"name": "Microsoft Incorporated"}, + map[string]interface{}{"name": "Microsoft"}, + map[string]interface{}{"name": "Apple Inc."}, + map[string]interface{}{"name": "Apple Incorporated"}, + map[string]interface{}{"name": "Apple"}, + map[string]interface{}{"name": "Google Inc."}, + map[string]interface{}{"name": "Google Incorporated"}, + map[string]interface{}{"name": "Google"}, + } + + assert.ElementsMatch(t, expected, companies) + }) + + t.Run("grouping mode set to merge and force to 1.0", func(t *testing.T) { + query := ` + { + Get { + Company(group: {type: merge, force:1.0}) { + name + inCity { + ... 
on City {name} + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + companies := result.Get("Get", "Company").AsSlice() + + require.Len(t, companies, 1) + + companyNames := companies[0].(map[string]interface{})["name"].(string) + assert.NotEmpty(t, companies) + + mustContain := []string{"Apple", "Google", "Microsoft"} + for _, companyName := range mustContain { + if !strings.Contains(companyNames, companyName) { + t.Errorf("%s not contained in %v", companyName, companyNames) + } + } + + companyCities := companies[0].(map[string]interface{})["inCity"].([]interface{}) + expectedCities := []map[string]interface{}{ + {"name": "Dusseldorf"}, + {"name": "Amsterdam"}, + {"name": "Berlin"}, + } + + assert.ElementsMatch(t, expectedCities, companyCities) + }) + + t.Run("grouping mode set to merge and force to 0.0", func(t *testing.T) { + query := ` + { + Get { + Company(group: {type: merge, force:0.0}) { + name + inCity { + ... on City { + name + } + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + companies := result.Get("Get", "Company").AsSlice() + + require.Len(t, companies, 9) + + getName := func(value map[string]interface{}) string { + return value["name"].(string) + } + + getCities := func(value map[string]interface{}) []string { + inCity := value["inCity"].([]interface{}) + cities := make([]string, len(inCity)) + for i := range inCity { + cityVal := inCity[i].(map[string]interface{}) + cities[i] = getName(cityVal) + } + return cities + } + + for _, current := range companies { + currentMap := current.(map[string]interface{}) + if getName(currentMap) == "Microsoft Incorporated" { + assert.Len(t, getCities(currentMap), 2) + } + if getName(currentMap) == "Microsoft Inc." 
{ + assert.Len(t, getCities(currentMap), 1) + } + if getName(currentMap) == "Microsoft" { + assert.Len(t, getCities(currentMap), 1) + } + } + }) + + t.Run("grouping mode set to closest and force to 0.1", func(t *testing.T) { + query := ` + { + Get { + Company(group: {type: closest, force:0.1}) { + name + inCity { + ... on City { + name + } + } + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + companies := result.Get("Get", "Company").AsSlice() + + assert.True(t, len(companies) > 0) + }) + + t.Run("grouping mode set to closest with near text", func(t *testing.T) { + query := ` + { + Get { + Company(nearText: {concepts: "Apple"}, group: {type: closest, force:1.0}) { + name + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + companies := result.Get("Get", "Company").AsSlice() + + assert.True(t, len(companies) == 1) + }) + + t.Run("grouping with where filter", func(t *testing.T) { + query := ` + { + Get { + Company(group:{type:merge force:1.0} where:{path:["id"] operator:Like valueText:"*"}) { + name + inCity { + ... on City { + name + } + } + } + } + } + ` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + grouped := result.Get("Get", "Company").AsSlice() + require.Len(t, grouped, 1) + groupedName := grouped[0].(map[string]interface{})["name"].(string) + assert.Equal(t, "Microsoft Inc. (Microsoft Incorporated, Microsoft, Apple Inc., "+ + "Apple Incorporated, Apple, Google Inc., Google Incorporated, Google)", + groupedName) + + companyCities := grouped[0].(map[string]interface{})["inCity"].([]interface{}) + expectedCities := []map[string]interface{}{ + {"name": "Dusseldorf"}, + {"name": "Amsterdam"}, + {"name": "Berlin"}, + } + + assert.ElementsMatch(t, expectedCities, companyCities) + + // this query should yield the same results as the above, as the above where filter will + // match all records. 
checking the previous payload with the one below is a sanity check + // for the sake of validating the fix for [github issue 1958] + // (https://github.com/weaviate/weaviate/issues/1958) + // UPDATE: due to introducing roaring bitmaps as set holding docIDs of filtered documents + // internal order of results returned has changed from property value based to docID based, + // but set content remain unchanged + // for that reason grouped name in the following test is different with and without filters, + // though it still contains the same elements + queryWithoutWhere := ` + { + Get { + Company(group:{type:merge force:1.0}) { + name + inCity { + ... on City { + name + } + } + } + } + } + ` + result = graphqlhelper.AssertGraphQL(t, helper.RootAuth, queryWithoutWhere) + groupedWithoutWhere := result.Get("Get", "Company").AsSlice() + groupedWithoutWhereName := groupedWithoutWhere[0].(map[string]interface{})["name"].(string) + assert.Equal(t, "Apple Inc. (Google Incorporated, Google Inc., Microsoft Incorporated, "+ + "Apple, Apple Incorporated, Google, Microsoft Inc., Microsoft)", + groupedWithoutWhereName) + + companyCities = groupedWithoutWhere[0].(map[string]interface{})["inCity"].([]interface{}) + assert.ElementsMatch(t, expectedCities, companyCities) + }) + + t.Run("grouping with sort", func(t *testing.T) { + query := ` + { + Get { + Company(group:{type:merge force:1.0} sort:{path:["name"]}) { + name + inCity { + ... 
on City { + name + } + } + } + } + } + ` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + grouped := result.Get("Get", "Company").AsSlice() + require.Len(t, grouped, 1) + groupedName := grouped[0].(map[string]interface{})["name"].(string) + assert.Equal(t, "Apple (Apple Inc., Apple Incorporated, Google, Google Inc., "+ + "Google Incorporated, Microsoft, Microsoft Inc., Microsoft Incorporated)", + groupedName) + + groupedCities := grouped[0].(map[string]interface{})["inCity"].([]interface{}) + expectedCities := []map[string]interface{}{ + {"name": "Dusseldorf"}, + {"name": "Amsterdam"}, + {"name": "Berlin"}, + } + + assert.ElementsMatch(t, expectedCities, groupedCities) + }) + + // temporarily removed due to + // https://github.com/weaviate/weaviate/issues/1302 + // t.Run("grouping mode set to closest", func(t *testing.T) { + // query := ` + // { + // Get { + // Company(group: {type: closest, force:0.10}) { + // name + // } + // } + // } + // ` + // result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + // companies := result.Get("Get", "Company").AsSlice() + + // assert.Len(t, companies, 3) + // mustContain := []string{"Apple", "Microsoft", "Google"} + // outer: + // for _, toContain := range mustContain { + // for _, current := range companies { + // if strings.Contains(current.(map[string]interface{})["name"].(string), toContain) { + // continue outer + // } + // } + + // t.Errorf("%s not contained in %v", toContain, companies) + // } + // }) + + // ignore as 0.16.0 contextionaries aren't compatible with this test + // t.Run("grouping mode set to merge", func(t *testing.T) { + // query := ` + // { + // Get { + // Company(group: {type: merge, force:0.1}) { + // name + // inCity { + // ... 
on City { + // name + // } + // } + // } + // } + // } + // ` + // result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + // companies := result.Get("Get", "Company").AsSlice() + + // assert.Len(t, companies, 3) + // mustContain := [][]string{ + // []string{"Apple", "Apple Inc.", "Apple Incorporated"}, + // []string{"Microsoft", "Microsoft Inc.", "Microsoft Incorporated"}, + // []string{"Google", "Google Inc.", "Google Incorporated"}, + // } + + // allContained := func(current map[string]interface{}, toContains []string) bool { + // for _, toContain := range toContains { + // if !strings.Contains(current["name"].(string), toContain) { + // return false + // } + // } + // return true + // } + + // outer: + // for _, toContain := range mustContain { + // for _, current := range companies { + // if allContained(current.(map[string]interface{}), toContain) { + // continue outer + // } + // } + + // t.Errorf("%s not contained in %v", toContain, companies) + // } + // }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_sort_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_sort_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8e5f725c0a224203822b28eb7c80ec00e0506d75 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_sort_test.go @@ -0,0 +1,1220 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "reflect" + "strings" + "testing" + + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/test/helper" +) + +func gettingObjectsWithSort(t *testing.T) { + buildSort := func(path []string, order string) string { + pathArgs := make([]string, len(path)) + for i := range path { + pathArgs[i] = fmt.Sprintf("\"%s\"", path[i]) + } + return fmt.Sprintf("{path:[%s] order:%s}", strings.Join(pathArgs, ","), order) + } + buildSortFilter := func(sort []string) string { + return fmt.Sprintf("sort:[%s]", strings.Join(sort, ",")) + } + + t.Run("simple sort", func(t *testing.T) { + query := ` + { + Get { + City( + sort: [{ + path: ["%s"] + order: %s + }] + ) { + name + } + } + } + ` + tests := []struct { + name string + property, order string + expected []interface{} + }{ + { + name: "sort by name asc", + property: "name", + order: "asc", + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Rotterdam"}, + }, + }, + { + name: "sort by name desc", + property: "name", + order: "desc", + expected: []interface{}{ + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by population asc", + property: "population", + order: "asc", + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + 
map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "sort by population desc", + property: "population", + order: "desc", + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by isCapital asc", + property: "isCapital", + order: "asc", + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "sort by isCapital desc", + property: "isCapital", + order: "desc", + expected: []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by cityArea asc", + property: "cityArea", + order: "asc", + expected: []interface{}{ + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "sort by cityArea desc", + property: "cityArea", + order: "desc", + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": 
"Missing Island"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by cityRights asc", + property: "cityRights", + order: "asc", + expected: []interface{}{ + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "sort by cityRights desc", + property: "cityRights", + order: "desc", + expected: []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by timezones asc", + property: "timezones", + order: "asc", + expected: []interface{}{ + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "sort by timezones desc", + property: "timezones", + order: "desc", + expected: []interface{}{ + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by museums asc", + property: "museums", + order: "asc", + expected: []interface{}{ + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + 
map[string]interface{}{"name": "Amsterdam"}, + }, + }, + { + name: "sort by museums desc", + property: "museums", + order: "desc", + expected: []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by history asc", + property: "history", + order: "asc", + expected: []interface{}{ + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Dusseldorf"}, + }, + }, + { + name: "sort by history desc", + property: "history", + order: "desc", + expected: []interface{}{ + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by phoneNumber asc", + property: "phoneNumber", + order: "asc", + expected: []interface{}{ + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + }, + }, + { + name: "sort by phoneNumber desc", + property: "phoneNumber", + order: "desc", + expected: []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Missing Island"}, + 
map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by location asc", + property: "location", + order: "asc", + expected: []interface{}{ + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + }, + }, + { + name: "sort by location desc", + property: "location", + order: "desc", + expected: []interface{}{ + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(query, tt.property, tt.order)) + got := result.Get("Get", "City").AsSlice() + if !reflect.DeepEqual(got, tt.expected) { + t.Errorf("sort objects got = %v, want %v", got, tt.expected) + } + }) + } + }) + + t.Run("complex sort", func(t *testing.T) { + query := ` + { + Get { + City( + %s + ) { + name + } + } + } + ` + queryLimit := ` + { + Get { + City( + limit: %d + %s + ) { + name + } + } + } + ` + tests := []struct { + name string + sort []string + expected []interface{} + }{ + { + name: "sort by population and name asc", + sort: []string{ + buildSort([]string{"population"}, "asc"), + buildSort([]string{"name"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "sort by population asc and name desc", + sort: []string{ + 
buildSort([]string{"population"}, "asc"), + buildSort([]string{"name"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "sort by name asc and population desc", + sort: []string{ + buildSort([]string{"name"}, "asc"), + buildSort([]string{"population"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Rotterdam"}, + }, + }, + { + name: "sort by population and name desc", + sort: []string{ + buildSort([]string{"population"}, "desc"), + buildSort([]string{"name"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by phoneNumber and population and name asc", + sort: []string{ + buildSort([]string{"phoneNumber"}, "asc"), + buildSort([]string{"population"}, "asc"), + buildSort([]string{"name"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + }, + }, + { + name: "sort by isCapital asc and name asc", + sort: []string{ + buildSort([]string{"isCapital"}, "asc"), + 
buildSort([]string{"name"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "sort by isCapital asc and name desc", + sort: []string{ + buildSort([]string{"isCapital"}, "asc"), + buildSort([]string{"name"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + }, + }, + { + name: "sort by isCapital desc and name asc", + sort: []string{ + buildSort([]string{"isCapital"}, "desc"), + buildSort([]string{"name"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by isCapital desc and name desc", + sort: []string{ + buildSort([]string{"isCapital"}, "desc"), + buildSort([]string{"name"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by isCapital asc and population desc and name asc", + sort: []string{ + buildSort([]string{"isCapital"}, "asc"), + buildSort([]string{"population"}, "desc"), + buildSort([]string{"name"}, "asc"), + }, + expected: 
[]interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + }, + }, + { + name: "sort by isCapital desc and population desc and name desc", + sort: []string{ + buildSort([]string{"isCapital"}, "desc"), + buildSort([]string{"population"}, "desc"), + buildSort([]string{"name"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "sort by isCapital asc and timezones asc and city rights asc and name asc", + sort: []string{ + buildSort([]string{"isCapital"}, "asc"), + buildSort([]string{"timezones"}, "asc"), + buildSort([]string{"cityRights"}, "asc"), + buildSort([]string{"name"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "sort by isCapital desc and timezones asc and city rights asc and name desc", + sort: []string{ + buildSort([]string{"isCapital"}, "desc"), + buildSort([]string{"timezones"}, "asc"), + buildSort([]string{"cityRights"}, "asc"), + buildSort([]string{"name"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + 
map[string]interface{}{"name": nil}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Run("without limit", func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(query, buildSortFilter(tt.sort))) + got := result.Get("Get", "City").AsSlice() + if !reflect.DeepEqual(got, tt.expected) { + t.Errorf("sort objects got = %v, want %v", got, tt.expected) + } + }) + t.Run("with limit", func(t *testing.T) { + limit := 4 + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(queryLimit, limit, buildSortFilter(tt.sort))) + got := result.Get("Get", "City").AsSlice() + if !reflect.DeepEqual(got, tt.expected[:limit]) { + t.Errorf("sort objects got = %v, want %v", got, tt.expected) + } + }) + }) + } + }) + + t.Run("sort with where", func(t *testing.T) { + query := ` + { + Get { + City( + sort: [{ + path: ["location"] + order: %s + }] + where: { + operator: Or, + operands: [ + {valueText: "6ffb03f8-a853-4ec5-a5d8-302e45aaaf13", path: ["id"], operator: Equal}, + {valueText: "823abeca-eef3-41c7-b587-7a6977b08003", path: ["id"], operator: Equal} + ]} + ) { + name + } + } + } + ` + tests := []struct { + name string + order string + expected []interface{} + }{ + { + name: "location asc", + order: "asc", + expected: []interface{}{ + map[string]interface{}{"name": "Missing Island"}, + map[string]interface{}{"name": "Dusseldorf"}, + }, + }, + { + name: "location desc", + order: "desc", + expected: []interface{}{ + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Missing Island"}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(query, tt.order)) + got := result.Get("Get", "City").AsSlice() + if !reflect.DeepEqual(got, tt.expected) { + t.Errorf("sort objects got = %v, want %v", got, tt.expected) + } + }) + } + }) + + t.Run("sort with where with non-existent-uuid", 
func(t *testing.T) { + query := ` + { + Get { + City( + sort: [{ + path: ["location"] + order: asc + }] + where: { + valueText: "non-existent-uuid", path: ["id"], operator: Equal + } + ) { + name + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + got := result.Get("Get", "City").AsSlice() + assert.Empty(t, got) + }) + + t.Run("sort with nearText (with distance)", func(t *testing.T) { + query := ` + { + Get { + City( + nearText: { + concepts: ["Berlin"] + distance: 0.6 + } + %s + ) { + name + } + } + } + ` + tests := []struct { + name string + sort []string + expected []interface{} + }{ + { + name: "name asc", + sort: []string{ + buildSort([]string{"name"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + }, + }, + { + name: "name desc", + sort: []string{ + buildSort([]string{"name"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "population asc", + sort: []string{ + buildSort([]string{"population"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "population desc", + sort: []string{ + buildSort([]string{"population"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + 
map[string]interface{}{"name": nil}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(query, buildSortFilter(tt.sort))) + got := result.Get("Get", "City").AsSlice() + if !reflect.DeepEqual(got, tt.expected) { + t.Errorf("sort objects got = %v, want %v", got, tt.expected) + } + }) + } + }) + + t.Run("sort with nearText (with certainty)", func(t *testing.T) { + query := ` + { + Get { + City( + nearText: { + concepts: ["Berlin"] + certainty: 0.7 + } + %s + ) { + name + } + } + } + ` + tests := []struct { + name string + sort []string + expected []interface{} + }{ + { + name: "name asc", + sort: []string{ + buildSort([]string{"name"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + }, + }, + { + name: "name desc", + sort: []string{ + buildSort([]string{"name"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": nil}, + }, + }, + { + name: "population asc", + sort: []string{ + buildSort([]string{"population"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": nil}, + map[string]interface{}{"name": "Dusseldorf"}, + map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "population desc", + sort: []string{ + buildSort([]string{"population"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Dusseldorf"}, + 
map[string]interface{}{"name": "Rotterdam"}, + map[string]interface{}{"name": nil}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(query, buildSortFilter(tt.sort))) + got := result.Get("Get", "City").AsSlice() + if !reflect.DeepEqual(got, tt.expected) { + t.Errorf("sort objects got = %v, want %v", got, tt.expected) + } + }) + } + }) + + t.Run("sort with nearText and limit (with distance)", func(t *testing.T) { + query := ` + { + Get { + City( + nearText: { + concepts: ["Berlin"] + distance: 0.6 + } + %s + limit: 2 + ) { + name + } + } + } + ` + tests := []struct { + name string + sort []string + expected []interface{} + }{ + { + name: "name asc", + sort: []string{ + buildSort([]string{"name"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "name desc", + sort: []string{ + buildSort([]string{"name"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + }, + }, + { + name: "population asc", + sort: []string{ + buildSort([]string{"population"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "population desc", + sort: []string{ + buildSort([]string{"population"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(query, buildSortFilter(tt.sort))) + got := result.Get("Get", "City").AsSlice() + if !reflect.DeepEqual(got, tt.expected) { + t.Errorf("sort objects got = %v, want %v", got, tt.expected) + } + }) + } + }) + + t.Run("sort with nearText and 
limit (with certainty)", func(t *testing.T) { + query := ` + { + Get { + City( + nearText: { + concepts: ["Berlin"] + certainty: 0.7 + } + %s + limit: 2 + ) { + name + } + } + } + ` + tests := []struct { + name string + sort []string + expected []interface{} + }{ + { + name: "name asc", + sort: []string{ + buildSort([]string{"name"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "name desc", + sort: []string{ + buildSort([]string{"name"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + }, + }, + { + name: "population asc", + sort: []string{ + buildSort([]string{"population"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "population desc", + sort: []string{ + buildSort([]string{"population"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(query, buildSortFilter(tt.sort))) + got := result.Get("Get", "City").AsSlice() + if !reflect.DeepEqual(got, tt.expected) { + t.Errorf("sort objects got = %v, want %v", got, tt.expected) + } + }) + } + }) + + t.Run("sort with where and nearText and limit (with distance)", func(t *testing.T) { + query := ` + { + Get { + City( + where: { + valueBoolean: true, + operator: Equal, + path: ["isCapital"] + } + nearText: { + concepts: ["Amsterdam"] + distance: 0.6 + } + %s + limit: 2 + ) { + name + } + } + } + ` + tests := []struct { + name string + sort []string + expected []interface{} + }{ + { + name: "name asc", + sort: []string{ + buildSort([]string{"name"}, "asc"), + }, + expected: []interface{}{ + 
map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "name desc", + sort: []string{ + buildSort([]string{"name"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + }, + }, + { + name: "population asc", + sort: []string{ + buildSort([]string{"population"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "population desc", + sort: []string{ + buildSort([]string{"population"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(query, buildSortFilter(tt.sort))) + got := result.Get("Get", "City").AsSlice() + if !reflect.DeepEqual(got, tt.expected) { + t.Errorf("sort objects got = %v, want %v", got, tt.expected) + } + }) + } + }) + + t.Run("sort with where and nearText and limit (with certainty)", func(t *testing.T) { + query := ` + { + Get { + City( + where: { + valueBoolean: true, + operator: Equal, + path: ["isCapital"] + } + nearText: { + concepts: ["Amsterdam"] + certainty: 0.7 + } + %s + limit: 2 + ) { + name + } + } + } + ` + tests := []struct { + name string + sort []string + expected []interface{} + }{ + { + name: "name asc", + sort: []string{ + buildSort([]string{"name"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "name desc", + sort: []string{ + buildSort([]string{"name"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + }, + }, + { + name: "population asc", + sort: []string{ + 
buildSort([]string{"population"}, "asc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Amsterdam"}, + map[string]interface{}{"name": "Berlin"}, + }, + }, + { + name: "population desc", + sort: []string{ + buildSort([]string{"population"}, "desc"), + }, + expected: []interface{}{ + map[string]interface{}{"name": "Berlin"}, + map[string]interface{}{"name": "Amsterdam"}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, fmt.Sprintf(query, buildSortFilter(tt.sort))) + got := result.Get("Get", "City").AsSlice() + if !reflect.DeepEqual(got, tt.expected) { + t.Errorf("sort objects got = %v, want %v", got, tt.expected) + } + }) + } + }) + + t.Run("broken sort clause", func(t *testing.T) { + query := ` + { + Get { + %s( + %s + ) { + name + } + } + } + ` + tests := []struct { + name string + className string + sort []string + expectedMsg string + }{ + { + name: "empty path", + className: "City", + sort: []string{ + buildSort([]string{}, "asc"), + }, + expectedMsg: "invalid 'sort' parameter: sort parameter at position 0: " + + "path parameter cannot be empty", + }, + { + name: "empty property in path", + className: "City", + sort: []string{ + buildSort([]string{""}, "asc"), + }, + expectedMsg: "invalid 'sort' parameter: sort parameter at position 0: " + + "no such prop with name '' found in class 'City' in the schema. 
" + + "Check your schema files for which properties in this class are available", + }, + { + name: "reference prop in path", + className: "City", + sort: []string{ + buildSort([]string{"ref", "prop"}, "asc"), + }, + expectedMsg: "invalid 'sort' parameter: sort parameter at position 0: " + + "sorting by reference not supported, path must have exactly one argument", + }, + { + name: "non-existent class", + className: "NonExistentClass", + sort: []string{ + buildSort([]string{"property"}, "asc"), + }, + expectedMsg: "Cannot query field \"NonExistentClass\" on type \"GetObjectsObj\".", + }, + { + name: "non-existent property", + className: "City", + sort: []string{ + buildSort([]string{"nonexistentproperty"}, "asc"), + }, + expectedMsg: "invalid 'sort' parameter: sort parameter at position 0: " + + "no such prop with name 'nonexistentproperty' found in class 'City' in the schema. " + + "Check your schema files for which properties in this class are available", + }, + { + name: "reference property", + className: "City", + sort: []string{ + buildSort([]string{"inCountry"}, "asc"), + }, + expectedMsg: "invalid 'sort' parameter: sort parameter at position 0: " + + "sorting by reference not supported, " + + "property \"inCountry\" is a ref prop to the class \"Country\"", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := graphqlhelper.ErrorGraphQL(t, helper.RootAuth, fmt.Sprintf(query, tt.className, buildSortFilter(tt.sort))) + for _, gqlError := range result { + assert.Equal(t, tt.expectedMsg, gqlError.Message) + } + }) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_unlimited_vector_search_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_unlimited_vector_search_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e6fb7cb4ab9c60925a58072fde4c8eb35881068f --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/local_get_with_unlimited_vector_search_test.go @@ -0,0 +1,678 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "testing" + + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/test/helper" +) + +func gettingObjectsWithNearFields(t *testing.T) { + defaultLimit := 100 + + // nearVector + + t.Run("nearVector: with implicit unlimited search - no limit provided (with distance)", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + nearVector: { + distance: 1.8 + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484, -0.0011832615, 0.23041481, -0.091878965, 0.1184808, 0.060692377, 0.1748896, 0.53659165, 0.12019079, 0.54373807, -0.43369776, 0.1843199, -0.19319294, 0.122559674, -0.09465141, -0.14069664, 0.031092037, -0.1763922, 0.0074394196, -0.2586067, 0.10990611, -0.18623954, -0.038631044, -0.22795723, 0.09296776, -0.31110525, -0.37963995, -0.19045947, 0.48089907, 0.46725857, 0.28541213, 0.08294283, -0.18865398, 0.09647029, 0.2321466, -0.03435125, -0.09602424, -0.3831683, -0.027315892, 0.4215511, -0.35865632, 0.41955224, 0.090477064, 0.29026023, -0.48485047, -0.24656451, -0.06478625, 0.07755969, -0.049564634, 0.026147474, -0.028342195, -0.035627227, 0.49309397, 0.3705331, 0.04615483, 0.14789912, -0.01220134, 0.300666, -0.246646, 0.0038986988, 0.16730541, 0.46581128, -0.04931062, 0.040290095, 0.32867354, -0.18300997, 0.30411696, -0.1969807, 0.4727539, -0.31915516, -0.32722718, 0.12694982, 0.22583495, -0.014532595, -0.14432396, 0.2469766, 0.14872919, -0.06750808, 0.06351308, -0.287756, -0.32118404, 0.25326216, 0.45288888, 
-0.36307186, 0.05369787, -0.3283361, 0.07754738, 0.38473788, -0.5120014, -0.3344492, -0.1102767, -0.16755687, -0.3575448, -0.2555015, -0.42976367, -0.2695758, 0.04068499, 0.591914, -0.008395256, 0.2613976, -0.51722556, -0.22581989, 0.036452737, 0.42190477, -0.256124, 0.25849363, -0.073515825, -0.08690646, 0.013338611, 0.14928678, 0.16371651, 0.111465625, -0.117571846, -0.44434816, 0.07603647, 0.4188736, -0.16967061, 0.040313836, 0.41924894, -0.36998197, 0.23204626, -0.23309743, -0.18061559, 1.0674918, -0.51468146, -0.37230963, 0.02214618, -0.5616187, -0.07473461, -0.3314831, -0.24691144, -0.34061712, -0.1511554, 0.33711013, 0.1055847, -0.047220375, -0.06317049, -0.22572862, -0.21646689, 0.090705894, 0.018562902, 0.020744732, -0.5342965, -0.23402104, -0.17680043, 0.1363935, -0.17916845, 0.37825805, -0.07233101, -0.28244817, 0.4055966, 0.19701958, 0.6236174, 0.078134544, 0.46439224, -0.60451704, 0.16722181, -0.20011653, 0.36931068, -0.39967215, 0.21178648, 0.47920865, -0.033521328, 0.57077545, -0.8003054, -0.4028354, 0.27799648, -0.23070334, 0.57747835, 0.49984616, -0.12409506, -0.26694623, -0.20168623, -0.19415514, -0.4626071, 0.10374411, 0.24878122, 0.47352287, -0.6494472, -0.26087105, 0.418008, -0.2789803, -0.60986733, -0.54914564, 0.4734504, 0.04347568, -0.13549352, 0.1530609, 0.085039385, -0.014595425, -0.1106091, 0.014441653, 0.14899726, -0.107090004, 0.03979257, 0.20897605, -0.040235993, 0.1928343, -0.048328623, 0.5435577, -0.1704212, -0.016530415, 0.11402996, 0.24666561, -0.62601864, 0.6729872, -0.21594357, -0.3161654, 0.2899072, -0.05281632, 0.026857251, 0.13927892, 0.26362655, 0.37995058, -0.056429606, 0.27310744, -0.34237143, -0.6419976, -0.02513231, -0.18217334, 0.021232722, -0.35155025, 0.055071075, -0.22192729, 0.4597671, 0.09872845, -0.41803727, -0.08897542, -0.63276047, 0.38059604, 0.45347637, 0.52723855, 0.25096536, -0.3165448, 0.43803728, 0.02419832, 0.317004, -0.059602205, 0.15561013, 0.11867607, 0.7157601, 0.08024589, -0.013107148, 0.3127224, 
-0.08844044, 0.5374578, 0.39421698, -0.054171022, 0.0913302, -0.081881694, 0.24596375, -0.2841653, -0.5482517, -0.5673938, 0.05889957, -0.1146344, 0.39452744, -0.03414711, 0.32027423, 0.2599335, -0.31470263, 0.45967287, -0.5710101, -0.21222454, 0.38154987, -0.21218868, -0.4366558, 0.13715877, 0.23925674, 0.34832072, -0.03769251, 0.25530148, 0.10662722, -0.5269836, 0.32952255, 0.46165445, 0.3794754, -0.061259665, 0.02883365, -0.3199015, 0.40625557, -0.3794913, 0.42420092, 0.4631467, 0.54236996, 0.031472385, 0.2635622, -0.25566247, -0.040713936, 0.48734123, 0.2742017, -0.15524681, 0.025654443, 0.056942068, -0.48883253, 0.60433495, 0.03514151] + } + ) { + _additional { + vector + } + contents + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Greater(t, len(notes), defaultLimit) + }) + + t.Run("nearVector: with implicit unlimited search - no limit provided (with certainty)", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + nearVector: { + certainty: 0.1 + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484, -0.0011832615, 0.23041481, -0.091878965, 0.1184808, 0.060692377, 0.1748896, 0.53659165, 0.12019079, 0.54373807, -0.43369776, 0.1843199, -0.19319294, 0.122559674, -0.09465141, -0.14069664, 0.031092037, -0.1763922, 0.0074394196, -0.2586067, 0.10990611, -0.18623954, -0.038631044, -0.22795723, 0.09296776, -0.31110525, -0.37963995, -0.19045947, 0.48089907, 0.46725857, 0.28541213, 0.08294283, -0.18865398, 0.09647029, 0.2321466, -0.03435125, -0.09602424, -0.3831683, -0.027315892, 0.4215511, -0.35865632, 0.41955224, 0.090477064, 0.29026023, -0.48485047, -0.24656451, -0.06478625, 0.07755969, -0.049564634, 0.026147474, -0.028342195, -0.035627227, 0.49309397, 0.3705331, 0.04615483, 0.14789912, -0.01220134, 0.300666, -0.246646, 0.0038986988, 0.16730541, 0.46581128, -0.04931062, 0.040290095, 0.32867354, -0.18300997, 0.30411696, 
-0.1969807, 0.4727539, -0.31915516, -0.32722718, 0.12694982, 0.22583495, -0.014532595, -0.14432396, 0.2469766, 0.14872919, -0.06750808, 0.06351308, -0.287756, -0.32118404, 0.25326216, 0.45288888, -0.36307186, 0.05369787, -0.3283361, 0.07754738, 0.38473788, -0.5120014, -0.3344492, -0.1102767, -0.16755687, -0.3575448, -0.2555015, -0.42976367, -0.2695758, 0.04068499, 0.591914, -0.008395256, 0.2613976, -0.51722556, -0.22581989, 0.036452737, 0.42190477, -0.256124, 0.25849363, -0.073515825, -0.08690646, 0.013338611, 0.14928678, 0.16371651, 0.111465625, -0.117571846, -0.44434816, 0.07603647, 0.4188736, -0.16967061, 0.040313836, 0.41924894, -0.36998197, 0.23204626, -0.23309743, -0.18061559, 1.0674918, -0.51468146, -0.37230963, 0.02214618, -0.5616187, -0.07473461, -0.3314831, -0.24691144, -0.34061712, -0.1511554, 0.33711013, 0.1055847, -0.047220375, -0.06317049, -0.22572862, -0.21646689, 0.090705894, 0.018562902, 0.020744732, -0.5342965, -0.23402104, -0.17680043, 0.1363935, -0.17916845, 0.37825805, -0.07233101, -0.28244817, 0.4055966, 0.19701958, 0.6236174, 0.078134544, 0.46439224, -0.60451704, 0.16722181, -0.20011653, 0.36931068, -0.39967215, 0.21178648, 0.47920865, -0.033521328, 0.57077545, -0.8003054, -0.4028354, 0.27799648, -0.23070334, 0.57747835, 0.49984616, -0.12409506, -0.26694623, -0.20168623, -0.19415514, -0.4626071, 0.10374411, 0.24878122, 0.47352287, -0.6494472, -0.26087105, 0.418008, -0.2789803, -0.60986733, -0.54914564, 0.4734504, 0.04347568, -0.13549352, 0.1530609, 0.085039385, -0.014595425, -0.1106091, 0.014441653, 0.14899726, -0.107090004, 0.03979257, 0.20897605, -0.040235993, 0.1928343, -0.048328623, 0.5435577, -0.1704212, -0.016530415, 0.11402996, 0.24666561, -0.62601864, 0.6729872, -0.21594357, -0.3161654, 0.2899072, -0.05281632, 0.026857251, 0.13927892, 0.26362655, 0.37995058, -0.056429606, 0.27310744, -0.34237143, -0.6419976, -0.02513231, -0.18217334, 0.021232722, -0.35155025, 0.055071075, -0.22192729, 0.4597671, 0.09872845, -0.41803727, -0.08897542, 
-0.63276047, 0.38059604, 0.45347637, 0.52723855, 0.25096536, -0.3165448, 0.43803728, 0.02419832, 0.317004, -0.059602205, 0.15561013, 0.11867607, 0.7157601, 0.08024589, -0.013107148, 0.3127224, -0.08844044, 0.5374578, 0.39421698, -0.054171022, 0.0913302, -0.081881694, 0.24596375, -0.2841653, -0.5482517, -0.5673938, 0.05889957, -0.1146344, 0.39452744, -0.03414711, 0.32027423, 0.2599335, -0.31470263, 0.45967287, -0.5710101, -0.21222454, 0.38154987, -0.21218868, -0.4366558, 0.13715877, 0.23925674, 0.34832072, -0.03769251, 0.25530148, 0.10662722, -0.5269836, 0.32952255, 0.46165445, 0.3794754, -0.061259665, 0.02883365, -0.3199015, 0.40625557, -0.3794913, 0.42420092, 0.4631467, 0.54236996, 0.031472385, 0.2635622, -0.25566247, -0.040713936, 0.48734123, 0.2742017, -0.15524681, 0.025654443, 0.056942068, -0.48883253, 0.60433495, 0.03514151] + } + ) { + _additional { + vector + } + contents + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Greater(t, len(notes), defaultLimit) + }) + + t.Run("nearVector: with implicit unlimited search - negative limit provided (with distance)", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + limit: -1 + nearVector: { + distance: 0.9 + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484, -0.0011832615, 0.23041481, -0.091878965, 0.1184808, 0.060692377, 0.1748896, 0.53659165, 0.12019079, 0.54373807, -0.43369776, 0.1843199, -0.19319294, 0.122559674, -0.09465141, -0.14069664, 0.031092037, -0.1763922, 0.0074394196, -0.2586067, 0.10990611, -0.18623954, -0.038631044, -0.22795723, 0.09296776, -0.31110525, -0.37963995, -0.19045947, 0.48089907, 0.46725857, 0.28541213, 0.08294283, -0.18865398, 0.09647029, 0.2321466, -0.03435125, -0.09602424, -0.3831683, -0.027315892, 0.4215511, -0.35865632, 0.41955224, 0.090477064, 0.29026023, -0.48485047, -0.24656451, -0.06478625, 0.07755969, -0.049564634, 0.026147474, 
-0.028342195, -0.035627227, 0.49309397, 0.3705331, 0.04615483, 0.14789912, -0.01220134, 0.300666, -0.246646, 0.0038986988, 0.16730541, 0.46581128, -0.04931062, 0.040290095, 0.32867354, -0.18300997, 0.30411696, -0.1969807, 0.4727539, -0.31915516, -0.32722718, 0.12694982, 0.22583495, -0.014532595, -0.14432396, 0.2469766, 0.14872919, -0.06750808, 0.06351308, -0.287756, -0.32118404, 0.25326216, 0.45288888, -0.36307186, 0.05369787, -0.3283361, 0.07754738, 0.38473788, -0.5120014, -0.3344492, -0.1102767, -0.16755687, -0.3575448, -0.2555015, -0.42976367, -0.2695758, 0.04068499, 0.591914, -0.008395256, 0.2613976, -0.51722556, -0.22581989, 0.036452737, 0.42190477, -0.256124, 0.25849363, -0.073515825, -0.08690646, 0.013338611, 0.14928678, 0.16371651, 0.111465625, -0.117571846, -0.44434816, 0.07603647, 0.4188736, -0.16967061, 0.040313836, 0.41924894, -0.36998197, 0.23204626, -0.23309743, -0.18061559, 1.0674918, -0.51468146, -0.37230963, 0.02214618, -0.5616187, -0.07473461, -0.3314831, -0.24691144, -0.34061712, -0.1511554, 0.33711013, 0.1055847, -0.047220375, -0.06317049, -0.22572862, -0.21646689, 0.090705894, 0.018562902, 0.020744732, -0.5342965, -0.23402104, -0.17680043, 0.1363935, -0.17916845, 0.37825805, -0.07233101, -0.28244817, 0.4055966, 0.19701958, 0.6236174, 0.078134544, 0.46439224, -0.60451704, 0.16722181, -0.20011653, 0.36931068, -0.39967215, 0.21178648, 0.47920865, -0.033521328, 0.57077545, -0.8003054, -0.4028354, 0.27799648, -0.23070334, 0.57747835, 0.49984616, -0.12409506, -0.26694623, -0.20168623, -0.19415514, -0.4626071, 0.10374411, 0.24878122, 0.47352287, -0.6494472, -0.26087105, 0.418008, -0.2789803, -0.60986733, -0.54914564, 0.4734504, 0.04347568, -0.13549352, 0.1530609, 0.085039385, -0.014595425, -0.1106091, 0.014441653, 0.14899726, -0.107090004, 0.03979257, 0.20897605, -0.040235993, 0.1928343, -0.048328623, 0.5435577, -0.1704212, -0.016530415, 0.11402996, 0.24666561, -0.62601864, 0.6729872, -0.21594357, -0.3161654, 0.2899072, -0.05281632, 0.026857251, 
0.13927892, 0.26362655, 0.37995058, -0.056429606, 0.27310744, -0.34237143, -0.6419976, -0.02513231, -0.18217334, 0.021232722, -0.35155025, 0.055071075, -0.22192729, 0.4597671, 0.09872845, -0.41803727, -0.08897542, -0.63276047, 0.38059604, 0.45347637, 0.52723855, 0.25096536, -0.3165448, 0.43803728, 0.02419832, 0.317004, -0.059602205, 0.15561013, 0.11867607, 0.7157601, 0.08024589, -0.013107148, 0.3127224, -0.08844044, 0.5374578, 0.39421698, -0.054171022, 0.0913302, -0.081881694, 0.24596375, -0.2841653, -0.5482517, -0.5673938, 0.05889957, -0.1146344, 0.39452744, -0.03414711, 0.32027423, 0.2599335, -0.31470263, 0.45967287, -0.5710101, -0.21222454, 0.38154987, -0.21218868, -0.4366558, 0.13715877, 0.23925674, 0.34832072, -0.03769251, 0.25530148, 0.10662722, -0.5269836, 0.32952255, 0.46165445, 0.3794754, -0.061259665, 0.02883365, -0.3199015, 0.40625557, -0.3794913, 0.42420092, 0.4631467, 0.54236996, 0.031472385, 0.2635622, -0.25566247, -0.040713936, 0.48734123, 0.2742017, -0.15524681, 0.025654443, 0.056942068, -0.48883253, 0.60433495, 0.03514151] + } + ) { + _additional { + vector + } + contents + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Greater(t, len(notes), defaultLimit) + }) + + t.Run("nearVector: with implicit unlimited search - negative limit provided (with certainty)", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + limit: -1 + nearVector: { + certainty: 0.1 + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484, -0.0011832615, 0.23041481, -0.091878965, 0.1184808, 0.060692377, 0.1748896, 0.53659165, 0.12019079, 0.54373807, -0.43369776, 0.1843199, -0.19319294, 0.122559674, -0.09465141, -0.14069664, 0.031092037, -0.1763922, 0.0074394196, -0.2586067, 0.10990611, -0.18623954, -0.038631044, -0.22795723, 0.09296776, -0.31110525, -0.37963995, -0.19045947, 0.48089907, 0.46725857, 0.28541213, 0.08294283, -0.18865398, 
0.09647029, 0.2321466, -0.03435125, -0.09602424, -0.3831683, -0.027315892, 0.4215511, -0.35865632, 0.41955224, 0.090477064, 0.29026023, -0.48485047, -0.24656451, -0.06478625, 0.07755969, -0.049564634, 0.026147474, -0.028342195, -0.035627227, 0.49309397, 0.3705331, 0.04615483, 0.14789912, -0.01220134, 0.300666, -0.246646, 0.0038986988, 0.16730541, 0.46581128, -0.04931062, 0.040290095, 0.32867354, -0.18300997, 0.30411696, -0.1969807, 0.4727539, -0.31915516, -0.32722718, 0.12694982, 0.22583495, -0.014532595, -0.14432396, 0.2469766, 0.14872919, -0.06750808, 0.06351308, -0.287756, -0.32118404, 0.25326216, 0.45288888, -0.36307186, 0.05369787, -0.3283361, 0.07754738, 0.38473788, -0.5120014, -0.3344492, -0.1102767, -0.16755687, -0.3575448, -0.2555015, -0.42976367, -0.2695758, 0.04068499, 0.591914, -0.008395256, 0.2613976, -0.51722556, -0.22581989, 0.036452737, 0.42190477, -0.256124, 0.25849363, -0.073515825, -0.08690646, 0.013338611, 0.14928678, 0.16371651, 0.111465625, -0.117571846, -0.44434816, 0.07603647, 0.4188736, -0.16967061, 0.040313836, 0.41924894, -0.36998197, 0.23204626, -0.23309743, -0.18061559, 1.0674918, -0.51468146, -0.37230963, 0.02214618, -0.5616187, -0.07473461, -0.3314831, -0.24691144, -0.34061712, -0.1511554, 0.33711013, 0.1055847, -0.047220375, -0.06317049, -0.22572862, -0.21646689, 0.090705894, 0.018562902, 0.020744732, -0.5342965, -0.23402104, -0.17680043, 0.1363935, -0.17916845, 0.37825805, -0.07233101, -0.28244817, 0.4055966, 0.19701958, 0.6236174, 0.078134544, 0.46439224, -0.60451704, 0.16722181, -0.20011653, 0.36931068, -0.39967215, 0.21178648, 0.47920865, -0.033521328, 0.57077545, -0.8003054, -0.4028354, 0.27799648, -0.23070334, 0.57747835, 0.49984616, -0.12409506, -0.26694623, -0.20168623, -0.19415514, -0.4626071, 0.10374411, 0.24878122, 0.47352287, -0.6494472, -0.26087105, 0.418008, -0.2789803, -0.60986733, -0.54914564, 0.4734504, 0.04347568, -0.13549352, 0.1530609, 0.085039385, -0.014595425, -0.1106091, 0.014441653, 0.14899726, -0.107090004, 
0.03979257, 0.20897605, -0.040235993, 0.1928343, -0.048328623, 0.5435577, -0.1704212, -0.016530415, 0.11402996, 0.24666561, -0.62601864, 0.6729872, -0.21594357, -0.3161654, 0.2899072, -0.05281632, 0.026857251, 0.13927892, 0.26362655, 0.37995058, -0.056429606, 0.27310744, -0.34237143, -0.6419976, -0.02513231, -0.18217334, 0.021232722, -0.35155025, 0.055071075, -0.22192729, 0.4597671, 0.09872845, -0.41803727, -0.08897542, -0.63276047, 0.38059604, 0.45347637, 0.52723855, 0.25096536, -0.3165448, 0.43803728, 0.02419832, 0.317004, -0.059602205, 0.15561013, 0.11867607, 0.7157601, 0.08024589, -0.013107148, 0.3127224, -0.08844044, 0.5374578, 0.39421698, -0.054171022, 0.0913302, -0.081881694, 0.24596375, -0.2841653, -0.5482517, -0.5673938, 0.05889957, -0.1146344, 0.39452744, -0.03414711, 0.32027423, 0.2599335, -0.31470263, 0.45967287, -0.5710101, -0.21222454, 0.38154987, -0.21218868, -0.4366558, 0.13715877, 0.23925674, 0.34832072, -0.03769251, 0.25530148, 0.10662722, -0.5269836, 0.32952255, 0.46165445, 0.3794754, -0.061259665, 0.02883365, -0.3199015, 0.40625557, -0.3794913, 0.42420092, 0.4631467, 0.54236996, 0.031472385, 0.2635622, -0.25566247, -0.040713936, 0.48734123, 0.2742017, -0.15524681, 0.025654443, 0.056942068, -0.48883253, 0.60433495, 0.03514151] + } + ) { + _additional { + vector + } + contents + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Greater(t, len(notes), defaultLimit) + }) + + t.Run("nearVector: with limited search - limit provided (with distance)", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + limit: 10 + nearVector: { + distance: 0.9 + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484, -0.0011832615, 0.23041481, -0.091878965, 0.1184808, 0.060692377, 0.1748896, 0.53659165, 0.12019079, 0.54373807, -0.43369776, 0.1843199, -0.19319294, 0.122559674, -0.09465141, -0.14069664, 0.031092037, -0.1763922, 
0.0074394196, -0.2586067, 0.10990611, -0.18623954, -0.038631044, -0.22795723, 0.09296776, -0.31110525, -0.37963995, -0.19045947, 0.48089907, 0.46725857, 0.28541213, 0.08294283, -0.18865398, 0.09647029, 0.2321466, -0.03435125, -0.09602424, -0.3831683, -0.027315892, 0.4215511, -0.35865632, 0.41955224, 0.090477064, 0.29026023, -0.48485047, -0.24656451, -0.06478625, 0.07755969, -0.049564634, 0.026147474, -0.028342195, -0.035627227, 0.49309397, 0.3705331, 0.04615483, 0.14789912, -0.01220134, 0.300666, -0.246646, 0.0038986988, 0.16730541, 0.46581128, -0.04931062, 0.040290095, 0.32867354, -0.18300997, 0.30411696, -0.1969807, 0.4727539, -0.31915516, -0.32722718, 0.12694982, 0.22583495, -0.014532595, -0.14432396, 0.2469766, 0.14872919, -0.06750808, 0.06351308, -0.287756, -0.32118404, 0.25326216, 0.45288888, -0.36307186, 0.05369787, -0.3283361, 0.07754738, 0.38473788, -0.5120014, -0.3344492, -0.1102767, -0.16755687, -0.3575448, -0.2555015, -0.42976367, -0.2695758, 0.04068499, 0.591914, -0.008395256, 0.2613976, -0.51722556, -0.22581989, 0.036452737, 0.42190477, -0.256124, 0.25849363, -0.073515825, -0.08690646, 0.013338611, 0.14928678, 0.16371651, 0.111465625, -0.117571846, -0.44434816, 0.07603647, 0.4188736, -0.16967061, 0.040313836, 0.41924894, -0.36998197, 0.23204626, -0.23309743, -0.18061559, 1.0674918, -0.51468146, -0.37230963, 0.02214618, -0.5616187, -0.07473461, -0.3314831, -0.24691144, -0.34061712, -0.1511554, 0.33711013, 0.1055847, -0.047220375, -0.06317049, -0.22572862, -0.21646689, 0.090705894, 0.018562902, 0.020744732, -0.5342965, -0.23402104, -0.17680043, 0.1363935, -0.17916845, 0.37825805, -0.07233101, -0.28244817, 0.4055966, 0.19701958, 0.6236174, 0.078134544, 0.46439224, -0.60451704, 0.16722181, -0.20011653, 0.36931068, -0.39967215, 0.21178648, 0.47920865, -0.033521328, 0.57077545, -0.8003054, -0.4028354, 0.27799648, -0.23070334, 0.57747835, 0.49984616, -0.12409506, -0.26694623, -0.20168623, -0.19415514, -0.4626071, 0.10374411, 0.24878122, 0.47352287, 
-0.6494472, -0.26087105, 0.418008, -0.2789803, -0.60986733, -0.54914564, 0.4734504, 0.04347568, -0.13549352, 0.1530609, 0.085039385, -0.014595425, -0.1106091, 0.014441653, 0.14899726, -0.107090004, 0.03979257, 0.20897605, -0.040235993, 0.1928343, -0.048328623, 0.5435577, -0.1704212, -0.016530415, 0.11402996, 0.24666561, -0.62601864, 0.6729872, -0.21594357, -0.3161654, 0.2899072, -0.05281632, 0.026857251, 0.13927892, 0.26362655, 0.37995058, -0.056429606, 0.27310744, -0.34237143, -0.6419976, -0.02513231, -0.18217334, 0.021232722, -0.35155025, 0.055071075, -0.22192729, 0.4597671, 0.09872845, -0.41803727, -0.08897542, -0.63276047, 0.38059604, 0.45347637, 0.52723855, 0.25096536, -0.3165448, 0.43803728, 0.02419832, 0.317004, -0.059602205, 0.15561013, 0.11867607, 0.7157601, 0.08024589, -0.013107148, 0.3127224, -0.08844044, 0.5374578, 0.39421698, -0.054171022, 0.0913302, -0.081881694, 0.24596375, -0.2841653, -0.5482517, -0.5673938, 0.05889957, -0.1146344, 0.39452744, -0.03414711, 0.32027423, 0.2599335, -0.31470263, 0.45967287, -0.5710101, -0.21222454, 0.38154987, -0.21218868, -0.4366558, 0.13715877, 0.23925674, 0.34832072, -0.03769251, 0.25530148, 0.10662722, -0.5269836, 0.32952255, 0.46165445, 0.3794754, -0.061259665, 0.02883365, -0.3199015, 0.40625557, -0.3794913, 0.42420092, 0.4631467, 0.54236996, 0.031472385, 0.2635622, -0.25566247, -0.040713936, 0.48734123, 0.2742017, -0.15524681, 0.025654443, 0.056942068, -0.48883253, 0.60433495, 0.03514151] + } + ) { + _additional { + vector + } + contents + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Equal(t, 10, len(notes)) + }) + + t.Run("nearVector: with limited search - limit provided (with certainty)", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + limit: 10 + nearVector: { + certainty: 0.1 + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484, -0.0011832615, 0.23041481, 
-0.091878965, 0.1184808, 0.060692377, 0.1748896, 0.53659165, 0.12019079, 0.54373807, -0.43369776, 0.1843199, -0.19319294, 0.122559674, -0.09465141, -0.14069664, 0.031092037, -0.1763922, 0.0074394196, -0.2586067, 0.10990611, -0.18623954, -0.038631044, -0.22795723, 0.09296776, -0.31110525, -0.37963995, -0.19045947, 0.48089907, 0.46725857, 0.28541213, 0.08294283, -0.18865398, 0.09647029, 0.2321466, -0.03435125, -0.09602424, -0.3831683, -0.027315892, 0.4215511, -0.35865632, 0.41955224, 0.090477064, 0.29026023, -0.48485047, -0.24656451, -0.06478625, 0.07755969, -0.049564634, 0.026147474, -0.028342195, -0.035627227, 0.49309397, 0.3705331, 0.04615483, 0.14789912, -0.01220134, 0.300666, -0.246646, 0.0038986988, 0.16730541, 0.46581128, -0.04931062, 0.040290095, 0.32867354, -0.18300997, 0.30411696, -0.1969807, 0.4727539, -0.31915516, -0.32722718, 0.12694982, 0.22583495, -0.014532595, -0.14432396, 0.2469766, 0.14872919, -0.06750808, 0.06351308, -0.287756, -0.32118404, 0.25326216, 0.45288888, -0.36307186, 0.05369787, -0.3283361, 0.07754738, 0.38473788, -0.5120014, -0.3344492, -0.1102767, -0.16755687, -0.3575448, -0.2555015, -0.42976367, -0.2695758, 0.04068499, 0.591914, -0.008395256, 0.2613976, -0.51722556, -0.22581989, 0.036452737, 0.42190477, -0.256124, 0.25849363, -0.073515825, -0.08690646, 0.013338611, 0.14928678, 0.16371651, 0.111465625, -0.117571846, -0.44434816, 0.07603647, 0.4188736, -0.16967061, 0.040313836, 0.41924894, -0.36998197, 0.23204626, -0.23309743, -0.18061559, 1.0674918, -0.51468146, -0.37230963, 0.02214618, -0.5616187, -0.07473461, -0.3314831, -0.24691144, -0.34061712, -0.1511554, 0.33711013, 0.1055847, -0.047220375, -0.06317049, -0.22572862, -0.21646689, 0.090705894, 0.018562902, 0.020744732, -0.5342965, -0.23402104, -0.17680043, 0.1363935, -0.17916845, 0.37825805, -0.07233101, -0.28244817, 0.4055966, 0.19701958, 0.6236174, 0.078134544, 0.46439224, -0.60451704, 0.16722181, -0.20011653, 0.36931068, -0.39967215, 0.21178648, 0.47920865, -0.033521328, 
0.57077545, -0.8003054, -0.4028354, 0.27799648, -0.23070334, 0.57747835, 0.49984616, -0.12409506, -0.26694623, -0.20168623, -0.19415514, -0.4626071, 0.10374411, 0.24878122, 0.47352287, -0.6494472, -0.26087105, 0.418008, -0.2789803, -0.60986733, -0.54914564, 0.4734504, 0.04347568, -0.13549352, 0.1530609, 0.085039385, -0.014595425, -0.1106091, 0.014441653, 0.14899726, -0.107090004, 0.03979257, 0.20897605, -0.040235993, 0.1928343, -0.048328623, 0.5435577, -0.1704212, -0.016530415, 0.11402996, 0.24666561, -0.62601864, 0.6729872, -0.21594357, -0.3161654, 0.2899072, -0.05281632, 0.026857251, 0.13927892, 0.26362655, 0.37995058, -0.056429606, 0.27310744, -0.34237143, -0.6419976, -0.02513231, -0.18217334, 0.021232722, -0.35155025, 0.055071075, -0.22192729, 0.4597671, 0.09872845, -0.41803727, -0.08897542, -0.63276047, 0.38059604, 0.45347637, 0.52723855, 0.25096536, -0.3165448, 0.43803728, 0.02419832, 0.317004, -0.059602205, 0.15561013, 0.11867607, 0.7157601, 0.08024589, -0.013107148, 0.3127224, -0.08844044, 0.5374578, 0.39421698, -0.054171022, 0.0913302, -0.081881694, 0.24596375, -0.2841653, -0.5482517, -0.5673938, 0.05889957, -0.1146344, 0.39452744, -0.03414711, 0.32027423, 0.2599335, -0.31470263, 0.45967287, -0.5710101, -0.21222454, 0.38154987, -0.21218868, -0.4366558, 0.13715877, 0.23925674, 0.34832072, -0.03769251, 0.25530148, 0.10662722, -0.5269836, 0.32952255, 0.46165445, 0.3794754, -0.061259665, 0.02883365, -0.3199015, 0.40625557, -0.3794913, 0.42420092, 0.4631467, 0.54236996, 0.031472385, 0.2635622, -0.25566247, -0.040713936, 0.48734123, 0.2742017, -0.15524681, 0.025654443, 0.056942068, -0.48883253, 0.60433495, 0.03514151] + } + ) { + _additional { + vector + } + contents + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Equal(t, 10, len(notes)) + }) + + t.Run("nearVector: results limited by distance", func(t *testing.T) { + query := ` + { + 
Get { + RansomNote( + nearVector: { + distance: 0.01 + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484, -0.0011832615, 0.23041481, -0.091878965, 0.1184808, 0.060692377, 0.1748896, 0.53659165, 0.12019079, 0.54373807, -0.43369776, 0.1843199, -0.19319294, 0.122559674, -0.09465141, -0.14069664, 0.031092037, -0.1763922, 0.0074394196, -0.2586067, 0.10990611, -0.18623954, -0.038631044, -0.22795723, 0.09296776, -0.31110525, -0.37963995, -0.19045947, 0.48089907, 0.46725857, 0.28541213, 0.08294283, -0.18865398, 0.09647029, 0.2321466, -0.03435125, -0.09602424, -0.3831683, -0.027315892, 0.4215511, -0.35865632, 0.41955224, 0.090477064, 0.29026023, -0.48485047, -0.24656451, -0.06478625, 0.07755969, -0.049564634, 0.026147474, -0.028342195, -0.035627227, 0.49309397, 0.3705331, 0.04615483, 0.14789912, -0.01220134, 0.300666, -0.246646, 0.0038986988, 0.16730541, 0.46581128, -0.04931062, 0.040290095, 0.32867354, -0.18300997, 0.30411696, -0.1969807, 0.4727539, -0.31915516, -0.32722718, 0.12694982, 0.22583495, -0.014532595, -0.14432396, 0.2469766, 0.14872919, -0.06750808, 0.06351308, -0.287756, -0.32118404, 0.25326216, 0.45288888, -0.36307186, 0.05369787, -0.3283361, 0.07754738, 0.38473788, -0.5120014, -0.3344492, -0.1102767, -0.16755687, -0.3575448, -0.2555015, -0.42976367, -0.2695758, 0.04068499, 0.591914, -0.008395256, 0.2613976, -0.51722556, -0.22581989, 0.036452737, 0.42190477, -0.256124, 0.25849363, -0.073515825, -0.08690646, 0.013338611, 0.14928678, 0.16371651, 0.111465625, -0.117571846, -0.44434816, 0.07603647, 0.4188736, -0.16967061, 0.040313836, 0.41924894, -0.36998197, 0.23204626, -0.23309743, -0.18061559, 1.0674918, -0.51468146, -0.37230963, 0.02214618, -0.5616187, -0.07473461, -0.3314831, -0.24691144, -0.34061712, -0.1511554, 0.33711013, 0.1055847, -0.047220375, -0.06317049, -0.22572862, -0.21646689, 0.090705894, 0.018562902, 0.020744732, -0.5342965, -0.23402104, -0.17680043, 0.1363935, -0.17916845, 0.37825805, -0.07233101, -0.28244817, 0.4055966, 0.19701958, 
0.6236174, 0.078134544, 0.46439224, -0.60451704, 0.16722181, -0.20011653, 0.36931068, -0.39967215, 0.21178648, 0.47920865, -0.033521328, 0.57077545, -0.8003054, -0.4028354, 0.27799648, -0.23070334, 0.57747835, 0.49984616, -0.12409506, -0.26694623, -0.20168623, -0.19415514, -0.4626071, 0.10374411, 0.24878122, 0.47352287, -0.6494472, -0.26087105, 0.418008, -0.2789803, -0.60986733, -0.54914564, 0.4734504, 0.04347568, -0.13549352, 0.1530609, 0.085039385, -0.014595425, -0.1106091, 0.014441653, 0.14899726, -0.107090004, 0.03979257, 0.20897605, -0.040235993, 0.1928343, -0.048328623, 0.5435577, -0.1704212, -0.016530415, 0.11402996, 0.24666561, -0.62601864, 0.6729872, -0.21594357, -0.3161654, 0.2899072, -0.05281632, 0.026857251, 0.13927892, 0.26362655, 0.37995058, -0.056429606, 0.27310744, -0.34237143, -0.6419976, -0.02513231, -0.18217334, 0.021232722, -0.35155025, 0.055071075, -0.22192729, 0.4597671, 0.09872845, -0.41803727, -0.08897542, -0.63276047, 0.38059604, 0.45347637, 0.52723855, 0.25096536, -0.3165448, 0.43803728, 0.02419832, 0.317004, -0.059602205, 0.15561013, 0.11867607, 0.7157601, 0.08024589, -0.013107148, 0.3127224, -0.08844044, 0.5374578, 0.39421698, -0.054171022, 0.0913302, -0.081881694, 0.24596375, -0.2841653, -0.5482517, -0.5673938, 0.05889957, -0.1146344, 0.39452744, -0.03414711, 0.32027423, 0.2599335, -0.31470263, 0.45967287, -0.5710101, -0.21222454, 0.38154987, -0.21218868, -0.4366558, 0.13715877, 0.23925674, 0.34832072, -0.03769251, 0.25530148, 0.10662722, -0.5269836, 0.32952255, 0.46165445, 0.3794754, -0.061259665, 0.02883365, -0.3199015, 0.40625557, -0.3794913, 0.42420092, 0.4631467, 0.54236996, 0.031472385, 0.2635622, -0.25566247, -0.040713936, 0.48734123, 0.2742017, -0.15524681, 0.025654443, 0.056942068, -0.48883253, 0.60433495, 0.03514151] + } + ) { + _additional { + id + } + contents + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + 
require.Less(t, len(notes), defaultLimit) + }) + + t.Run("nearVector: results limited by certainty", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + nearVector: { + certainty: 0.99 + vector: [-0.07853702, -0.33730024, 0.62998116, 0.08703484, -0.0011832615, 0.23041481, -0.091878965, 0.1184808, 0.060692377, 0.1748896, 0.53659165, 0.12019079, 0.54373807, -0.43369776, 0.1843199, -0.19319294, 0.122559674, -0.09465141, -0.14069664, 0.031092037, -0.1763922, 0.0074394196, -0.2586067, 0.10990611, -0.18623954, -0.038631044, -0.22795723, 0.09296776, -0.31110525, -0.37963995, -0.19045947, 0.48089907, 0.46725857, 0.28541213, 0.08294283, -0.18865398, 0.09647029, 0.2321466, -0.03435125, -0.09602424, -0.3831683, -0.027315892, 0.4215511, -0.35865632, 0.41955224, 0.090477064, 0.29026023, -0.48485047, -0.24656451, -0.06478625, 0.07755969, -0.049564634, 0.026147474, -0.028342195, -0.035627227, 0.49309397, 0.3705331, 0.04615483, 0.14789912, -0.01220134, 0.300666, -0.246646, 0.0038986988, 0.16730541, 0.46581128, -0.04931062, 0.040290095, 0.32867354, -0.18300997, 0.30411696, -0.1969807, 0.4727539, -0.31915516, -0.32722718, 0.12694982, 0.22583495, -0.014532595, -0.14432396, 0.2469766, 0.14872919, -0.06750808, 0.06351308, -0.287756, -0.32118404, 0.25326216, 0.45288888, -0.36307186, 0.05369787, -0.3283361, 0.07754738, 0.38473788, -0.5120014, -0.3344492, -0.1102767, -0.16755687, -0.3575448, -0.2555015, -0.42976367, -0.2695758, 0.04068499, 0.591914, -0.008395256, 0.2613976, -0.51722556, -0.22581989, 0.036452737, 0.42190477, -0.256124, 0.25849363, -0.073515825, -0.08690646, 0.013338611, 0.14928678, 0.16371651, 0.111465625, -0.117571846, -0.44434816, 0.07603647, 0.4188736, -0.16967061, 0.040313836, 0.41924894, -0.36998197, 0.23204626, -0.23309743, -0.18061559, 1.0674918, -0.51468146, -0.37230963, 0.02214618, -0.5616187, -0.07473461, -0.3314831, -0.24691144, -0.34061712, -0.1511554, 0.33711013, 0.1055847, -0.047220375, -0.06317049, -0.22572862, -0.21646689, 0.090705894, 
0.018562902, 0.020744732, -0.5342965, -0.23402104, -0.17680043, 0.1363935, -0.17916845, 0.37825805, -0.07233101, -0.28244817, 0.4055966, 0.19701958, 0.6236174, 0.078134544, 0.46439224, -0.60451704, 0.16722181, -0.20011653, 0.36931068, -0.39967215, 0.21178648, 0.47920865, -0.033521328, 0.57077545, -0.8003054, -0.4028354, 0.27799648, -0.23070334, 0.57747835, 0.49984616, -0.12409506, -0.26694623, -0.20168623, -0.19415514, -0.4626071, 0.10374411, 0.24878122, 0.47352287, -0.6494472, -0.26087105, 0.418008, -0.2789803, -0.60986733, -0.54914564, 0.4734504, 0.04347568, -0.13549352, 0.1530609, 0.085039385, -0.014595425, -0.1106091, 0.014441653, 0.14899726, -0.107090004, 0.03979257, 0.20897605, -0.040235993, 0.1928343, -0.048328623, 0.5435577, -0.1704212, -0.016530415, 0.11402996, 0.24666561, -0.62601864, 0.6729872, -0.21594357, -0.3161654, 0.2899072, -0.05281632, 0.026857251, 0.13927892, 0.26362655, 0.37995058, -0.056429606, 0.27310744, -0.34237143, -0.6419976, -0.02513231, -0.18217334, 0.021232722, -0.35155025, 0.055071075, -0.22192729, 0.4597671, 0.09872845, -0.41803727, -0.08897542, -0.63276047, 0.38059604, 0.45347637, 0.52723855, 0.25096536, -0.3165448, 0.43803728, 0.02419832, 0.317004, -0.059602205, 0.15561013, 0.11867607, 0.7157601, 0.08024589, -0.013107148, 0.3127224, -0.08844044, 0.5374578, 0.39421698, -0.054171022, 0.0913302, -0.081881694, 0.24596375, -0.2841653, -0.5482517, -0.5673938, 0.05889957, -0.1146344, 0.39452744, -0.03414711, 0.32027423, 0.2599335, -0.31470263, 0.45967287, -0.5710101, -0.21222454, 0.38154987, -0.21218868, -0.4366558, 0.13715877, 0.23925674, 0.34832072, -0.03769251, 0.25530148, 0.10662722, -0.5269836, 0.32952255, 0.46165445, 0.3794754, -0.061259665, 0.02883365, -0.3199015, 0.40625557, -0.3794913, 0.42420092, 0.4631467, 0.54236996, 0.031472385, 0.2635622, -0.25566247, -0.040713936, 0.48734123, 0.2742017, -0.15524681, 0.025654443, 0.056942068, -0.48883253, 0.60433495, 0.03514151] + } + ) { + _additional { + id + } + contents + } + } + } + ` + 
result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Less(t, len(notes), defaultLimit) + }) + + // nearObject + + nearObjID := getOneExistingID(t, "RansomNote") + + t.Run("nearObject: with implicit unlimited search - no limit provided (with distance)", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + RansomNote( + nearObject: { + distance: 1.8 + id: "%s" + } + ) { + contents + } + } + } + `, nearObjID) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Greater(t, len(notes), defaultLimit) + }) + + t.Run("nearObject: with implicit unlimited search - no limit provided (with certainty)", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + RansomNote( + nearObject: { + certainty: 0.1 + id: "%s" + } + ) { + contents + } + } + } + `, nearObjID) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Greater(t, len(notes), defaultLimit) + }) + + t.Run("nearObject: with implicit unlimited search - negative limit provided (with distance)", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + RansomNote( + limit: -1 + nearObject: { + distance: 0.9 + id: "%s" + } + ) { + contents + } + } + } + `, nearObjID) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Greater(t, len(notes), defaultLimit) + }) + + t.Run("nearObject: with implicit unlimited search - negative limit provided (with certainty)", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + RansomNote( + limit: -1 + nearObject: { + certainty: 0.1 + id: "%s" + } + ) { + contents + } + } + } + `, nearObjID) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) 
+ notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Greater(t, len(notes), defaultLimit) + }) + + t.Run("nearObject: with limited search - limit provided (with distance)", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + RansomNote( + limit: 13 + nearObject: { + distance: 0.9 + id: "%s" + } + ) { + contents + } + } + } + `, nearObjID) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Equal(t, 13, len(notes)) + }) + + t.Run("nearObject: with limited search - limit provided (with certainty)", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + RansomNote( + limit: 13 + nearObject: { + certainty: 0.1 + id: "%s" + } + ) { + contents + } + } + } + `, nearObjID) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Equal(t, 13, len(notes)) + }) + + t.Run("nearObject: results limited by distance", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + RansomNote( + nearObject: { + distance: 0.01 + id: "%s" + } + ) { + _additional { + id + } + contents + } + } + } + `, nearObjID) + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Less(t, len(notes), defaultLimit) + }) + + t.Run("nearObject: results limited by certainty", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + RansomNote( + nearObject: { + certainty: 0.99 + id: "%s" + } + ) { + _additional { + id + } + contents + } + } + } + `, nearObjID) + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Less(t, len(notes), defaultLimit) + }) + + // nearText + + t.Run("nearText: with implicit unlimited search - no limit provided 
(with distance)", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + nearText: { + distance: 1.8 + concepts: ["abcd"] + } + ) { + contents + } + } + } + ` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Greater(t, len(notes), defaultLimit) + }) + + t.Run("nearText: with implicit unlimited search - no limit provided (with certainty)", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + nearText: { + certainty: 0.1 + concepts: ["abcd"] + } + ) { + contents + } + } + } + ` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Greater(t, len(notes), defaultLimit) + }) + + t.Run("nearText: with implicit unlimited search - negative limit provided (with distance)", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + limit: -1 + nearText: { + distance: 1.8 + concepts: ["abcd"] + } + ) { + contents + } + } + } + ` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Greater(t, len(notes), defaultLimit) + }) + + t.Run("nearText: with implicit unlimited search - negative limit provided (with certainty)", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + limit: -1 + nearText: { + certainty: 0.1 + concepts: ["abcd"] + } + ) { + contents + } + } + } + ` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Greater(t, len(notes), defaultLimit) + }) + + t.Run("nearText: with limited search - limit provided (with distance)", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + limit: 5 + nearText: { + distance: 0.9 + concepts: ["abcd"] + } + ) { + contents + } + } + } + ` + result := 
graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Equal(t, 5, len(notes)) + }) + + t.Run("nearText: with limited search - limit provided (with certainty)", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + limit: 5 + nearText: { + certainty: 0.1 + concepts: ["abcd"] + } + ) { + contents + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.NotEmpty(t, notes) + require.Equal(t, 5, len(notes)) + }) + + t.Run("nearText: results limited by distance", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + nearText: { + distance: 0.2 + concepts: ["abcd"] + } + ) { + _additional { + id + } + contents + } + } + } + ` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.Less(t, len(notes), defaultLimit) + }) + + t.Run("nearText: results limited by certainty", func(t *testing.T) { + query := ` + { + Get { + RansomNote( + nearText: { + certainty: 0.9 + concepts: ["abcd"] + } + ) { + _additional { + id + } + contents + } + } + } + ` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "RansomNote").AsSlice() + require.Less(t, len(notes), defaultLimit) + }) +} + +func gettingObjectsWithNearFieldsMultiShard(t *testing.T) { + t.Run("nearText: results limited by distance with multi shard", func(t *testing.T) { + query := ` + { + Get { + MultiShard( + nearText: { + distance: 0.9 + concepts: ["multi shard"] + } + ) { + _additional { + id + } + } + } + } + ` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "MultiShard").AsSlice() + require.Equal(t, len(notes), 3) + }) + + t.Run("nearText: results limited by certainty with multi shard", func(t *testing.T) { + query := ` + { + Get { + MultiShard( + nearText: { + 
certainty: 0.1 + concepts: ["multi shard"] + } + ) { + _additional { + id + } + } + } + } + ` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + notes := result.Get("Get", "MultiShard").AsSlice() + require.Equal(t, len(notes), 3) + }) +} + +func getOneExistingID(t *testing.T, className string) string { + query := fmt.Sprintf("{Get {%s(limit: 1) {_additional {id}}}}", className) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query).Result.(map[string]interface{}) + + classes, ok := result["Get"].(map[string]interface{}) + if !ok { + t.Fatalf("Get response is not a map[string]interface{}") + } + + class, ok := classes[className] + if !ok { + t.Fatalf("class %s was not included in response", className) + } + + classSlice, ok := class.([]interface{}) + if !ok { + t.Fatalf("%s response is not a []interface{}", className) + } + require.NotEmpty(t, class, "getOneExistingID empty class response for %s", className) + + firstItem, ok := classSlice[0].(map[string]interface{}) + if !ok { + t.Fatalf("first item is not a map[string]interface{}") + } + + additional, ok := firstItem["_additional"].(map[string]interface{}) + if !ok { + t.Fatalf("_additional result is not a map[string]interface{}") + } + + id, ok := additional["id"].(string) + if !ok { + t.Fatalf("id is not a string") + } + + return id +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/metrics_stability_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/metrics_stability_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a4555a8684407e2754f852536c877de5557a1242 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/metrics_stability_test.go @@ -0,0 +1,231 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 
2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "bufio" + "context" + "fmt" + "math" + "math/rand" + "net/http" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/backups" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +const metricClassPrefix = "MetricsClassPrefix" + +func metricsCount(t *testing.T) { + defer cleanupMetricsClasses(t, 0, 20) + createImportQueryMetricsClasses(t, 0, 10) + backupID := startBackup(t, 0, 10) + helper.ExpectBackupEventuallyCreated(t, backupID, "filesystem", nil, helper.WithPollInterval(time.Second), helper.WithDeadline(helper.MaxDeadline)) + metricsLinesBefore := countMetricsLines(t) + createImportQueryMetricsClasses(t, 10, 20) + backupID = startBackup(t, 0, 20) + helper.ExpectBackupEventuallyCreated(t, backupID, "filesystem", nil, helper.WithPollInterval(time.Second), helper.WithDeadline(helper.MaxDeadline)) + metricsLinesAfter := countMetricsLines(t) + assert.Equal(t, metricsLinesBefore, metricsLinesAfter, "number of metrics should not have changed") +} + +func createImportQueryMetricsClasses(t *testing.T, start, end int) { + for i := start; i < end; i++ { + createMetricsClass(t, i) + importMetricsClass(t, i) + queryMetricsClass(t, i) + } +} + +func createMetricsClass(t *testing.T, classIndex int) { + createObjectClass(t, &models.Class{ + Class: metricsClassName(classIndex), + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "some_text", + DataType: schema.DataTypeText.PropString(), + }, + }, + VectorIndexConfig: map[string]any{ + "efConstruction": 10, + "maxConnextions": 2, + "ef": 10, + }, + }) +} + +func queryMetricsClass(t *testing.T, 
classIndex int) { + // object by ID which exists + resp, err := helper.Client(t).Objects. + ObjectsClassGet( + objects.NewObjectsClassGetParams(). + WithID(helper.IntToUUID(1)). + WithClassName(metricsClassName(classIndex)), + nil) + + require.Nil(t, err) + assert.NotNil(t, resp.Payload) + + // object by ID which doesn't exist + // ignore any return values + helper.Client(t).Objects. + ObjectsClassGet( + objects.NewObjectsClassGetParams(). + WithID(helper.IntToUUID(math.MaxUint64)). + WithClassName(metricsClassName(classIndex)), + nil) + + // vector search + assert.EventuallyWithT(t, func(collectT *assert.CollectT) { + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, + fmt.Sprintf( + "{ Get { %s(nearVector:{vector: [0.3,0.3,0.7,0.7]}, limit:5) { some_text } } }", + metricsClassName(classIndex), + ), + ) + objs := result.Get("Get", metricsClassName(classIndex)).AsSlice() + assert.Len(collectT, objs, 5) + }, 15*time.Second, 500*time.Millisecond) + + // filtered vector search (which has specific metrics) + // vector search + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, + fmt.Sprintf( + "{ Get { %s(nearVector:{vector:[0.3,0.3,0.7,0.7]}, limit:5, where: %s) { some_text } } }", + metricsClassName(classIndex), + `{operator:Equal, valueText: "individually", path:["some_text"]}`, + ), + ) + objs := result.Get("Get", metricsClassName(classIndex)).AsSlice() + assert.Len(t, objs, 1) +} + +// make sure that we use both individual as well as batch imports, as they +// might produce different metrics +func importMetricsClass(t *testing.T, classIndex int) { + // individual + createObject(t, &models.Object{ + Class: metricsClassName(classIndex), + Properties: map[string]interface{}{ + "some_text": "this object was created individually", + }, + ID: helper.IntToUUID(1), + Vector: randomVector(4), + }) + + // with batches + const ( + batchSize = 100 + numBatches = 50 + ) + + for i := 0; i < numBatches; i++ { + batch := make([]*models.Object, batchSize) + for j := 
0; j < batchSize; j++ { + batch[j] = &models.Object{ + Class: metricsClassName(classIndex), + Properties: map[string]interface{}{ + "some_text": fmt.Sprintf("this is object %d of batch %d", j, i), + }, + Vector: randomVector(4), + } + } + + createObjectsBatch(t, batch) + } + + waitForIndexing(t, metricsClassName(classIndex)) +} + +func cleanupMetricsClasses(t *testing.T, start, end int) { + for i := start; i < end; i++ { + deleteObjectClass(t, metricsClassName(i)) + } +} + +func randomVector(dims int) []float32 { + out := make([]float32, dims) + for i := range out { + out[i] = rand.Float32() + } + return out +} + +func countMetricsLines(t *testing.T) int { + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, + "http://localhost:2112/metrics", nil) + require.Nil(t, err) + + c := &http.Client{} + res, err := c.Do(req) + require.Nil(t, err) + + defer res.Body.Close() + require.Equal(t, http.StatusOK, res.StatusCode) + + scanner := bufio.NewScanner(res.Body) + lineCount := 0 + for scanner.Scan() { + line := scanner.Text() + if strings.Contains(line, "shards_loaded") || strings.Contains(line, "shards_loading") || strings.Contains(line, "shards_unloading") || strings.Contains(line, "shards_unloaded") { + continue + } + require.NotContains( + t, + strings.ToLower(line), + strings.ToLower(metricClassPrefix), + ) + lineCount++ + } + + require.Nil(t, scanner.Err()) + + return lineCount +} + +func metricsClassName(classIndex int) string { + return fmt.Sprintf("%s_%d", metricClassPrefix, classIndex) +} + +func startBackup(t *testing.T, start, end int) string { + var includeClasses []string + for i := start; i < end; i++ { + includeClasses = append(includeClasses, metricsClassName(i)) + } + + backupID := fmt.Sprintf("metrics-test-backup-%d", rand.Intn(100000000)) + + _, err := helper.Client(t).Backups.BackupsCreate( + backups.NewBackupsCreateParams(). + WithBackend("filesystem"). 
+ WithBody(&models.BackupCreateRequest{ + ID: backupID, + Include: includeClasses, + }), + nil) + require.Nil(t, err) + + return backupID +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/multi_reftype_bug_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/multi_reftype_bug_test.go new file mode 100644 index 0000000000000000000000000000000000000000..2a41cb613fead70864dbedb8706c32e509f55daa --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/multi_reftype_bug_test.go @@ -0,0 +1,501 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "testing" + + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +// This test prevents a regression on +// https://github.com/weaviate/weaviate/issues/1410 +func TestMultipleRefTypeIssues(t *testing.T) { + className := func(suffix string) string { + return "MultiRefTypeBug" + suffix + } + defer deleteObjectClass(t, className("TargetOne")) + defer deleteObjectClass(t, className("TargetTwo")) + defer deleteObjectClass(t, className("Source")) + + const ( + targetOneID strfmt.UUID = "155c5914-6594-4cde-b3ab-f8570b561965" + targetTwoID strfmt.UUID = "ebf85a07-6b34-4e3b-b7c5-077f904fc955" + ) + + t.Run("import schema", func(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: className("TargetOne"), + Properties: []*models.Property{ + { + Name: "name", + DataType: []string{"text"}, + }, + }, + }) + + createObjectClass(t, &models.Class{ + Class: className("TargetTwo"), + 
Properties: []*models.Property{ + { + Name: "name", + DataType: []string{"text"}, + }, + }, + }) + + createObjectClass(t, &models.Class{ + Class: className("Source"), + Properties: []*models.Property{ + { + Name: "name", + DataType: []string{"text"}, + }, + { + Name: "toTargets", + DataType: []string{className("TargetOne"), className("TargetTwo")}, + }, + }, + }) + }) + + t.Run("import data", func(t *testing.T) { + createObject(t, &models.Object{ + Class: className("TargetOne"), + ID: targetOneID, + Properties: map[string]interface{}{ + "name": "target a", + }, + }) + + createObject(t, &models.Object{ + Class: className("TargetTwo"), + ID: targetTwoID, + Properties: map[string]interface{}{ + "name": "target b", + }, + }) + + createObject(t, &models.Object{ + Class: className("Source"), + Properties: map[string]interface{}{ + "name": "source without refs", + }, + }) + + createObject(t, &models.Object{ + Class: className("Source"), + Properties: map[string]interface{}{ + "name": "source with ref to One", + "toTargets": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", targetOneID), + }, + }, + }, + }) + + createObject(t, &models.Object{ + Class: className("Source"), + Properties: map[string]interface{}{ + "name": "source with ref to Two", + "toTargets": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", targetTwoID), + }, + }, + }, + }) + + createObject(t, &models.Object{ + Class: className("Source"), + Properties: map[string]interface{}{ + "name": "source with ref to both", + "toTargets": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", targetOneID), + }, + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", targetTwoID), + }, + }, + }, + }) + }) + + t.Run("verify different scenarios through GraphQL", func(t *testing.T) { + t.Run("requesting no references", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + %s 
{ + name + } + } + } + `, className("Source")) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + actual := result.Get("Get", className("Source")).AsSlice() + expected := []interface{}{ + map[string]interface{}{"name": "source with ref to One"}, + map[string]interface{}{"name": "source with ref to Two"}, + map[string]interface{}{"name": "source with ref to both"}, + map[string]interface{}{"name": "source without refs"}, + } + + assert.ElementsMatch(t, expected, actual) + }) + + t.Run("requesting references of type One without additional { id }", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + %s { + name + toTargets { + ... on %s { + name + } + } + } + } + } + `, className("Source"), className("TargetOne")) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + actual := result.Get("Get", className("Source")).AsSlice() + expected := []interface{}{ + map[string]interface{}{ + "name": "source with ref to One", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target a", + }, + }, + }, + map[string]interface{}{ + "name": "source with ref to Two", + "toTargets": nil, + }, + map[string]interface{}{ + "name": "source with ref to both", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target a", + }, + }, + }, + map[string]interface{}{ + "name": "source without refs", + "toTargets": nil, + }, + } + + assert.ElementsMatch(t, expected, actual) + }) + + t.Run("requesting references of type One with additional { id }", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + %s { + name + toTargets { + ... 
on %s { + name + _additional { id } + } + } + } + } + } + `, className("Source"), className("TargetOne")) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + actual := result.Get("Get", className("Source")).AsSlice() + expected := []interface{}{ + map[string]interface{}{ + "name": "source with ref to One", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target a", + "_additional": map[string]interface{}{ + "id": targetOneID.String(), + }, + }, + }, + }, + map[string]interface{}{ + "name": "source with ref to Two", + "toTargets": nil, + }, + map[string]interface{}{ + "name": "source with ref to both", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target a", + "_additional": map[string]interface{}{ + "id": targetOneID.String(), + }, + }, + }, + }, + map[string]interface{}{ + "name": "source without refs", + "toTargets": nil, + }, + } + + assert.ElementsMatch(t, expected, actual) + }) + + t.Run("requesting references of type Two without additional { id }", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + %s { + name + toTargets { + ... 
on %s { + name + } + } + } + } + } + `, className("Source"), className("TargetTwo")) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + actual := result.Get("Get", className("Source")).AsSlice() + expected := []interface{}{ + map[string]interface{}{ + "name": "source with ref to Two", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target b", + }, + }, + }, + map[string]interface{}{ + "name": "source with ref to One", + "toTargets": nil, + }, + map[string]interface{}{ + "name": "source with ref to both", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target b", + }, + }, + }, + map[string]interface{}{ + "name": "source without refs", + "toTargets": nil, + }, + } + + assert.ElementsMatch(t, expected, actual) + }) + + t.Run("requesting references of type Two with additional { id }", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + %s { + name + toTargets { + ... on %s { + name + _additional { id } + } + } + } + } + } + `, className("Source"), className("TargetTwo")) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + actual := result.Get("Get", className("Source")).AsSlice() + expected := []interface{}{ + map[string]interface{}{ + "name": "source with ref to Two", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target b", + "_additional": map[string]interface{}{ + "id": targetTwoID.String(), + }, + }, + }, + }, + map[string]interface{}{ + "name": "source with ref to One", + "toTargets": nil, + }, + map[string]interface{}{ + "name": "source with ref to both", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target b", + "_additional": map[string]interface{}{ + "id": targetTwoID.String(), + }, + }, + }, + }, + map[string]interface{}{ + "name": "source without refs", + "toTargets": nil, + }, + } + + assert.ElementsMatch(t, expected, actual) + }) + + t.Run("requesting references of both types without additional { id }", + func(t *testing.T) { + query 
:= fmt.Sprintf(` + { + Get { + %s { + name + toTargets { + ... on %s { + name + } + ... on %s { + name + } + } + } + } + } + `, className("Source"), className("TargetOne"), className("TargetTwo")) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + actual := result.Get("Get", className("Source")).AsSlice() + expected := []interface{}{ + map[string]interface{}{ + "name": "source with ref to Two", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target b", + }, + }, + }, + map[string]interface{}{ + "name": "source with ref to One", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target a", + }, + }, + }, + map[string]interface{}{ + "name": "source with ref to both", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target a", + }, + map[string]interface{}{ + "name": "target b", + }, + }, + }, + map[string]interface{}{ + "name": "source without refs", + "toTargets": nil, + }, + } + + assert.ElementsMatch(t, expected, actual) + }) + + t.Run("requesting references of type Two with additional { id }", func(t *testing.T) { + query := fmt.Sprintf(` + { + Get { + %s { + name + toTargets { + ... on %s { + name + _additional { id } + } + ... 
on %s { + name + _additional { id } + } + } + } + } + } + `, className("Source"), className("TargetOne"), className("TargetTwo")) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + actual := result.Get("Get", className("Source")).AsSlice() + expected := []interface{}{ + map[string]interface{}{ + "name": "source with ref to Two", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target b", + "_additional": map[string]interface{}{ + "id": targetTwoID.String(), + }, + }, + }, + }, + map[string]interface{}{ + "name": "source with ref to One", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target a", + "_additional": map[string]interface{}{ + "id": targetOneID.String(), + }, + }, + }, + }, + map[string]interface{}{ + "name": "source with ref to both", + "toTargets": []interface{}{ + map[string]interface{}{ + "name": "target a", + "_additional": map[string]interface{}{ + "id": targetOneID.String(), + }, + }, + map[string]interface{}{ + "name": "target b", + "_additional": map[string]interface{}{ + "id": targetTwoID.String(), + }, + }, + }, + }, + map[string]interface{}{ + "name": "source without refs", + "toTargets": nil, + }, + } + + assert.ElementsMatch(t, expected, actual) + }) + }) + + t.Run("cleanup", func(t *testing.T) { + deleteObjectClass(t, className("Source")) + deleteObjectClass(t, className("TargetOne")) + deleteObjectClass(t, className("TargetTwo")) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/network_aggregate_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/network_aggregate_test.go new file mode 100644 index 0000000000000000000000000000000000000000..0322a7d73a8adb8373acec018af7f05f96fc2b9c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/network_aggregate_test.go @@ -0,0 +1,51 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / 
__/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +// import ( +// "encoding/json" +// "testing" + +// "github.com/weaviate/weaviate/test/helper" +// "github.com/stretchr/testify/assert" +// ) + +// func TestNetworkAggregate(t *testing.T) { +// result := AssertGraphQL(t, helper.RootAuth, ` +// { +// Network { +// Aggregate{ +// RemoteWeaviateForAcceptanceTest { +// Things { +// Instruments(groupBy:["name"]) { +// volume { +// count +// } +// } +// } +// } +// } +// } +// } +// `) + +// volume := result.Get("Network", "Aggregate", "RemoteWeaviateForAcceptanceTest", "Things", "Instruments").Result +// expected := []interface{}{ +// map[string]interface{}{ +// "volume": map[string]interface{}{ +// "count": json.Number("82"), +// }, +// }, +// } + +// assert.Equal(t, expected, volume) +// } diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/network_fetch_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/network_fetch_test.go new file mode 100644 index 0000000000000000000000000000000000000000..f1c5bc1d9b6894c04fec9e51a32b1ceaf278b073 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/network_fetch_test.go @@ -0,0 +1,75 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +// func Test_NetworkFetch(t *testing.T) { +// result := AssertGraphQL(t, helper.RootAuth, ` +// { +// Network { +// Fetch { +// Things(where: { +// class: { +// name: "bestclass" +// certainty: 0.8 +// keywords: [{value: "foo", weight: 0.9}] +// }, +// properties: { +// name: "bestproperty" +// certainty: 0.8 +// keywords: [{value: "bar", weight: 0.9}] +// operator: Equal +// valueString: "some-value" +// }, +// }) { +// beacon certainty +// } +// } +// } +// }`, +// ) + +// results := result.Get("Network", "Fetch", "Things").Result +// expected := []interface{}{ +// map[string]interface{}{ +// "beacon": "weaviate://RemoteWeaviateForAcceptanceTest/things/c2b94c9a-fea2-4f9a-ae40-6d63534633f7", +// "certainty": json.Number("0.5"), +// }, +// map[string]interface{}{ +// "beacon": "weaviate://RemoteWeaviateForAcceptanceTest/things/32fc9b12-00b8-46b2-962d-63c1f352e090", +// "certainty": json.Number("0.7"), +// }, +// } +// assert.Equal(t, expected, results) +// } + +// func Test_NetworkFetchFuzzy(t *testing.T) { +// result := AssertGraphQL(t, helper.RootAuth, ` +// { +// Network { +// Fetch { +// Fuzzy(value:"something", certainty: 0.5) { +// beacon certainty +// } +// } +// } +// }`, +// ) + +// results := result.Get("Network", "Fetch", "Fuzzy").Result +// expected := []interface{}{ +// map[string]interface{}{ +// "beacon": "weaviate://RemoteWeaviateForAcceptanceTest/things/61c21951-3460-4189-86ad-884a17b70c16", +// "certainty": json.Number("0.5"), +// }, +// } +// assert.Equal(t, expected, results) +// } diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/network_get_meta_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/network_get_meta_test.go new file mode 100644 index 0000000000000000000000000000000000000000..aef8c8c05a860a2d7538e77b7551f537e3c42ca3 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/network_get_meta_test.go @@ -0,0 +1,50 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +// import ( +// "encoding/json" +// "testing" + +// "github.com/weaviate/weaviate/test/helper" +// "github.com/stretchr/testify/assert" +// ) + +// func TestNetworkMeta(t *testing.T) { +// result := AssertGraphQL(t, helper.RootAuth, ` +// { +// Network { +// Meta{ +// RemoteWeaviateForAcceptanceTest { +// Things { +// Instruments { +// volume { +// maximum +// minimum +// mean +// } +// } +// } +// } +// } +// } +// } +// `) + +// volume := result.Get("Network", "Meta", "RemoteWeaviateForAcceptanceTest", "Things", "Instruments", "volume").Result +// expected := map[string]interface{}{ +// "mean": json.Number("82"), +// "maximum": json.Number("110"), +// "minimum": json.Number("65"), +// } +// assert.Equal(t, expected, volume) +// } diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/network_get_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/network_get_test.go new file mode 100644 index 0000000000000000000000000000000000000000..2d1f8ef7590f07899c9b928c3d2fda2e19d7d3f7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/network_get_test.go @@ -0,0 +1,36 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +// import ( +// "testing" + +// "github.com/weaviate/weaviate/test/helper" +// "github.com/stretchr/testify/assert" +// ) + +// // Note: Things.Instruments is not something that is present in our local schema +// // This is on purpose to verify that we have support for a completely different +// // schema on a remote instance. +// func TestNetworkGetSimple(t *testing.T) { +// result := AssertGraphQL(t, helper.RootAuth, "{ Network { Get { RemoteWeaviateForAcceptanceTest { Things { Instruments { name } } } } }") +// instruments := result.Get("Network", "Get", "RemoteWeaviateForAcceptanceTest", "Things", "Instruments").AsSlice() + +// expected := []interface{}{ +// map[string]interface{}{"name": "Piano"}, +// map[string]interface{}{"name": "Guitar"}, +// map[string]interface{}{"name": "Bass Guitar"}, +// map[string]interface{}{"name": "Talkbox"}, +// } + +// assert.ElementsMatch(t, expected, instruments) +// } diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/objects_helper.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/objects_helper.go new file mode 100644 index 0000000000000000000000000000000000000000..b5eba755d77c8876132cc675080fdbf3ae2596ad --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/objects_helper.go @@ -0,0 +1,35 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
// createObjectClass registers the given class with the schema via the shared
// test helper, failing the test on error.
func createObjectClass(t *testing.T, class *models.Class) {
	helper.CreateClass(t, class)
}

// createObject imports a single object via the shared test helper, failing
// the test on error.
func createObject(t *testing.T, object *models.Object) {
	helper.CreateObject(t, object)
}

// createObjectsBatch imports multiple objects in one batch request via the
// shared test helper, failing the test on error.
func createObjectsBatch(t *testing.T, objects []*models.Object) {
	helper.CreateObjectsBatch(t, objects)
}

// deleteObjectClass removes the named class (and its objects) via the shared
// test helper, failing the test on error.
func deleteObjectClass(t *testing.T, class string) {
	helper.DeleteClass(t, class)
}
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "fmt" + "math/rand" + "testing" + "time" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/nodes" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/schema/crossref" + enthnsw "github.com/weaviate/weaviate/entities/vectorindex/hnsw" + "github.com/weaviate/weaviate/entities/verbosity" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/cities" + "github.com/weaviate/weaviate/test/helper/sample-schema/multishard" +) + +func TestGraphQL_AsyncIndexing(t *testing.T) { + ctx := context.Background() + compose, err := docker.New(). + WithWeaviate(). + WithText2VecContextionary(). + WithBackendFilesystem(). + WithWeaviateEnv("ASYNC_INDEXING", "true"). + WithWeaviateEnv("ASYNC_INDEXING_STALE_TIMEOUT", "100ms"). + WithWeaviateEnv("QUEUE_SCHEDULER_INTERVAL", "100ms"). + WithWeaviateEnv("API_BASED_MODULES_DISABLED", "true"). 
// TestGraphQL_SyncIndexing runs the full GraphQL acceptance suite against a
// locally running instance with default (synchronous) indexing.
func TestGraphQL_SyncIndexing(t *testing.T) {
	testGraphQL(t, "localhost:8080")
}

// testGraphQL is the shared GraphQL acceptance suite. It points the test
// client at host, imports all fixture classes and data, runs the Explore /
// Get / Aggregate subtests against them, and tears the classes down again.
// NOTE: the subtests are order-dependent — data import must precede the
// query subtests, and teardown must precede the final custom-vector Explore.
func testGraphQL(t *testing.T, host string) {
	helper.SetupClient(host)
	// tests with classes that have objects with same uuids
	t.Run("import test data (near object search class)", addTestDataNearObjectSearch)

	t.Run("running Get nearObject against shadowed objects", runningGetNearObjectWithShadowedObjects)
	t.Run("running Aggregate nearObject against shadowed objects", runningAggregateNearObjectWithShadowedObjects)
	t.Run("running Explore nearObject against shadowed objects", runningExploreNearObjectWithShadowedObjects)

	// the shadowed-object classes are removed before the main fixtures go in
	deleteObjectClass(t, "NearObjectSearch")
	deleteObjectClass(t, "NearObjectSearchShadow")

	// setup tests
	t.Run("setup test schema", func(t *testing.T) { addTestSchema(t, host) })
	t.Run("import test data (city, country, airport)", func(t *testing.T) { addTestDataCityAirport(t, host) })
	t.Run("import test data (companies)", addTestDataCompanies)
	t.Run("import test data (person)", addTestDataPersons)
	t.Run("import test data (pizzas)", addTestDataPizzas)
	t.Run("import test data (array class)", addTestDataArrayClass)
	t.Run("import test data (duplicates class)", addTestDataDuplicatesClass)
	t.Run("import test data (500 random strings)", addTestDataRansomNotes)
	t.Run("import test data (multi shard)", addTestDataMultiShard)
	t.Run("import test data (date field class)", addDateFieldClass)
	t.Run("import test data (custom vector class)", addTestDataCVC)
	t.Run("import test data (class without properties)", addTestDataNoProperties)
	t.Run("import test data (cursor api)", addTestDataCursorSearch)

	t.Run("aggregates with hybrid search", aggregationWithHybridSearch)

	// explore tests
	t.Run("expected explore failures with invalid conditions", exploreWithExpectedFailures)

	// get tests
	t.Run("getting objects", gettingObjects)
	t.Run("getting objects with filters", gettingObjectsWithFilters)
	t.Run("getting objects with geo filters", gettingObjectsWithGeoFilters)
	t.Run("getting objects with grouping", gettingObjectsWithGrouping)
	t.Run("getting objects with additional props", gettingObjectsWithAdditionalProps)
	t.Run("getting objects with near fields", gettingObjectsWithNearFields)
	t.Run("getting objects with near fields with multi shard setup", gettingObjectsWithNearFieldsMultiShard)
	t.Run("getting objects with sort", gettingObjectsWithSort)
	t.Run("getting objects with hybrid search", getWithHybridSearch)
	t.Run("expected get failures with invalid conditions", getsWithExpectedFailures)
	t.Run("cursor through results", getWithCursorSearch)
	t.Run("groupBy objects", groupByObjects)

	// aggregate tests
	t.Run("aggregates noPropsClass without grouping", aggregateNoPropsClassWithoutGroupByTest)
	t.Run("aggregates arrayClass without grouping", aggregateArrayClassWithoutGroupByTest)
	t.Run("aggregates arrayClass with grouping", aggregateArrayClassWithGroupByTest)
	t.Run("aggregates duplicatesClass without grouping", aggregateDuplicatesClassWithoutGroupByTest)
	t.Run("aggregates duplicatesClass with grouping", aggregateDuplicatesClassWithGroupByTest)
	t.Run("aggregates city without grouping", aggregateCityClassWithoutGroupByTest)
	t.Run("aggregates city with grouping", aggregateCityClassWithGroupByTest)

	t.Run("aggregates local meta string props not set everywhere", localMeta_StringPropsNotSetEverywhere)
	t.Run("aggregates local meta with where and nearText filters", localMetaWithWhereAndNearTextFilters)
	t.Run("aggregates local meta with where and nearObject filters", localMetaWithWhereAndNearObjectFilters)
	t.Run("aggregates local meta with nearVector filters", localMetaWithNearVectorFilter)
	t.Run("aggregates local meta with where and nearVector nearMedia", localMetaWithWhereAndNearVectorFilters)
	t.Run("aggregates local meta with where groupBy and nearMedia filters", localMetaWithWhereGroupByNearMediaFilters)
	t.Run("aggregates local meta with objectLimit and nearMedia filters", localMetaWithObjectLimit)
	t.Run("aggregates on date fields", aggregatesOnDateFields)
	t.Run("aggregates using an alias", aggregatesUsingAlias)

	t.Run("expected aggregate failures with invalid conditions", aggregatesWithExpectedFailures)

	t.Run("metrics count is stable when more classes are added", metricsCount)

	// tear down
	deleteObjectClass(t, "Person")
	deleteObjectClass(t, "Pizza")
	deleteObjectClass(t, "Country")
	deleteObjectClass(t, "City")
	deleteObjectClass(t, "Airport")
	deleteObjectClass(t, "Company")
	deleteObjectClass(t, "RansomNote")
	deleteObjectClass(t, "MultiShard")
	deleteObjectClass(t, "HasDateField")
	deleteObjectClass(t, arrayClassName)
	deleteObjectClass(t, duplicatesClassName)
	deleteObjectClass(t, noPropsClassName)
	deleteObjectClass(t, "CursorClass")
	deleteObjectClass(t, "CompanyGroup")

	// only run after everything else is deleted, this way, we can also run an
	// all-class Explore since all vectors which are now left have the same
	// dimensions.

	t.Run("getting objects with custom vectors", gettingObjectsWithCustomVectors)
	t.Run("explore objects with custom vectors", exploreObjectsWithCustomVectors)

	deleteObjectClass(t, "CustomVectorClass")
}
// boolRef returns a pointer to a fresh copy of the given bool, which is
// convenient for populating optional *bool fields in API models.
func boolRef(a bool) *bool {
	v := a
	return &v
}
false, + }, + }, + }, + }, + }) + + createObjectClass(t, &models.Class{ + Class: "Person", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": false, + }, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizePropertyName": false, + }, + }, + }, + { + Name: "livesIn", + DataType: []string{"City"}, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizePropertyName": false, + }, + }, + }, + { + Name: "profession", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationField, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizePropertyName": false, + }, + }, + }, + { + Name: "about", + DataType: schema.DataTypeTextArray.PropString(), + Tokenization: models.PropertyTokenizationField, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizePropertyName": false, + }, + }, + }, + }, + }) + + createObjectClass(t, &models.Class{ + Class: "Pizza", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": false, + }, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationField, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizePropertyName": false, + }, + }, + }, + { + Name: "description", + DataType: []string{string(schema.DataTypeText)}, + Tokenization: models.PropertyTokenizationWord, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizePropertyName": false, + }, + }, + }, 
+ }, + }) + + hnswConfig := enthnsw.NewDefaultUserConfig() + hnswConfig.MaxConnections = 64 // RansomNote tests require higher default max connections (reduced in 1.26) + createObjectClass(t, &models.Class{ + Class: "RansomNote", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + VectorIndexConfig: hnswConfig, + Properties: []*models.Property{ + { + Name: "contents", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + + createObjectClass(t, multishard.ClassContextionaryVectorizer()) + + createObjectClass(t, &models.Class{ + Class: "HasDateField", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "unique", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "timestamp", + DataType: []string{"date"}, + }, + { + Name: "identical", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + + createObjectClass(t, &models.Class{ + Class: "CustomVectorClass", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + + createObjectClass(t, noPropsClassSchema()) + createObjectClass(t, arrayClassSchema()) + createObjectClass(t, duplicatesClassSchema()) +} + +const ( + netherlands strfmt.UUID = cities.Netherlands + germany strfmt.UUID = cities.Germany + amsterdam strfmt.UUID = cities.Amsterdam + rotterdam strfmt.UUID = cities.Rotterdam + berlin strfmt.UUID = cities.Berlin + dusseldorf strfmt.UUID = cities.Dusseldorf + missingisland strfmt.UUID = cities.Missingisland + nullisland strfmt.UUID = cities.Nullisland + airport1 strfmt.UUID = 
cities.Airport1 + airport2 strfmt.UUID = cities.Airport2 + airport3 strfmt.UUID = cities.Airport3 + airport4 strfmt.UUID = cities.Airport4 + cvc1 strfmt.UUID = "1ffeb3e1-1258-4c2a-afc3-55543f6c44b8" + cvc2 strfmt.UUID = "df22e5c4-5d17-49f9-a71d-f392a82bc086" + cvc3 strfmt.UUID = "c28a039a-d509-4c2e-940a-8b109e5bebf4" + + quattroFormaggi strfmt.UUID = "152500c6-4a8a-4732-aede-9fcab7e43532" + fruttiDiMare strfmt.UUID = "a828e9aa-d1b6-4644-8569-30d404e31a0d" + hawaii strfmt.UUID = "ed75037b-0748-4970-811e-9fe835ed41d1" + doener strfmt.UUID = "a655292d-1b93-44a1-9a47-57b6922bb455" +) + +var ( + historyAmsterdam = cities.HistoryAmsterdam + historyRotterdam = cities.HistoryRotterdam + historyBerlin = cities.HistoryBerlin + historyDusseldorf = cities.HistoryDusseldorf +) + +func addTestDataCityAirport(t *testing.T, host string) { + cities.InsertCountryCityAirportObjects(t, host) +} + +func addTestDataCompanies(t *testing.T) { + var ( + microsoft1 strfmt.UUID = "cfa3b21e-ca4f-4db7-a432-7fc6a23c534d" + microsoft2 strfmt.UUID = "8f75ed97-39dd-4294-bff7-ecabd7923062" + microsoft3 strfmt.UUID = "f343f51d-7e05-4084-bd66-d504db3b6bec" + apple1 strfmt.UUID = "477fec91-1292-4928-8f53-f0ff49c76900" + apple2 strfmt.UUID = "bb2cfdba-d4ba-4cf8-abda-e719ef35ac33" + apple3 strfmt.UUID = "b71d2b4c-3da1-4684-9c5e-aabd2a4f2998" + google1 strfmt.UUID = "8c2e21fc-46fe-4999-b41c-a800595129af" + google2 strfmt.UUID = "62b969c6-f184-4be0-8c40-7470af417cfc" + google3 strfmt.UUID = "c7829929-2037-4420-acbc-a433269feb93" + ) + + type companyTemplate struct { + id strfmt.UUID + name string + inCity []strfmt.UUID + } + + companies := []companyTemplate{ + {id: microsoft1, name: "Microsoft Inc.", inCity: []strfmt.UUID{dusseldorf}}, + {id: microsoft2, name: "Microsoft Incorporated", inCity: []strfmt.UUID{dusseldorf, amsterdam}}, + {id: microsoft3, name: "Microsoft", inCity: []strfmt.UUID{berlin}}, + {id: apple1, name: "Apple Inc."}, + {id: apple2, name: "Apple Incorporated"}, + {id: apple3, name: 
"Apple"}, + {id: google1, name: "Google Inc."}, + {id: google2, name: "Google Incorporated"}, + {id: google3, name: "Google"}, + } + + // companies + for _, company := range companies { + inCity := []interface{}{} + for _, c := range company.inCity { + inCity = append(inCity, + map[string]interface{}{ + "beacon": crossref.NewLocalhost("City", c).String(), + }) + } + createObject(t, &models.Object{ + Class: "Company", + ID: company.id, + Properties: map[string]interface{}{ + "inCity": inCity, + "name": company.name, + }, + }) + } + + assertGetObjectEventually(t, companies[len(companies)-1].id) +} + +func addTestDataPersons(t *testing.T) { + var ( + alice strfmt.UUID = "5d0fa6ee-21c4-4b46-a735-f0208717837d" + bob strfmt.UUID = "8615585a-2960-482d-b19d-8bee98ade52c" + john strfmt.UUID = "3ef44474-b5e5-455d-91dc-d917b5b76165" + petra strfmt.UUID = "15d222c9-8c36-464b-bedb-113faa1c1e4c" + ) + + type personTemplate struct { + id strfmt.UUID + name string + livesIn []strfmt.UUID + profession string + about []string + } + + persons := []personTemplate{ + { + id: alice, name: "Alice", livesIn: []strfmt.UUID{}, profession: "Quality Control Analyst", + about: []string{"loves travelling very much"}, + }, + { + id: bob, name: "Bob", livesIn: []strfmt.UUID{amsterdam}, profession: "Mechanical Engineer", + about: []string{"loves travelling", "hates cooking"}, + }, + { + id: john, name: "John", livesIn: []strfmt.UUID{amsterdam, berlin}, profession: "Senior Mechanical Engineer", + about: []string{"hates swimming", "likes cooking", "loves travelling"}, + }, + { + id: petra, name: "Petra", livesIn: []strfmt.UUID{amsterdam, berlin, dusseldorf}, profession: "Quality Assurance Manager", + about: []string{"likes swimming", "likes cooking for family"}, + }, + } + + // persons + for _, person := range persons { + livesIn := []interface{}{} + for _, c := range person.livesIn { + livesIn = append(livesIn, + map[string]interface{}{ + "beacon": crossref.NewLocalhost("City", c).String(), + }) + 
} + + createObject(t, &models.Object{ + Class: "Person", + ID: person.id, + Properties: map[string]interface{}{ + "livesIn": livesIn, + "name": person.name, + "profession": person.profession, + "about": person.about, + }, + }) + } + + assertGetObjectEventually(t, persons[len(persons)-1].id) +} + +func addTestDataCompanyGroups(t *testing.T) { + var ( + microsoft1 strfmt.UUID = "1fa3b21e-ca4f-4db7-a432-7fc6a23c534d" + microsoft2 strfmt.UUID = "1f75ed97-39dd-4294-bff7-ecabd7923062" + microsoft3 strfmt.UUID = "1343f51d-7e05-4084-bd66-d504db3b6bec" + apple1 strfmt.UUID = "177fec91-1292-4928-8f53-f0ff49c76900" + apple2 strfmt.UUID = "1b2cfdba-d4ba-4cf8-abda-e719ef35ac33" + apple3 strfmt.UUID = "171d2b4c-3da1-4684-9c5e-aabd2a4f2998" + google1 strfmt.UUID = "1c2e21fc-46fe-4999-b41c-a800595129af" + google2 strfmt.UUID = "12b969c6-f184-4be0-8c40-7470af417cfc" + google3 strfmt.UUID = "17829929-2037-4420-acbc-a433269feb93" + ) + + type companyTemplate struct { + id strfmt.UUID + name string + inCity string + } + + companies := []companyTemplate{ + {id: microsoft1, name: "Microsoft Inc.", inCity: "dusseldorf"}, + {id: microsoft2, name: "Microsoft Incorporated", inCity: "amsterdam"}, + {id: microsoft3, name: "Microsoft", inCity: "berlin"}, + {id: apple1, name: "Apple Inc.", inCity: "berlin"}, + {id: apple2, name: "Apple Incorporated", inCity: "dusseldorf"}, + {id: apple3, name: "Apple", inCity: "amsterdam"}, + {id: google1, name: "Google Inc.", inCity: "amsterdam"}, + {id: google2, name: "Google Incorporated", inCity: "berlin"}, + {id: google3, name: "Google", inCity: "dusseldorf"}, + } + + // companies + for _, company := range companies { + + createObject(t, &models.Object{ + Class: "CompanyGroup", + ID: company.id, + Properties: map[string]interface{}{ + "city": company.inCity, + "name": company.name, + }, + }) + fmt.Printf("created company %s\n", company.name) + } + + assertGetObjectEventually(t, companies[len(companies)-1].id) +} + +func addTestDataPizzas(t *testing.T) { + 
createObject(t, &models.Object{ + Class: "Pizza", + ID: quattroFormaggi, + Properties: map[string]interface{}{ + "name": "Quattro Formaggi", + "description": "Pizza quattro formaggi Italian: [ˈkwattro forˈmaddʒi] (four cheese pizza) is a variety of pizza in Italian cuisine that is topped with a combination of four kinds of cheese, usually melted together, with (rossa, red) or without (bianca, white) tomato sauce. It is popular worldwide, including in Italy,[1] and is one of the iconic items from pizzerias's menus.", + }, + }) + createObject(t, &models.Object{ + Class: "Pizza", + ID: fruttiDiMare, + Properties: map[string]interface{}{ + "name": "Frutti di Mare", + "description": "Frutti di Mare is an Italian type of pizza that may be served with scampi, mussels or squid. It typically lacks cheese, with the seafood being served atop a tomato sauce.", + }, + }) + createObject(t, &models.Object{ + Class: "Pizza", + ID: hawaii, + Properties: map[string]interface{}{ + "name": "Hawaii", + "description": "Universally accepted to be the best pizza ever created.", + }, + }) + createObject(t, &models.Object{ + Class: "Pizza", + ID: doener, + Properties: map[string]interface{}{ + "name": "Doener", + "description": "A innovation, some say revolution, in the pizza industry.", + }, + }) + + assertGetObjectEventually(t, quattroFormaggi) + assertGetObjectEventually(t, fruttiDiMare) + assertGetObjectEventually(t, hawaii) + assertGetObjectEventually(t, doener) +} + +func addTestDataCVC(t *testing.T) { + // add one object individually + createObject(t, &models.Object{ + Class: "CustomVectorClass", + ID: cvc1, + Vector: []float32{1.1, 1.1, 1.1}, + Properties: map[string]interface{}{ + "name": "Ford", + }, + }) + + assertGetObjectEventually(t, cvc1) + + createObjectsBatch(t, []*models.Object{ + { + Class: "CustomVectorClass", + ID: cvc2, + Vector: []float32{1.1, 1.1, 0.1}, + Properties: map[string]interface{}{ + "name": "Tesla", + }, + }, + { + Class: "CustomVectorClass", + ID: cvc3, + 
Vector: []float32{1.1, 0, 0}, + Properties: map[string]interface{}{ + "name": "Mercedes", + }, + }, + }) + assertGetObjectEventually(t, cvc3) +} + +func addTestDataNoProperties(t *testing.T) { + for _, object := range noPropsClassObjects() { + createObject(t, object) + assertGetObjectEventually(t, object.ID) + } +} + +func addTestDataArrayClass(t *testing.T) { + for _, object := range arrayClassObjects() { + createObject(t, object) + assertGetObjectEventually(t, object.ID) + } +} + +func addTestDataDuplicatesClass(t *testing.T) { + for _, object := range duplicatesClassObjects() { + createObject(t, object) + assertGetObjectEventually(t, object.ID) + } +} + +func addTestDataRansomNotes(t *testing.T) { + const ( + noteLengthMin = 4 + noteLengthMax = 1024 + + batchSize = 10 + numBatches = 50 + ) + + className := "RansomNote" + seededRand := rand.New(rand.NewSource(time.Now().UnixNano())) + + for i := 0; i < numBatches; i++ { + batch := make([]*models.Object, batchSize) + for j := 0; j < batchSize; j++ { + noteLength := noteLengthMin + seededRand.Intn(noteLengthMax-noteLengthMin+1) + note := helper.GetRandomString(noteLength) + + batch[j] = &models.Object{ + Class: className, + Properties: map[string]interface{}{"contents": note}, + } + } + + createObjectsBatch(t, batch) + } + + t.Run("wait for all objects to be indexed", func(t *testing.T) { + // wait for all of the objects to get indexed + waitForIndexing(t, className) + }) +} + +func addTestDataMultiShard(t *testing.T) { + for _, multiShard := range multishard.Objects() { + helper.CreateObject(t, multiShard) + helper.AssertGetObjectEventually(t, multiShard.Class, multiShard.ID) + } +} + +func addTestDataNearObjectSearch(t *testing.T) { + classNames := []string{"NearObjectSearch", "NearObjectSearchShadow"} + ids := []strfmt.UUID{ + "aa44bbee-ca5f-4db7-a412-5fc6a2300001", + "aa44bbee-ca5f-4db7-a412-5fc6a2300002", + "aa44bbee-ca5f-4db7-a412-5fc6a2300003", + "aa44bbee-ca5f-4db7-a412-5fc6a2300004", + 
"aa44bbee-ca5f-4db7-a412-5fc6a2300005", + } + names := []string{ + "Mount Everest", + "Amsterdam is a cool city", + "Football is a game where people run after ball", + "Berlin is Germany's capital city", + "London is a cool city", + } + + for _, className := range classNames { + createObjectClass(t, &models.Class{ + Class: className, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + } + + for i, id := range ids { + createObject(t, &models.Object{ + Class: classNames[0], + ID: id, + Properties: map[string]interface{}{ + "name": names[i], + }, + }) + assertGetObjectEventually(t, id) + createObject(t, &models.Object{ + Class: classNames[1], + ID: id, + Properties: map[string]interface{}{ + "name": fmt.Sprintf("altered contents of: %v", names[i]), + }, + }) + assertGetObjectEventually(t, id) + } + + createObject(t, &models.Object{ + Class: classNames[0], + ID: "aa44bbee-ca5f-4db7-a412-5fc6a2300011", + Properties: map[string]interface{}{ + "name": "the same content goes here just for explore tests", + }, + }) + assertGetObjectEventually(t, "aa44bbee-ca5f-4db7-a412-5fc6a2300011") + createObject(t, &models.Object{ + Class: classNames[1], + ID: "aa44bbee-ca5f-4db7-a412-5fc6a2300011", + Properties: map[string]interface{}{ + "name": "the same content goes here just for explore tests", + }, + }) + assertGetObjectEventually(t, "aa44bbee-ca5f-4db7-a412-5fc6a2300011") + + waitForIndexing(t, classNames[0]) + waitForIndexing(t, classNames[1]) +} + +const ( + cursorClassID1 = strfmt.UUID("00000000-0000-0000-0000-000000000001") + cursorClassID2 = strfmt.UUID("00000000-0000-0000-0000-000000000002") + cursorClassID3 = strfmt.UUID("00000000-0000-0000-0000-000000000003") + cursorClassID4 = 
strfmt.UUID("00000000-0000-0000-0000-000000000004") + cursorClassID5 = strfmt.UUID("00000000-0000-0000-0000-000000000005") + cursorClassID6 = strfmt.UUID("00000000-0000-0000-0000-000000000006") + cursorClassID7 = strfmt.UUID("00000000-0000-0000-0000-000000000007") +) + +func addTestDataCursorSearch(t *testing.T) { + className := "CursorClass" + ids := []strfmt.UUID{ + cursorClassID1, + cursorClassID2, + cursorClassID3, + cursorClassID4, + cursorClassID5, + cursorClassID6, + cursorClassID7, + } + names := []string{ + "Mount Everest", + "Amsterdam is a cool city", + "Football is a game where people run after ball", + "Berlin is Germany's capital city", + "London is a cool city", + "Wroclaw is a really cool city", + "Brisbane is a city in Australia", + } + + createObjectClass(t, &models.Class{ + Class: className, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + + for i, id := range ids { + createObject(t, &models.Object{ + Class: className, + ID: id, + Properties: map[string]interface{}{ + "name": names[i], + }, + }) + assertGetObjectEventually(t, id) + } + + waitForIndexing(t, className) +} + +func addDateFieldClass(t *testing.T) { + timestamps := []string{ + "2022-06-16T22:18:59.640162Z", + "2022-06-16T22:19:01.495967Z", + "2022-06-16T22:19:03.495596Z", + "2022-06-16T22:19:04.3828349Z", + "2022-06-16T22:19:05.894857Z", + "2022-06-16T22:19:06.394958Z", + "2022-06-16T22:19:07.589828Z", + "2022-06-16T22:19:08.112395Z", + "2022-06-16T22:19:10.339493Z", + "2022-06-16T22:19:11.837473Z", + } + + for i := 0; i < len(timestamps); i++ { + createObject(t, &models.Object{ + Class: "HasDateField", + Properties: map[string]interface{}{ + "unique": fmt.Sprintf("#%d", i+1), + "timestamp": timestamps[i], + "identical": "hello!", + 
}, + }) + } +} + +func waitForIndexing(t *testing.T, className string) { + assert.EventuallyWithT(t, func(ct *assert.CollectT) { + verbose := verbosity.OutputVerbose + params := nodes.NewNodesGetClassParams().WithOutput(&verbose).WithClassName(className) + body, clientErr := helper.Client(t).Nodes.NodesGetClass(params, nil) + resp, err := body.Payload, clientErr + require.NoError(ct, err) + require.NotEmpty(ct, resp.Nodes) + for _, n := range resp.Nodes { + require.NotEmpty(ct, n.Shards) + for _, s := range n.Shards { + assert.Equal(ct, "READY", s.VectorIndexingStatus) + } + } + }, 15*time.Second, 500*time.Millisecond) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/unindexed_property_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/unindexed_property_test.go new file mode 100644 index 0000000000000000000000000000000000000000..92b80ac492c5d5bf4a5633b372bd323d3e2d8260 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/graphql_resolvers/unindexed_property_test.go @@ -0,0 +1,154 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/objects" + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +func Test_UnindexedProperty(t *testing.T) { + className := "NoIndexTestClass" + + defer func() { + delParams := clschema.NewSchemaObjectsDeleteParams().WithClassName(className) + delResp, err := helper.Client(t).Schema.SchemaObjectsDelete(delParams, nil) + helper.AssertRequestOk(t, delResp, err, nil) + }() + + t.Run("creating a class with two string props", func(t *testing.T) { + vFalse := false + vTrue := true + + c := &models.Class{ + Class: className, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + IndexFilterable: &vTrue, + IndexSearchable: &vTrue, + }, + { + Name: "hiddenName", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + }, + }, + } + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + }) + + t.Run("creating an object", func(t *testing.T) { + params := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + Class: className, + ID: "f5ffb60f-4c13-4d07-a395-829b2396c7b9", + Properties: map[string]interface{}{ + "name": "elephant", + "hiddenName": "zebra", + }, + }) + 
resp, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + }) + + assertGetObjectEventually(t, "f5ffb60f-4c13-4d07-a395-829b2396c7b9") + + t.Run("searching for the indexed prop", func(t *testing.T) { + query := ` + { + Get { + NoIndexTestClass(where:{ + operator: Equal, + valueText: "elephant" + path:["name"] + }){ + name + hiddenName + } + } + } + ` + + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + objects := result.Get("Get", className).AsSlice() + + expected := []interface{}{ + map[string]interface{}{"name": "elephant", "hiddenName": "zebra"}, + } + + assert.ElementsMatch(t, expected, objects) + }) + + t.Run("searching for the non-indexed prop", func(t *testing.T) { + query := ` + { + Get { + NoIndexTestClass(where:{ + operator: Equal, + valueText: "zebra" + path:["hiddenName"] + }){ + name + hiddenName + } + } + } + ` + res, err := graphqlhelper.QueryGraphQL(t, helper.RootAuth, "", query, nil) + require.Nil(t, err) + assert.True(t, len(res.Errors) > 0, "this query should be impossible as the field was not indexed") + }) +} + +func assertGetObjectEventually(t *testing.T, uuid strfmt.UUID) *models.Object { + var ( + resp *objects.ObjectsGetOK + err error + ) + + checkThunk := func() interface{} { + resp, err = helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams().WithID(uuid), nil) + return err == nil + } + + helper.AssertEventuallyEqual(t, true, checkThunk) + + var object *models.Object + + helper.AssertRequestOk(t, resp, err, func() { + object = resp.Payload + }) + + return object +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/batch_references_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/batch_references_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5143e60d3efa3d8428bb18d78895978c4c80d64b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/batch_references_test.go @@ -0,0 
+1,83 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" +) + +const ( + UUID0 = "00000000-0000-0000-0000-000000000001" + UUID1 = "00000000-0000-0000-0000-000000000002" + UUID2 = "00000000-0000-0000-0000-000000000003" +) + +func TestGrpcBatchReferences(t *testing.T) { + ctx := context.Background() + + helper.SetupClient("localhost:8080") + grpcClient, _ := newClient(t) + + clsA := articles.ArticlesClass() + clsP := articles.ParagraphsClass() + + helper.DeleteClass(t, clsP.Class) + helper.CreateClass(t, clsP) + defer helper.DeleteClass(t, clsP.Class) + + helper.DeleteClass(t, clsA.Class) + helper.CreateClass(t, clsA) + defer helper.DeleteClass(t, clsA.Class) + + // Add two paragraphs + paragraphs := []*models.Object{ + articles.NewParagraph().WithContents("Paragraph 1").WithID(UUID1).Object(), + articles.NewParagraph().WithContents("Paragraph 2").WithID(UUID2).Object(), + } + helper.CreateObjectsBatch(t, paragraphs) + + // Add an article + article := articles.NewArticle().WithTitle("Article 1").WithID(UUID0).Object() + helper.CreateObject(t, article) + + // Batch add references + res, err := grpcClient.BatchReferences(ctx, &pb.BatchReferencesRequest{ + References: []*pb.BatchReference{ + { + Name: "hasParagraphs", + FromCollection: clsA.Class, + ToCollection: &clsP.Class, + FromUuid: UUID0, + ToUuid: UUID1, + }, // Test with ToCollection + { + Name: "hasParagraphs", + FromCollection: clsA.Class, + FromUuid: UUID0, + 
ToUuid: UUID2, + }, // Test without ToCollection + }, + }) + require.NoError(t, err, "BatchReferences should not return an error") + require.Len(t, res.Errors, 0, "Expected no errors in batch references response") + + obj, err := helper.GetObject(t, clsA.Class, UUID0) + require.NoError(t, err, "GetObject should not return an error") + require.Len(t, obj.Properties.(map[string]any)["hasParagraphs"], 2, "Expected two references in hasParagraphs") +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/batching_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/batching_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b06cda02a9b44b78b3e6d20f5626162101a0de34 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/batching_test.go @@ -0,0 +1,189 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" +) + +func TestGRPC_Batching(t *testing.T) { + helper.SetupClient("localhost:8080") + ctx := context.Background() + grpcClient, _ := newClient(t) + + clsA := articles.ArticlesClass() + clsP := articles.ParagraphsClass() + + setupClasses := func() func() { + helper.DeleteClass(t, clsA.Class) + helper.DeleteClass(t, clsP.Class) + // Create the schema + helper.CreateClass(t, clsP) + helper.CreateClass(t, clsA) + return func() { + helper.DeleteClass(t, clsA.Class) + helper.DeleteClass(t, clsP.Class) + } + } + + t.Run("send objects and references without errors", func(t *testing.T) { + defer setupClasses()() + + // Open up a stream to read messages from + stream, streamId := startStream(ctx, t, grpcClient) + + // Send some articles and paragraphs in send message + objects := []*pb.BatchObject{ + {Collection: clsA.Class, Uuid: UUID0}, + {Collection: clsP.Class, Uuid: UUID1}, + {Collection: clsP.Class, Uuid: UUID2}, + } + _, err := grpcClient.BatchSend(ctx, &pb.BatchSendRequest{ + StreamId: streamId, + Message: &pb.BatchSendRequest_Objects_{Objects: &pb.BatchSendRequest_Objects{Values: objects}}, + }) + require.NoError(t, err, "BatchSend should not return an error") + + // Send some references between the articles and paragraphs + references := []*pb.BatchReference{ + {Name: "hasParagraphs", FromCollection: clsA.Class, FromUuid: UUID0, ToUuid: UUID1}, + {Name: "hasParagraphs", FromCollection: clsA.Class, FromUuid: UUID0, ToUuid: UUID2}, + } + _, err = grpcClient.BatchSend(ctx, &pb.BatchSendRequest{ + StreamId: streamId, + Message: &pb.BatchSendRequest_References_{References: &pb.BatchSendRequest_References{ + Values: references, + }}, + }) + require.NoError(t, err, 
"BatchSend References should not return an error") + + // Send stop message + _, err = grpcClient.BatchSend(ctx, &pb.BatchSendRequest{ + StreamId: streamId, + Message: &pb.BatchSendRequest_Stop_{Stop: &pb.BatchSendRequest_Stop{}}, + }) + require.NoError(t, err, "BatchSend Stop should not return an error") + + // Read the stop message + resp, err := stream.Recv() + require.NoError(t, err, "BatchStream should return a response") + end := resp.GetStop() + require.NotNil(t, end, "End message should not be nil") + + // Validate the number of articles created + listA, err := helper.ListObjects(t, clsA.Class) + require.NoError(t, err, "ListObjects should not return an error") + require.Len(t, listA.Objects, 1, "Number of articles created should match the number sent") + require.NotNil(t, listA.Objects[0].Properties.(map[string]any)["hasParagraphs"], "hasParagraphs should not be nil") + require.Len(t, listA.Objects[0].Properties.(map[string]any)["hasParagraphs"], 2, "Article should have 2 paragraphs") + + listP, err := helper.ListObjects(t, clsP.Class) + require.NoError(t, err, "ListObjects should not return an error") + require.Len(t, listP.Objects, 2, "Number of paragraphs created should match the number sent") + }) + + t.Run("send objects that should partially error and read the errors correctly", func(t *testing.T) { + defer setupClasses()() + + // Open up a stream to read messages from + stream, streamId := startStream(ctx, t, grpcClient) + + // Send a list of articles, one with a tenant incorrectly specified + objects := []*pb.BatchObject{ + {Collection: clsA.Class, Uuid: UUID0}, + {Collection: clsA.Class, Tenant: "tenant", Uuid: UUID1}, + {Collection: clsA.Class, Uuid: UUID2}, + } + _, err := grpcClient.BatchSend(ctx, &pb.BatchSendRequest{ + StreamId: streamId, + Message: &pb.BatchSendRequest_Objects_{Objects: &pb.BatchSendRequest_Objects{Values: objects}}, + }) + require.NoError(t, err, "BatchSend should not return an error") + + errMsg, err := stream.Recv() + 
require.NoError(t, err, "BatchStream should return a response") + require.NotNil(t, errMsg, "Error message should not be nil") + require.Equal(t, errMsg.GetError().Error, "class Article has multi-tenancy disabled, but request was with tenant") + require.Equal(t, errMsg.GetError().Index, int32(1), "Error index should be 1") + require.True(t, errMsg.GetError().IsObject, "IsObject should be true for object errors") + require.False(t, errMsg.GetError().IsReference, "IsReference should be false for object errors") + require.False(t, errMsg.GetError().IsRetriable, "IsRetriable should be false for this error") + + list, err := helper.ListObjects(t, clsA.Class) + require.NoError(t, err, "ListObjects should not return an error") + require.Len(t, list.Objects, 2, "There should be two articles") + }) + + t.Run("send references that should error and read the errors correctly", func(t *testing.T) { + defer setupClasses()() + + // Open up a stream to read messages from + stream, streamId := startStream(ctx, t, grpcClient) + + // Send some articles and paragraphs in send message + objects := []*pb.BatchObject{ + {Collection: clsA.Class, Uuid: UUID0}, + {Collection: clsP.Class, Uuid: UUID1}, + } + _, err := grpcClient.BatchSend(ctx, &pb.BatchSendRequest{ + StreamId: streamId, + Message: &pb.BatchSendRequest_Objects_{Objects: &pb.BatchSendRequest_Objects{Values: objects}}, + }) + require.NoError(t, err, "BatchSend should not return an error") + + // Send a list of references, one pointing to a non-existent object + references := []*pb.BatchReference{ + {Name: "hasParagraphs", FromCollection: clsA.Class, FromUuid: UUID0, ToUuid: UUID1}, + {Name: "hasParagraphss", FromCollection: clsA.Class, FromUuid: UUID0, ToUuid: UUID2}, + } + _, err = grpcClient.BatchSend(ctx, &pb.BatchSendRequest{ + StreamId: streamId, + Message: &pb.BatchSendRequest_References_{References: &pb.BatchSendRequest_References{ + Values: references, + }}, + }) + require.NoError(t, err, "BatchSend References should 
not return an error") + + errMsg, err := stream.Recv() + require.NoError(t, err, "BatchStream should return a response") + require.NotNil(t, errMsg, "Error message should not be nil") + require.Equal(t, errMsg.GetError().Error, "property hasParagraphss does not exist for class Article") + require.Equal(t, errMsg.GetError().Index, int32(1), "Error index should be 1") + require.True(t, errMsg.GetError().IsReference, "IsReference should be true for reference errors") + require.False(t, errMsg.GetError().IsObject, "IsObject should be false for reference errors") + require.False(t, errMsg.GetError().IsRetriable, "IsRetriable should be false for this error") + + obj, err := helper.GetObject(t, clsA.Class, UUID0) + require.NoError(t, err, "ListObjects should not return an error") + require.Equal(t, 1, len(obj.Properties.(map[string]any)["hasParagraphs"].([]any)), "Article should have 1 paragraph") + }) +} + +func startStream(ctx context.Context, t *testing.T, grpcClient pb.WeaviateClient) (pb.Weaviate_BatchStreamClient, string) { + stream, err := grpcClient.BatchStream(ctx, &pb.BatchStreamRequest{}) + require.NoError(t, err, "BatchStream should not return an error") + + // Read first message, which starts the batching process + resp, err := stream.Recv() + require.NoError(t, err, "BatchStream should return a response") + start := resp.GetStart() + require.NotNil(t, start, "Start message should not be nil") + streamId := resp.GetStreamId() + + return stream, streamId +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/filtered_search_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/filtered_search_test.go new file mode 100644 index 0000000000000000000000000000000000000000..caf3fcec59343d130234f9c959225652722e0ae4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/filtered_search_test.go @@ -0,0 +1,233 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / 
__/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "fmt" + "math" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/config" + "google.golang.org/protobuf/types/known/structpb" +) + +const ( + alpha = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + exponentiation = 2 + collectionName = "Alphabetic" + propName = "contents" +) + +func TestGRPC_FilteredSearch(t *testing.T) { + grpcClient, _ := newClient(t) + helper.DeleteClass(t, collectionName) + helper.CreateClass(t, &models.Class{ + Class: collectionName, + Properties: []*models.Property{ + { + Name: propName, + DataType: []string{"text"}, + }, + }, + InvertedIndexConfig: &models.InvertedIndexConfig{ + Stopwords: &models.StopwordConfig{ + Preset: "none", + }, + UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND, + }, + }) + defer helper.DeleteClass(t, collectionName) + + var objects []*pb.BatchObject + for i := 0; i < len(alpha); i++ { + for j := 0; j < len(alpha); j++ { + objects = append(objects, &pb.BatchObject{ + Uuid: uuid.NewString(), + Properties: &pb.BatchObject_Properties{ + NonRefProperties: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "contents": structpb.NewStringValue( + fmt.Sprintf( + "%s%s%s %s", + string(alpha[i]), string(alpha[i]), + string(alpha[i]), string(alpha[j]), + ), + ), + }, + }, + }, + Collection: collectionName, + }) + } + } + + batchResp, err := grpcClient.BatchObjects(context.Background(), &pb.BatchObjectsRequest{ + Objects: objects, + }) + require.Nil(t, err) + require.Nil(t, batchResp.Errors) + + t.Run("NotEqual", func(t *testing.T) { + t.Run("without 
other filters", func(t *testing.T) { + t.Parallel() + expectedLen := int(math.Pow(float64(len(alpha)), exponentiation)) - len(alpha)*exponentiation + 1 + for i := 0; i < len(alpha); i++ { + tok1 := string(alpha[i]) + in := pb.SearchRequest{ + Collection: collectionName, + Properties: &pb.PropertiesRequest{NonRefProperties: []string{propName}}, + Limit: uint32(math.Pow(float64(len(alpha)), exponentiation)), + Filters: &pb.Filters{ + Operator: pb.Filters_OPERATOR_NOT_EQUAL, + TestValue: &pb.Filters_ValueText{ + ValueText: tok1, + }, + Target: &pb.FilterTarget{ + Target: &pb.FilterTarget_Property{Property: propName}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + } + t.Run(fmt.Sprintf("with singular token %q", tok1), func(t *testing.T) { + t.Parallel() + searchResp, err := grpcClient.Search(context.Background(), &in) + require.Nil(t, err) + require.Len(t, searchResp.Results, expectedLen) + for _, res := range searchResp.Results { + prop := res.Properties.NonRefProps.Fields[propName].GetTextValue() + assert.NotContains(t, prop, tok1) + } + }) + + tok2 := fmt.Sprintf("%s%s%s", string(alpha[i]), string(alpha[i]), string(alpha[i])) + in.Filters.TestValue = &pb.Filters_ValueText{ + ValueText: tok2, + } + t.Run(fmt.Sprintf("with repeated token %q", tok2), func(t *testing.T) { + t.Parallel() + searchResp, err := grpcClient.Search(context.Background(), &in) + require.Nil(t, err) + require.Len(t, searchResp.Results, expectedLen) + for _, res := range searchResp.Results { + prop := res.GetProperties().NonRefProps.Fields[propName].GetTextValue() + assert.NotContains(t, prop, tok2) + } + }) + + tok3 := fmt.Sprintf("%s %s", tok2, tok1) + in.Filters.TestValue = &pb.Filters_ValueText{ + ValueText: tok3, + } + t.Run(fmt.Sprintf("with combined tokens %q", tok3), func(t *testing.T) { + t.Parallel() + searchResp, err := grpcClient.Search(context.Background(), &in) + require.Nil(t, err) + require.Len(t, searchResp.Results, expectedLen) + for _, res := range searchResp.Results { + 
prop := res.GetProperties().NonRefProps.Fields[propName].GetTextValue() + assert.NotContains(t, prop, tok3) + } + }) + } + }) + + t.Run("with limit and sort ascending", func(t *testing.T) { + t.Parallel() + expectedLen := uint32(10) + for i := 0; i < len(alpha); i++ { + tok1 := fmt.Sprintf("%s%s%s", string(alpha[i]), string(alpha[i]), string(alpha[i])) + in := pb.SearchRequest{ + Collection: collectionName, + Properties: &pb.PropertiesRequest{NonRefProperties: []string{propName}}, + Limit: expectedLen, + SortBy: []*pb.SortBy{ + { + Ascending: true, + Path: []string{propName}, + }, + }, + Filters: &pb.Filters{ + Operator: pb.Filters_OPERATOR_NOT_EQUAL, + TestValue: &pb.Filters_ValueText{ + ValueText: tok1, + }, + Target: &pb.FilterTarget{ + Target: &pb.FilterTarget_Property{Property: propName}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + } + + searchResp, err := grpcClient.Search(context.Background(), &in) + require.Nil(t, err) + require.Len(t, searchResp.Results, int(expectedLen)) + lastResult := "" + for _, res := range searchResp.Results { + prop := res.Properties.NonRefProps.Fields[propName].GetTextValue() + assert.NotContains(t, prop, tok1) + assert.Greater(t, prop, lastResult) + lastResult = prop + } + } + }) + + t.Run("with limit and sort descending", func(t *testing.T) { + t.Parallel() + expectedLen := uint32(10) + for i := 0; i < len(alpha); i++ { + tok1 := fmt.Sprintf("%s%s%s", string(alpha[i]), string(alpha[i]), string(alpha[i])) + in := pb.SearchRequest{ + Collection: collectionName, + Properties: &pb.PropertiesRequest{NonRefProperties: []string{propName}}, + Limit: expectedLen, + SortBy: []*pb.SortBy{ + { + Ascending: false, + Path: []string{propName}, + }, + }, + Filters: &pb.Filters{ + Operator: pb.Filters_OPERATOR_NOT_EQUAL, + TestValue: &pb.Filters_ValueText{ + ValueText: tok1, + }, + Target: &pb.FilterTarget{ + Target: &pb.FilterTarget_Property{Property: propName}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + } + + searchResp, 
err := grpcClient.Search(context.Background(), &in) + require.Nil(t, err) + require.Len(t, searchResp.Results, int(expectedLen)) + lastResult := "[[[ [" // '[' is > 'Z' in the ascii table + for _, res := range searchResp.Results { + prop := res.Properties.NonRefProps.Fields[propName].GetTextValue() + assert.NotContains(t, prop, tok1) + assert.Less(t, prop, lastResult) + lastResult = prop + } + } + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_aggregate_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_aggregate_test.go new file mode 100644 index 0000000000000000000000000000000000000000..16f33611ebaf8521e64ec044b885a9572c86b0f9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_aggregate_test.go @@ -0,0 +1,619 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/cities" +) + +func TestGRPC_Aggregate(t *testing.T) { + ctx := context.Background() + + host := "localhost:8080" + helper.SetupClient(host) + + grpcClient, _ := newClient(t) + require.NotNil(t, grpcClient) + + cities.CreateCountryCityAirportSchema(t, host) + cities.InsertCountryCityAirportObjects(t, host) + defer cities.DeleteCountryCityAirportSchema(t, host) + + ptFloat64 := func(in float64) *float64 { + return &in + } + t.Run("meta count", func(t *testing.T) { + tests := []struct { + collection string + count int64 + }{ + {collection: cities.Country, count: 2}, + {collection: cities.City, count: 6}, + {collection: cities.Airport, count: 4}, + } + for _, tt := range tests { + t.Run(tt.collection, func(t *testing.T) { + resp, err := grpcClient.Aggregate(ctx, &pb.AggregateRequest{ + Collection: tt.collection, + ObjectsCount: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.Equal(t, tt.count, resp.GetSingleResult().GetObjectsCount()) + }) + } + }) + t.Run("aggregations", func(t *testing.T) { + t.Run("numerical", func(t *testing.T) { + resp, err := grpcClient.Aggregate(ctx, &pb.AggregateRequest{ + Collection: cities.City, + Aggregations: []*pb.AggregateRequest_Aggregation{ + { + Property: "population", + Aggregation: &pb.AggregateRequest_Aggregation_Int{ + Int: &pb.AggregateRequest_Aggregation_Integer{ + Count: true, + Type: true, + Mean: true, + Maximum: true, + Minimum: true, + Sum: true, + }, + }, + }, + }, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.NotNil(t, resp.GetSingleResult().Aggregations) + 
require.Len(t, resp.GetSingleResult().Aggregations.GetAggregations(), 1) + for _, aggregation := range resp.GetSingleResult().Aggregations.GetAggregations() { + assert.Equal(t, "population", aggregation.Property) + numerical := aggregation.GetInt() + require.NotNil(t, numerical) + assert.Equal(t, int64(5), numerical.GetCount()) + assert.Equal(t, "int", numerical.GetType()) + assert.Equal(t, float64(1294000), numerical.GetMean()) + assert.Equal(t, int64(3470000), numerical.GetMaximum()) + assert.Equal(t, int64(0), numerical.GetMinimum()) + assert.Equal(t, int64(6470000), numerical.GetSum()) + } + }) + t.Run("text", func(t *testing.T) { + resp, err := grpcClient.Aggregate(ctx, &pb.AggregateRequest{ + Collection: cities.City, + Aggregations: []*pb.AggregateRequest_Aggregation{ + { + Property: "name", + Aggregation: &pb.AggregateRequest_Aggregation_Text_{ + Text: &pb.AggregateRequest_Aggregation_Text{ + Type: true, + TopOccurences: true, + }, + }, + }, + }, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.NotNil(t, resp.GetSingleResult().Aggregations) + require.Len(t, resp.GetSingleResult().Aggregations.GetAggregations(), 1) + for _, aggregation := range resp.GetSingleResult().Aggregations.GetAggregations() { + assert.Equal(t, "name", aggregation.Property) + textAggregation := aggregation.GetText() + topOccurrencesResults := map[string]int64{} + require.NotNil(t, textAggregation) + topOccurrences := textAggregation.GetTopOccurences() + for _, item := range topOccurrences.GetItems() { + topOccurrencesResults[item.Value] = item.GetOccurs() + } + assert.Equal(t, int64(1), topOccurrencesResults["Amsterdam"]) + assert.Equal(t, int64(1), topOccurrencesResults["Berlin"]) + assert.Equal(t, int64(1), topOccurrencesResults["Dusseldorf"]) + assert.Equal(t, int64(1), topOccurrencesResults["Missing Island"]) + assert.Equal(t, int64(1), topOccurrencesResults["Rotterdam"]) + assert.Equal(t, "text", 
textAggregation.GetType()) + } + }) + t.Run("boolean", func(t *testing.T) { + resp, err := grpcClient.Aggregate(ctx, &pb.AggregateRequest{ + Collection: cities.City, + Aggregations: []*pb.AggregateRequest_Aggregation{ + { + Property: "isCapital", + Aggregation: &pb.AggregateRequest_Aggregation_Boolean_{ + Boolean: &pb.AggregateRequest_Aggregation_Boolean{ + Count: true, + Type: true, + TotalTrue: true, + TotalFalse: true, + PercentageTrue: true, + PercentageFalse: true, + }, + }, + }, + }, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.NotNil(t, resp.GetSingleResult().Aggregations) + require.Len(t, resp.GetSingleResult().Aggregations.GetAggregations(), 1) + for _, aggregation := range resp.GetSingleResult().Aggregations.GetAggregations() { + assert.Equal(t, "isCapital", aggregation.Property) + booleanAggregation := aggregation.GetBoolean() + assert.Equal(t, int64(5), booleanAggregation.GetCount()) + assert.Equal(t, "boolean", booleanAggregation.GetType()) + assert.Equal(t, int64(2), booleanAggregation.GetTotalTrue()) + assert.Equal(t, int64(3), booleanAggregation.GetTotalFalse()) + assert.Equal(t, float64(0.4), booleanAggregation.GetPercentageTrue()) + assert.Equal(t, float64(0.6), booleanAggregation.GetPercentageFalse()) + } + }) + t.Run("date", func(t *testing.T) { + resp, err := grpcClient.Aggregate(ctx, &pb.AggregateRequest{ + Collection: cities.City, + Aggregations: []*pb.AggregateRequest_Aggregation{ + { + Property: "cityRights", + Aggregation: &pb.AggregateRequest_Aggregation_Date_{ + Date: &pb.AggregateRequest_Aggregation_Date{ + Count: true, + Type: true, + Maximum: true, + Median: true, + Minimum: true, + Mode: true, + }, + }, + }, + }, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.NotNil(t, resp.GetSingleResult().Aggregations) + require.Len(t, resp.GetSingleResult().Aggregations.GetAggregations(), 1) + for _, aggregation := 
range resp.GetSingleResult().Aggregations.GetAggregations() { + assert.Equal(t, "cityRights", aggregation.Property) + dateProps := aggregation.GetDate() + assert.Equal(t, "1984-07-21T21:34:33.709551616Z", dateProps.GetMaximum()) + assert.Equal(t, "1926-01-21T09:34:33.709551616Z", dateProps.GetMedian()) + assert.Equal(t, "1719-07-21T21:34:33.709551616Z", dateProps.GetMinimum()) + assert.Equal(t, "1984-07-21T21:34:33.709551616Z", dateProps.GetMode()) + assert.Equal(t, int64(4), dateProps.GetCount()) + assert.Equal(t, "date", dateProps.GetType()) + } + }) + t.Run("reference", func(t *testing.T) { + resp, err := grpcClient.Aggregate(ctx, &pb.AggregateRequest{ + Collection: cities.City, + Aggregations: []*pb.AggregateRequest_Aggregation{ + { + Property: "inCountry", + Aggregation: &pb.AggregateRequest_Aggregation_Reference_{ + Reference: &pb.AggregateRequest_Aggregation_Reference{ + Type: true, + PointingTo: true, + }, + }, + }, + }, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Result) + require.NotNil(t, resp.GetSingleResult().Aggregations) + require.Len(t, resp.GetSingleResult().Aggregations.GetAggregations(), 1) + for _, aggregation := range resp.GetSingleResult().Aggregations.GetAggregations() { + assert.Equal(t, "inCountry", aggregation.Property) + referenceAggregation := aggregation.GetReference() + assert.ElementsMatch(t, referenceAggregation.PointingTo, []string{"Country"}) + assert.Equal(t, "cref", referenceAggregation.GetType()) + } + }) + }) + t.Run("filters", func(t *testing.T) { + t.Run("reference", func(t *testing.T) { + resp, err := grpcClient.Aggregate(ctx, &pb.AggregateRequest{ + Collection: cities.City, + Filters: &pb.Filters{ + Operator: pb.Filters_OPERATOR_EQUAL, + TestValue: &pb.Filters_ValueBoolean{ValueBoolean: true}, + On: []string{"isCapital"}, + }, + Aggregations: []*pb.AggregateRequest_Aggregation{ + { + Property: "inCountry", + Aggregation: &pb.AggregateRequest_Aggregation_Reference_{ + Reference: 
&pb.AggregateRequest_Aggregation_Reference{ + Type: true, + PointingTo: true, + }, + }, + }, + }, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.NotNil(t, resp.GetSingleResult().Aggregations) + require.Len(t, resp.GetSingleResult().Aggregations.GetAggregations(), 1) + for _, aggregation := range resp.GetSingleResult().Aggregations.GetAggregations() { + assert.Equal(t, "inCountry", aggregation.Property) + referenceAggregation := aggregation.GetReference() + assert.ElementsMatch(t, referenceAggregation.GetPointingTo(), []string{"Country"}) + assert.Equal(t, "cref", referenceAggregation.GetType()) + } + }) + t.Run("is not capital city", func(t *testing.T) { + resp, err := grpcClient.Aggregate(ctx, &pb.AggregateRequest{ + Collection: cities.City, + Filters: &pb.Filters{ + Operator: pb.Filters_OPERATOR_EQUAL, + TestValue: &pb.Filters_ValueBoolean{ValueBoolean: false}, + On: []string{"isCapital"}, + }, + ObjectsCount: true, + Aggregations: []*pb.AggregateRequest_Aggregation{ + { + Property: "inCountry", + Aggregation: &pb.AggregateRequest_Aggregation_Reference_{ + Reference: &pb.AggregateRequest_Aggregation_Reference{ + Type: true, + PointingTo: true, + }, + }, + }, + { + Property: "name", + Aggregation: &pb.AggregateRequest_Aggregation_Text_{ + Text: &pb.AggregateRequest_Aggregation_Text{ + Count: true, + Type: true, + TopOccurences: true, + }, + }, + }, + { + Property: "population", + Aggregation: &pb.AggregateRequest_Aggregation_Int{ + Int: &pb.AggregateRequest_Aggregation_Integer{ + Mean: true, + Count: true, + Maximum: true, + Minimum: true, + Sum: true, + Type: true, + Mode: true, + }, + }, + }, + }, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.NotNil(t, resp.GetSingleResult().Aggregations) + require.Len(t, resp.GetSingleResult().Aggregations.GetAggregations(), 3) + for _, aggregation := range 
resp.GetSingleResult().Aggregations.GetAggregations() { + switch aggregation.Property { + case "inCountry": + assert.Equal(t, "inCountry", aggregation.Property) + referenceAggregation := aggregation.GetReference() + assert.ElementsMatch(t, referenceAggregation.GetPointingTo(), []string{"Country"}) + assert.Equal(t, "cref", referenceAggregation.GetType()) + case "name": + assert.Equal(t, "name", aggregation.Property) + textAggregation := aggregation.GetText() + topOccurrencesResults := map[string]int64{} + require.NotNil(t, textAggregation) + assert.Equal(t, "text", textAggregation.GetType()) + topOccurrences := textAggregation.GetTopOccurences() + require.NotNil(t, topOccurrences) + for _, item := range topOccurrences.GetItems() { + topOccurrencesResults[item.Value] = item.Occurs + } + assert.Equal(t, int64(1), topOccurrencesResults["Dusseldorf"]) + assert.Equal(t, int64(1), topOccurrencesResults["Missing Island"]) + assert.Equal(t, int64(1), topOccurrencesResults["Rotterdam"]) + case "population": + assert.Equal(t, "population", aggregation.Property) + numerical := aggregation.GetInt() + assert.Equal(t, int64(3), numerical.GetCount()) + assert.Equal(t, "int", numerical.GetType()) + assert.Equal(t, int64(600000), numerical.GetMaximum()) + assert.Equal(t, float64(400000), numerical.GetMean()) + assert.Equal(t, int64(0), numerical.GetMinimum()) + assert.Equal(t, int64(600000), numerical.GetMode()) + assert.Equal(t, int64(1200000), numerical.GetSum()) + case "isCapital": + assert.Equal(t, "isCapital", aggregation.Property) + booleanAggregation := aggregation.GetBoolean() + assert.Equal(t, int64(1), booleanAggregation.GetCount()) + assert.Equal(t, "boolean", booleanAggregation.GetType()) + assert.Equal(t, int64(1), booleanAggregation.GetTotalTrue()) + assert.Equal(t, int64(0), booleanAggregation.GetTotalFalse()) + assert.Equal(t, float64(1), booleanAggregation.GetPercentageTrue()) + assert.Equal(t, float64(0), booleanAggregation.GetPercentageFalse()) + } + } + }) + }) 
+ t.Run("groupBy", func(t *testing.T) { + t.Run("cityRights", func(t *testing.T) { + resp, err := grpcClient.Aggregate(ctx, &pb.AggregateRequest{ + Collection: cities.City, + GroupBy: &pb.AggregateRequest_GroupBy{ + Collection: cities.City, + Property: "cityRights", + }, + Aggregations: []*pb.AggregateRequest_Aggregation{ + { + Property: "cityRights", + Aggregation: &pb.AggregateRequest_Aggregation_Date_{ + Date: &pb.AggregateRequest_Aggregation_Date{ + Count: true, + Median: true, + }, + }, + }, + { + Property: "timezones", + Aggregation: &pb.AggregateRequest_Aggregation_Text_{ + Text: &pb.AggregateRequest_Aggregation_Text{ + Count: true, + Type: true, + TopOccurences: true, + }, + }, + }, + { + Property: "name", + Aggregation: &pb.AggregateRequest_Aggregation_Text_{ + Text: &pb.AggregateRequest_Aggregation_Text{ + Count: true, + }, + }, + }, + }, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetGroupedResults()) + require.Len(t, resp.GetGroupedResults().GetGroups(), 3) + + checkProperties := func(t *testing.T, + aggregations []*pb.AggregateReply_Aggregations_Aggregation, + cityRightsCount int64, + cityRightsMedian string, + nameCount int64, + timezonesCount int64, + timezonesTopOccurrences map[string]int64, + ) { + for _, aggregation := range aggregations { + switch aggregation.Property { + case "cityRights": + assert.Equal(t, "cityRights", aggregation.Property) + dateProps := aggregation.GetDate() + assert.Equal(t, cityRightsCount, dateProps.GetCount()) + assert.Equal(t, cityRightsMedian, dateProps.GetMedian()) + case "name": + assert.Equal(t, "name", aggregation.Property) + textAggregation := aggregation.GetText() + assert.Equal(t, nameCount, textAggregation.GetCount()) + case "timezones": + assert.Equal(t, "timezones", aggregation.Property) + textAggregation := aggregation.GetText() + assert.Equal(t, "text[]", textAggregation.GetType()) + topOccurrencesResult := map[string]int64{} + require.NotNil(t, textAggregation) + 
topOccurrences := textAggregation.GetTopOccurences() + require.NotNil(t, topOccurrences) + for _, item := range topOccurrences.GetItems() { + topOccurrencesResult[item.Value] = item.GetOccurs() + } + assert.Equal(t, timezonesCount, textAggregation.GetCount()) + for expectedValue, expectedOccurs := range timezonesTopOccurrences { + assert.Equal(t, expectedOccurs, topOccurrencesResult[expectedValue]) + } + } + } + } + for _, group := range resp.GetGroupedResults().GetGroups() { + assert.ElementsMatch(t, []string{"cityRights"}, group.GroupedBy.Path) + require.NotNil(t, group.Aggregations) + require.Len(t, group.Aggregations.GetAggregations(), 3) + switch group.GroupedBy.GetText() { + case "1400-01-01T00:00:00+02:00": + assert.Equal(t, "1400-01-01T00:00:00+02:00", group.GroupedBy.GetText()) + checkProperties(t, group.Aggregations.GetAggregations(), + 2, "1400-01-01T00:00:00+02:00", 2, 4, map[string]int64{"CEST": 2, "CET": 2}) + case "1135-01-01T00:00:00+02:00": + assert.Equal(t, "1135-01-01T00:00:00+02:00", group.GroupedBy.GetText()) + checkProperties(t, group.Aggregations.GetAggregations(), + 1, "1135-01-01T00:00:00+02:00", 1, 2, map[string]int64{"CEST": 1, "CET": 1}) + case "1283-01-01T00:00:00+02:00": + assert.Equal(t, "1283-01-01T00:00:00+02:00", group.GroupedBy.GetText()) + checkProperties(t, group.Aggregations.GetAggregations(), + 1, "1283-01-01T00:00:00+02:00", 1, 2, map[string]int64{"CEST": 1, "CET": 1}) + default: + // do nothing + } + } + }) + }) + t.Run("search", func(t *testing.T) { + amsterdam, err := helper.GetObject(t, cities.City, cities.Amsterdam, "vector") + require.NoError(t, err) + require.NotNil(t, amsterdam) + require.NotEmpty(t, amsterdam.Vector, 1) + tests := []struct { + name string + isNearText bool + nearText *pb.AggregateRequest_NearText + isNearObject bool + nearObject *pb.AggregateRequest_NearObject + isNearVector bool + nearVector *pb.AggregateRequest_NearVector + }{ + { + name: "nearText", + isNearText: true, + nearText: 
&pb.AggregateRequest_NearText{ + NearText: &pb.NearTextSearch{ + Query: []string{"Amsterdam"}, + Distance: ptFloat64(0.2), + }, + }, + }, + { + name: "nearObject", + isNearObject: true, + nearObject: &pb.AggregateRequest_NearObject{ + NearObject: &pb.NearObject{ + Id: cities.Amsterdam.String(), + Distance: ptFloat64(0.2), + }, + }, + }, + { + name: "nearVector", + isNearVector: true, + nearVector: &pb.AggregateRequest_NearVector{ + NearVector: &pb.NearVector{ + Vector: amsterdam.Vector, + Distance: ptFloat64(0.2), + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + aggregateRequest := &pb.AggregateRequest{ + Collection: cities.City, + Filters: &pb.Filters{ + Operator: pb.Filters_OPERATOR_EQUAL, + TestValue: &pb.Filters_ValueBoolean{ValueBoolean: true}, + On: []string{"isCapital"}, + }, + ObjectsCount: true, + Aggregations: []*pb.AggregateRequest_Aggregation{ + { + Property: "isCapital", + Aggregation: &pb.AggregateRequest_Aggregation_Boolean_{ + Boolean: &pb.AggregateRequest_Aggregation_Boolean{ + Count: true, + Type: true, + TotalTrue: true, + TotalFalse: true, + PercentageTrue: true, + PercentageFalse: true, + }, + }, + }, + { + Property: "population", + Aggregation: &pb.AggregateRequest_Aggregation_Int{ + Int: &pb.AggregateRequest_Aggregation_Integer{ + Count: true, + Type: true, + Mean: true, + Maximum: true, + Minimum: true, + Sum: true, + Mode: true, + }, + }, + }, + { + Property: "inCountry", + Aggregation: &pb.AggregateRequest_Aggregation_Reference_{ + Reference: &pb.AggregateRequest_Aggregation_Reference{ + Type: true, + PointingTo: true, + }, + }, + }, + { + Property: "name", + Aggregation: &pb.AggregateRequest_Aggregation_Text_{ + Text: &pb.AggregateRequest_Aggregation_Text{ + Count: true, + Type: true, + TopOccurences: true, + }, + }, + }, + }, + } + if tt.isNearText { + aggregateRequest.Search = tt.nearText + } + if tt.isNearObject { + aggregateRequest.Search = tt.nearObject + } + if tt.isNearVector { + 
aggregateRequest.Search = tt.nearVector + } + resp, err := grpcClient.Aggregate(ctx, aggregateRequest) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.NotNil(t, resp.GetSingleResult().Aggregations) + require.Len(t, resp.GetSingleResult().Aggregations.GetAggregations(), 4) + for _, aggregation := range resp.GetSingleResult().Aggregations.GetAggregations() { + switch aggregation.Property { + case "inCountry": + assert.Equal(t, "inCountry", aggregation.Property) + referenceAggregation := aggregation.GetReference() + assert.ElementsMatch(t, referenceAggregation.GetPointingTo(), []string{"Country"}) + assert.Equal(t, "cref", referenceAggregation.GetType()) + case "name": + assert.Equal(t, "name", aggregation.Property) + textAggregation := aggregation.GetText() + topOccurrencesResults := map[string]int64{} + require.NotNil(t, textAggregation) + assert.Equal(t, "text", textAggregation.GetType()) + assert.Equal(t, int64(1), textAggregation.GetCount()) + topOccurrences := textAggregation.GetTopOccurences() + require.NotNil(t, topOccurrences) + for _, item := range topOccurrences.GetItems() { + topOccurrencesResults[item.Value] = item.Occurs + } + assert.Equal(t, int64(1), topOccurrencesResults["Amsterdam"]) + case "population": + assert.Equal(t, "population", aggregation.Property) + numerical := aggregation.GetInt() + assert.Equal(t, int64(1), numerical.GetCount()) + assert.Equal(t, "int", numerical.GetType()) + assert.Equal(t, int64(1800000), numerical.GetMaximum()) + assert.Equal(t, float64(1800000), numerical.GetMean()) + assert.Equal(t, int64(1800000), numerical.GetMinimum()) + assert.Equal(t, int64(1800000), numerical.GetMode()) + assert.Equal(t, int64(1800000), numerical.GetSum()) + } + } + }) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_mixed_vectors_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_mixed_vectors_test.go new file mode 100644 
index 0000000000000000000000000000000000000000..d9f079a49a47dc95210d4e55025f2d0f086276c0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_mixed_vectors_test.go @@ -0,0 +1,203 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/books" + "github.com/weaviate/weaviate/usecases/byteops" +) + +func TestGRPC_MixedVectors(t *testing.T) { + var ( + ctx = context.Background() + grpcClient, _ = newClient(t) + ) + + class := books.ClassMixedContextionaryVectorizer() + helper.DeleteClass(t, class.Class) + helper.CreateClass(t, class) + + _, err := grpcClient.BatchObjects(ctx, &pb.BatchObjectsRequest{ + Objects: books.BatchObjects(), + }) + require.NoError(t, err) + + search := func(t *testing.T, mutate func(request *pb.SearchRequest)) *pb.SearchReply { + req := &pb.SearchRequest{ + Collection: class.Class, + Metadata: &pb.MetadataRequest{ + Uuid: true, + Vector: true, + }, + Uses_127Api: true, + } + mutate(req) + + resp, err := grpcClient.Search(ctx, req) + require.NoError(t, err) + return resp + } + + aggregate := func(t *testing.T, mutate func(request *pb.AggregateRequest)) *pb.AggregateReply { + req := &pb.AggregateRequest{ + Collection: class.Class, + ObjectsCount: true, + Aggregations: []*pb.AggregateRequest_Aggregation{ + { + Property: "title", + Aggregation: &pb.AggregateRequest_Aggregation_Text_{ + Text: &pb.AggregateRequest_Aggregation_Text{ + Count: true, + }, + }, + }, + }, + } + mutate(req) + + resp, err := grpcClient.Aggregate(ctx, 
req) + require.NoError(t, err) + return resp + } + + t.Run("search all", func(t *testing.T) { + resp := search(t, func(req *pb.SearchRequest) {}) + require.Len(t, resp.Results, 3) + + for _, result := range resp.Results { + require.Len(t, result.Metadata.Vector, 300) + require.Len(t, result.Metadata.Vectors, 2) + + contextionary := find(result.Metadata.Vectors, func(t *pb.Vectors) bool { + return t.Name == "contextionary_all" + }) + require.Equal(t, "contextionary_all", contextionary.Name) + require.Equal(t, result.Metadata.Vector, byteops.Fp32SliceFromBytes(contextionary.VectorBytes)) + } + }) + + for _, targetVector := range []string{"", "contextionary_all"} { + t.Run(fmt.Sprintf("search,targetVector=%q", targetVector), func(t *testing.T) { + t.Run("hybrid", func(t *testing.T) { + resp := search(t, func(req *pb.SearchRequest) { + req.HybridSearch = &pb.Hybrid{Query: "Dune"} + if targetVector != "" { + req.HybridSearch.Targets = &pb.Targets{ + TargetVectors: []string{targetVector}, + } + } + }) + require.Len(t, resp.Results, 1) + require.Equal(t, "Dune", resp.Results[0].Properties.NonRefProps.Fields["title"].GetTextValue()) + }) + + t.Run("hybrid with group by", func(t *testing.T) { + resp := search(t, func(req *pb.SearchRequest) { + req.GroupBy = &pb.GroupBy{ + Path: []string{"title"}, + NumberOfGroups: 1, + ObjectsPerGroup: 1, + } + req.HybridSearch = &pb.Hybrid{Query: "Dune"} + if targetVector != "" { + req.HybridSearch.TargetVectors = []string{targetVector} + } + }) + require.Len(t, resp.GroupByResults, 1) + }) + + t.Run("hybrid near text and group by", func(t *testing.T) { + resp := search(t, func(req *pb.SearchRequest) { + req.GroupBy = &pb.GroupBy{ + Path: []string{"title"}, + NumberOfGroups: 1, + ObjectsPerGroup: 1, + } + req.HybridSearch = &pb.Hybrid{ + Alpha: 0.5, + NearText: &pb.NearTextSearch{ + Query: []string{"Dune"}, + }, + } + if targetVector != "" { + req.HybridSearch.Targets = &pb.Targets{ + TargetVectors: []string{targetVector}, + } + } + }) + 
require.Len(t, resp.GroupByResults, 1) + }) + + t.Run("near text", func(t *testing.T) { + resp := search(t, func(req *pb.SearchRequest) { + req.NearText = &pb.NearTextSearch{ + Query: []string{"Dune"}, + } + if targetVector != "" { + req.NearText.Targets = &pb.Targets{ + TargetVectors: []string{targetVector}, + } + } + }) + require.Equal(t, "Dune", resp.Results[0].Properties.NonRefProps.Fields["title"].GetTextValue()) + }) + }) + t.Run(fmt.Sprintf("aggregation,targetVector=%q", targetVector), func(t *testing.T) { + t.Run("simple", func(t *testing.T) { + resp := aggregate(t, func(req *pb.AggregateRequest) {}) + require.Equal(t, int64(3), *resp.GetSingleResult().GetAggregations().GetAggregations()[0].GetText().Count) + }) + + t.Run("with hybrid search", func(t *testing.T) { + resp := aggregate(t, func(req *pb.AggregateRequest) { + certainty := 0.7 + h := &pb.Hybrid{ + Alpha: 0.5, + NearText: &pb.NearTextSearch{ + Query: []string{"dune"}, + Certainty: &certainty, + }, + } + if targetVector != "" { + h.Targets = &pb.Targets{ + TargetVectors: []string{targetVector}, + } + } + + req.Search = &pb.AggregateRequest_Hybrid{ + Hybrid: h, + } + }) + agg := resp.GetSingleResult().GetAggregations().GetAggregations()[0].GetText() + require.Equal(t, int64(1), *agg.Count) + require.Equal(t, "Dune", agg.TopOccurences.GetItems()[0].Value) + }) + }) + } +} + +func find[T any](arr []T, predicate func(t T) bool) T { + for _, v := range arr { + if predicate(v) { + return v + } + } + var zero T + return zero +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_named_vectors_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_named_vectors_test.go new file mode 100644 index 0000000000000000000000000000000000000000..7e7bbcec7a31f88194d85793fe9ecc59ffcdcfac --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_named_vectors_test.go @@ -0,0 +1,190 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` 
\ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/books" + "google.golang.org/grpc/health/grpc_health_v1" +) + +func TestGRPC_NamedVectors(t *testing.T) { + grpcClient, conn := newClient(t) + + // delete if exists and then re-create Books class + booksClass := books.ClassNamedContextionaryVectorizer() + helper.DeleteClass(t, booksClass.Class) + helper.CreateClass(t, booksClass) + defer helper.DeleteClass(t, booksClass.Class) + + t.Run("Health Check", func(t *testing.T) { + client := grpc_health_v1.NewHealthClient(conn) + check, err := client.Check(context.TODO(), &grpc_health_v1.HealthCheckRequest{}) + require.NoError(t, err) + require.NotNil(t, check) + assert.Equal(t, grpc_health_v1.HealthCheckResponse_SERVING.Enum().Number(), check.Status.Number()) + }) + + t.Run("Batch import", func(t *testing.T) { + resp, err := grpcClient.BatchObjects(context.TODO(), &pb.BatchObjectsRequest{ + Objects: books.BatchObjects(), + }) + require.NoError(t, err) + require.NotNil(t, resp) + }) + + tests := []struct { + name string + req *pb.MetadataRequest + expectedVecs int + }{ + { + name: "all vectors", + req: &pb.MetadataRequest{Vector: true}, + expectedVecs: 3, + }, + { + name: "one vector", + req: &pb.MetadataRequest{Vectors: []string{"all"}}, + expectedVecs: 1, + }, + } + for _, tt := range tests { + t.Run(fmt.Sprintf("Search with hybrid return returning %s", tt.name), func(t *testing.T) { + resp, err := grpcClient.Search(context.TODO(), &pb.SearchRequest{ + Collection: booksClass.Class, + Metadata: tt.req, + 
HybridSearch: &pb.Hybrid{ + Query: "Dune", + TargetVectors: []string{"all"}, + }, + Uses_123Api: true, + Uses_125Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Results) + require.Equal(t, "Dune", resp.Results[0].Properties.NonRefProps.Fields["title"].GetTextValue()) + require.Len(t, resp.Results[0].Metadata.Vectors, tt.expectedVecs) + if tt.expectedVecs == 1 { + require.Equal(t, "all", resp.Results[0].Metadata.Vectors[0].Name) + } + }) + + t.Run(fmt.Sprintf("Search with hybrid and group by returning %s", tt.name), func(t *testing.T) { + resp, err := grpcClient.Search(context.TODO(), &pb.SearchRequest{ + Collection: booksClass.Class, + Metadata: tt.req, + GroupBy: &pb.GroupBy{ + Path: []string{"title"}, + NumberOfGroups: 1, + ObjectsPerGroup: 1, + }, + HybridSearch: &pb.Hybrid{ + Query: "Dune", + TargetVectors: []string{"all"}, + }, + Uses_123Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GroupByResults) + require.Len(t, resp.GroupByResults, 1) + require.Len(t, resp.GroupByResults[0].Objects[0].Metadata.Vectors, tt.expectedVecs) + if tt.expectedVecs == 1 { + require.Equal(t, "all", resp.GroupByResults[0].Objects[0].Metadata.Vectors[0].Name) + } + }) + + t.Run(fmt.Sprintf("Search with hybrid near text and group by returning %s", tt.name), func(t *testing.T) { + resp, err := grpcClient.Search(context.TODO(), &pb.SearchRequest{ + Collection: booksClass.Class, + GroupBy: &pb.GroupBy{ + Path: []string{"title"}, + NumberOfGroups: 1, + ObjectsPerGroup: 1, + }, + Metadata: tt.req, + HybridSearch: &pb.Hybrid{ + Alpha: 0.5, + NearText: &pb.NearTextSearch{ + Query: []string{"Dune"}, + }, + TargetVectors: []string{"all"}, + }, + Uses_123Api: true, + Uses_125Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GroupByResults) + require.Len(t, resp.GroupByResults, 1) + require.Len(t, resp.GroupByResults[0].Objects[0].Metadata.Vectors, 
tt.expectedVecs) + if tt.expectedVecs == 1 { + require.Equal(t, "all", resp.GroupByResults[0].Objects[0].Metadata.Vectors[0].Name) + } + }) + + t.Run(fmt.Sprintf("Search with near text returning %s", tt.name), func(t *testing.T) { + resp, err := grpcClient.Search(context.TODO(), &pb.SearchRequest{ + Collection: booksClass.Class, + Metadata: tt.req, + NearText: &pb.NearTextSearch{ + Query: []string{"Dune"}, + TargetVectors: []string{"all"}, + }, + Uses_123Api: true, + Uses_125Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Results) + require.Equal(t, "Dune", resp.Results[0].Properties.NonRefProps.Fields["title"].GetTextValue()) + require.Len(t, resp.Results[0].Metadata.Vectors, tt.expectedVecs) + if tt.expectedVecs == 1 { + require.Equal(t, "all", resp.Results[0].Metadata.Vectors[0].Name) + } + }) + + t.Run(fmt.Sprintf("Search with near text and group by returning %s", tt.name), func(t *testing.T) { + resp, err := grpcClient.Search(context.TODO(), &pb.SearchRequest{ + Collection: booksClass.Class, + GroupBy: &pb.GroupBy{ + Path: []string{"title"}, + NumberOfGroups: 1, + ObjectsPerGroup: 1, + }, + Metadata: tt.req, + NearText: &pb.NearTextSearch{ + Query: []string{"Dune"}, + TargetVectors: []string{"all"}, + }, + Uses_123Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GroupByResults) + require.Len(t, resp.GroupByResults, 1) + require.Len(t, resp.GroupByResults[0].Objects[0].Metadata.Vectors, tt.expectedVecs) + if tt.expectedVecs == 1 { + require.Equal(t, "all", resp.GroupByResults[0].Objects[0].Metadata.Vectors[0].Name) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_search_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_search_test.go new file mode 100644 index 0000000000000000000000000000000000000000..db4c906d82c3f86abd6a67078600f3427d3682a0 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_search_test.go @@ -0,0 +1,1081 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/planets" + "github.com/weaviate/weaviate/usecases/byteops" +) + +func TestGRPCSearch(t *testing.T) { + ctx := context.Background() + + host := "localhost:8080" + helper.SetupClient(host) + + grpcClient, _ := newClient(t) + require.NotNil(t, grpcClient) + + // Define class + className := "PlanetsMultiVectorSearch" + class := planets.BaseClass(className) + class.VectorConfig = map[string]models.VectorConfig{ + "colbert": { + Vectorizer: map[string]interface{}{ + "none": map[string]interface{}{}, + }, + VectorIndexConfig: map[string]interface{}{ + "multivector": map[string]interface{}{ + "enabled": true, + }, + }, + VectorIndexType: "hnsw", + }, + "regular": { + Vectorizer: map[string]interface{}{ + "none": map[string]interface{}{}, + }, + VectorIndexType: "hnsw", + }, + "description": { + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "properties": []interface{}{"description"}, + "vectorizeClassName": false, + }, + }, + VectorIndexType: "flat", + }, + } + + colbertVectors := [][][]float32{ + {{0.11, 0.12}, {0.13, 0.14}, {0.15, 0.16}}, + {{0.21, 0.22}, {0.23, 0.24}, {0.25, 0.26}}, + } + + regularVectors := [][]float32{ + {0.11, 0.12, 0.13}, + {0.14, 0.15, 0.16}, + } + + getDescriptionVectors := func(t 
*testing.T) [][]float32 { + descriptionVectors := make([][]float32, len(planets.Planets)) + for i, planet := range planets.Planets { + obj, err := helper.GetObject(t, class.Class, planet.ID, "vector") + require.NoError(t, err) + require.NotNil(t, obj) + require.Len(t, obj.Vectors, 3) + require.IsType(t, []float32{}, obj.Vectors["description"]) + assert.True(t, len(obj.Vectors["description"].([]float32)) > 0) + descriptionVectors[i] = obj.Vectors["description"].([]float32) + } + require.Len(t, descriptionVectors, 2) + return descriptionVectors + } + + helper.CreateClass(t, class) + defer helper.DeleteClass(t, class.Class) + + t.Run("insert data", func(t *testing.T) { + for i, planet := range planets.Planets { + obj := &models.Object{ + Class: className, + ID: planet.ID, + Properties: map[string]interface{}{ + "name": planet.Name, + "description": planet.Description, + }, + Vectors: models.Vectors{ + "colbert": colbertVectors[i], + "regular": regularVectors[i], + }, + } + helper.CreateObject(t, obj) + helper.AssertGetObjectEventually(t, obj.Class, obj.ID) + } + }) + + t.Run("vector search", func(t *testing.T) { + tests := []struct { + name string + nearVector *protocol.NearVector + }{ + { + name: "legacy vector", + nearVector: &protocol.NearVector{ + Vector: regularVectors[0], + Targets: &protocol.Targets{ + TargetVectors: []string{"regular"}, + }, + }, + }, + { + name: "legacy vector bytes", + nearVector: &protocol.NearVector{ + VectorBytes: byteops.Fp32SliceToBytes(regularVectors[0]), + Targets: &protocol.Targets{ + TargetVectors: []string{"regular"}, + }, + }, + }, + { + name: "colbert vector", + nearVector: &protocol.NearVector{ + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(colbertVectors[0]), + }, + }, + Targets: &protocol.Targets{ + TargetVectors: []string{"colbert"}, + }, + }, + }, + { + name: "regular", + nearVector: &protocol.NearVector{ + Vectors: []*protocol.Vectors{ + { 
+ Type: protocol.Vectors_VECTOR_TYPE_SINGLE_FP32, + VectorBytes: byteops.Fp32SliceToBytes(regularVectors[0]), + }, + }, + Targets: &protocol.Targets{ + TargetVectors: []string{"regular"}, + }, + }, + }, + { + name: "regular unspecified", + nearVector: &protocol.NearVector{ + Vectors: []*protocol.Vectors{ + { + VectorBytes: byteops.Fp32SliceToBytes(regularVectors[0]), + }, + }, + Targets: &protocol.Targets{ + TargetVectors: []string{"regular"}, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: tt.nearVector, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + } + }) + + t.Run("multi vector search", func(t *testing.T) { + t.Run("regular vectors with proper type", func(t *testing.T) { + tests := []struct { + name string + vectorType protocol.Vectors_VectorType + }{ + { + name: "unspecified", + vectorType: protocol.Vectors_VECTOR_TYPE_UNSPECIFIED, + }, + { + name: "single_fp32", + vectorType: protocol.Vectors_VECTOR_TYPE_UNSPECIFIED, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + Vectors: []*protocol.Vectors{ + { + Type: tt.vectorType, + VectorBytes: byteops.Fp32SliceToBytes(regularVectors[0]), + }, + { + Type: tt.vectorType, + VectorBytes: byteops.Fp32SliceToBytes(regularVectors[1]), + }, + }, + }, + }, + Targets: &protocol.Targets{ + TargetVectors: []string{"regular"}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + } + }) + t.Run("only 1", func(t *testing.T) { + t.Run("regular 
vector", func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_SINGLE_FP32, + VectorBytes: byteops.Fp32SliceToBytes(regularVectors[0]), + }, + }, + }, + }, + Targets: &protocol.Targets{ + TargetVectors: []string{"regular"}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + t.Run("colbert vector", func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "colbert", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(colbertVectors[0]), + }, + }, + }, + }, + Targets: &protocol.Targets{ + TargetVectors: []string{"colbert"}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + }) + t.Run("regular vectors", func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(regularVectors), + }, + }, + }, + }, + Targets: &protocol.Targets{ + TargetVectors: []string{"regular"}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + t.Run("colbert vectors", func(t *testing.T) { + resp, 
err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "colbert", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(colbertVectors[0]), + }, + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(colbertVectors[1]), + }, + }, + }, + }, + Targets: &protocol.Targets{ + TargetVectors: []string{"colbert"}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + t.Run("regular and colbert vectors", func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(regularVectors), + }, + }, + }, + { + Name: "colbert", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(colbertVectors[0]), + }, + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(colbertVectors[1]), + }, + }, + }, + }, + Targets: &protocol.Targets{ + TargetVectors: []string{"regular", "colbert"}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + t.Run("regular and colbert and description vectors", func(t *testing.T) { + descriptionVectors := getDescriptionVectors(t) + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: &protocol.NearVector{ + VectorForTargets: 
[]*protocol.VectorForTarget{ + { + Name: "regular", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(regularVectors), + }, + }, + }, + { + Name: "colbert", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(colbertVectors[0]), + }, + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(colbertVectors[1]), + }, + }, + }, + { + Name: "description", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(descriptionVectors), + }, + }, + }, + }, + Targets: &protocol.Targets{ + TargetVectors: []string{"regular", "colbert", "description"}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + }) + + t.Run("multi vector search with weights", func(t *testing.T) { + t.Run("legacy regular vectors with weights", func(t *testing.T) { + tests := []struct { + combination protocol.CombinationMethod + weightsForTargets []*protocol.WeightsForTarget + }{ + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_AVERAGE, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_SUM, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MIN, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MANUAL, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.8}, + }, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_RELATIVE_SCORE, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.8}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.combination.String(), 
func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + VectorBytes: byteops.Fp32SliceToBytes(regularVectors[0]), + }, + { + Name: "regular", + VectorBytes: byteops.Fp32SliceToBytes(regularVectors[1]), + }, + }, + Targets: &protocol.Targets{ + Combination: tt.combination, + WeightsForTargets: tt.weightsForTargets, + TargetVectors: []string{"regular", "regular"}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + } + }) + t.Run("regular vectors with weights", func(t *testing.T) { + tests := []struct { + combination protocol.CombinationMethod + targetVectors []string + weightsForTargets []*protocol.WeightsForTarget + }{ + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_AVERAGE, + targetVectors: []string{"regular"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_SUM, + targetVectors: []string{"regular"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MIN, + targetVectors: []string{"regular"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MANUAL, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.8}, + }, + targetVectors: []string{"regular", "regular"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_RELATIVE_SCORE, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.8}, + }, + targetVectors: []string{"regular", "regular"}, + }, + } + for _, tt := range tests { + t.Run(tt.combination.String(), func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: 
&protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(regularVectors), + }, + }, + }, + }, + Targets: &protocol.Targets{ + Combination: tt.combination, + WeightsForTargets: tt.weightsForTargets, + TargetVectors: tt.targetVectors, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + } + }) + t.Run("regular and colbert vectors with weights", func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(regularVectors), + }, + }, + }, + { + Name: "colbert", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(colbertVectors[0]), + }, + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(colbertVectors[1]), + }, + }, + }, + }, + Targets: &protocol.Targets{ + WeightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.4}, + {Target: "colbert", Weight: 0.2}, + {Target: "colbert", Weight: 0.2}, + }, + Combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MANUAL, + TargetVectors: []string{"regular", "regular", "colbert", "colbert"}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + t.Run("regular and colbert and description vectors with weights", func(t *testing.T) { + descriptionVectors := 
getDescriptionVectors(t) + tests := []struct { + combination protocol.CombinationMethod + targetVectors []string + weightsForTargets []*protocol.WeightsForTarget + }{ + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_AVERAGE, + targetVectors: []string{"regular", "colbert", "description"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_SUM, + targetVectors: []string{"regular", "colbert", "description"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MIN, + targetVectors: []string{"regular", "colbert", "description"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MANUAL, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.4}, + {Target: "colbert", Weight: 0.2}, + {Target: "description", Weight: 0.1}, + {Target: "description", Weight: 0.1}, + }, + targetVectors: []string{"regular", "regular", "colbert", "description", "description"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_RELATIVE_SCORE, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.4}, + {Target: "colbert", Weight: 0.2}, + {Target: "description", Weight: 0.1}, + {Target: "description", Weight: 0.1}, + }, + targetVectors: []string{"regular", "regular", "colbert", "description", "description"}, + }, + } + for _, tt := range tests { + t.Run(tt.combination.String(), func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(regularVectors), + }, + }, + }, + { + Name: "colbert", + Vectors: []*protocol.Vectors{ + { + Type: 
protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(colbertVectors[0]), + }, + }, + }, + { + Name: "description", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(descriptionVectors), + }, + }, + }, + }, + Targets: &protocol.Targets{ + WeightsForTargets: tt.weightsForTargets, + Combination: tt.combination, + TargetVectors: tt.targetVectors, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + } + }) + t.Run("regular and description vector with weights", func(t *testing.T) { + descriptionVectors := getDescriptionVectors(t) + tests := []struct { + combination protocol.CombinationMethod + targetVectors []string + weightsForTargets []*protocol.WeightsForTarget + }{ + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_AVERAGE, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_SUM, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MIN, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MANUAL, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "description", Weight: 0.8}, + }, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_RELATIVE_SCORE, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "description", Weight: 0.8}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.combination.String(), func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: 
byteops.Fp32SliceOfSlicesToBytes([][]float32{regularVectors[0]}), + }, + }, + }, + { + Name: "description", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes([][]float32{descriptionVectors[1]}), + }, + }, + }, + }, + Targets: &protocol.Targets{ + WeightsForTargets: tt.weightsForTargets, + Combination: tt.combination, + TargetVectors: []string{"regular", "description"}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + } + }) + }) + t.Run("hybrid", func(t *testing.T) { + t.Run("legacy regular vectors with weights", func(t *testing.T) { + tests := []struct { + combination protocol.CombinationMethod + weightsForTargets []*protocol.WeightsForTarget + }{ + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_AVERAGE, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_SUM, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MIN, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MANUAL, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.8}, + }, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_RELATIVE_SCORE, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.8}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.combination.String(), func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + HybridSearch: &protocol.Hybrid{ + Query: "Earth", + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + VectorBytes: byteops.Fp32SliceToBytes(regularVectors[0]), + }, + { + Name: "regular", + VectorBytes: 
byteops.Fp32SliceToBytes(regularVectors[1]), + }, + }, + }, + Targets: &protocol.Targets{ + Combination: tt.combination, + WeightsForTargets: tt.weightsForTargets, + TargetVectors: []string{"regular", "regular"}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + } + }) + t.Run("regular vectors with weights", func(t *testing.T) { + tests := []struct { + combination protocol.CombinationMethod + targetVectors []string + weightsForTargets []*protocol.WeightsForTarget + }{ + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_AVERAGE, + targetVectors: []string{"regular"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_SUM, + targetVectors: []string{"regular"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MIN, + targetVectors: []string{"regular"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MANUAL, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.8}, + }, + targetVectors: []string{"regular", "regular"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_RELATIVE_SCORE, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.8}, + }, + targetVectors: []string{"regular", "regular"}, + }, + } + for _, tt := range tests { + t.Run(tt.combination.String(), func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + HybridSearch: &protocol.Hybrid{ + Query: "Mars", + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(regularVectors), + }, + }, + }, + }, + }, + 
Targets: &protocol.Targets{ + Combination: tt.combination, + WeightsForTargets: tt.weightsForTargets, + TargetVectors: tt.targetVectors, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 1) + }) + } + }) + t.Run("regular and colbert and description vectors with weights", func(t *testing.T) { + descriptionVectors := getDescriptionVectors(t) + tests := []struct { + combination protocol.CombinationMethod + targetVectors []string + weightsForTargets []*protocol.WeightsForTarget + }{ + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_AVERAGE, + targetVectors: []string{"regular", "colbert", "description"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_SUM, + targetVectors: []string{"regular", "colbert", "description"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MIN, + targetVectors: []string{"regular", "colbert", "description"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MANUAL, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.4}, + {Target: "colbert", Weight: 0.2}, + {Target: "description", Weight: 0.1}, + {Target: "description", Weight: 0.1}, + }, + targetVectors: []string{"regular", "regular", "colbert", "description", "description"}, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_RELATIVE_SCORE, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.4}, + {Target: "colbert", Weight: 0.2}, + {Target: "description", Weight: 0.1}, + {Target: "description", Weight: 0.1}, + }, + targetVectors: []string{"regular", "regular", "colbert", "description", "description"}, + }, + } + for _, tt := range tests { + t.Run(tt.combination.String(), func(t *testing.T) { + resp, err := grpcClient.Search(ctx, 
&protocol.SearchRequest{ + Collection: class.Class, + HybridSearch: &protocol.Hybrid{ + Query: "Mars", + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(regularVectors), + }, + }, + }, + { + Name: "colbert", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(colbertVectors[0]), + }, + }, + }, + { + Name: "description", + Vectors: []*protocol.Vectors{ + { + Type: protocol.Vectors_VECTOR_TYPE_MULTI_FP32, + VectorBytes: byteops.Fp32SliceOfSlicesToBytes(descriptionVectors), + }, + }, + }, + }, + }, + Targets: &protocol.Targets{ + WeightsForTargets: tt.weightsForTargets, + Combination: tt.combination, + TargetVectors: tt.targetVectors, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 1) + }) + } + }) + t.Run("legacy regular vectors with weights", func(t *testing.T) { + tests := []struct { + combination protocol.CombinationMethod + weightsForTargets []*protocol.WeightsForTarget + }{ + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_AVERAGE, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_SUM, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MIN, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_MANUAL, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.8}, + }, + }, + { + combination: protocol.CombinationMethod_COMBINATION_METHOD_TYPE_RELATIVE_SCORE, + weightsForTargets: []*protocol.WeightsForTarget{ + {Target: "regular", Weight: 0.2}, + {Target: "regular", Weight: 0.8}, + }, + }, + } + for _, tt := range tests { + 
t.Run(tt.combination.String(), func(t *testing.T) { + resp, err := grpcClient.Search(ctx, &protocol.SearchRequest{ + Collection: class.Class, + NearVector: &protocol.NearVector{ + VectorForTargets: []*protocol.VectorForTarget{ + { + Name: "regular", + VectorBytes: byteops.Fp32SliceToBytes(regularVectors[0]), + }, + { + Name: "regular", + VectorBytes: byteops.Fp32SliceToBytes(regularVectors[1]), + }, + }, + Targets: &protocol.Targets{ + Combination: tt.combination, + WeightsForTargets: tt.weightsForTargets, + TargetVectors: []string{"regular", "regular"}, + }, + }, + Uses_123Api: true, + Uses_125Api: true, + Uses_127Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + assert.Len(t, resp.Results, 2) + }) + } + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c09952db15ba6c1d7224afa9da38de0cc12cf770 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_test.go @@ -0,0 +1,309 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "math/big" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/books" + "google.golang.org/grpc" + "google.golang.org/grpc/health/grpc_health_v1" +) + +func idByte(id string) []byte { + hexInteger, _ := new(big.Int).SetString(strings.ReplaceAll(id, "-", ""), 16) + return hexInteger.Bytes() +} + +func TestGRPC(t *testing.T) { + grpcClient, conn := newClient(t) + + // delete if exists and then re-create Books class + booksClass := books.ClassContextionaryVectorizer() + helper.DeleteClass(t, booksClass.Class) + helper.CreateClass(t, booksClass) + defer helper.DeleteClass(t, booksClass.Class) + + t.Run("Health Check", func(t *testing.T) { + client := grpc_health_v1.NewHealthClient(conn) + check, err := client.Check(context.TODO(), &grpc_health_v1.HealthCheckRequest{}) + require.NoError(t, err) + require.NotNil(t, check) + assert.Equal(t, grpc_health_v1.HealthCheckResponse_SERVING.Enum().Number(), check.Status.Number()) + }) + + t.Run("Health List", func(t *testing.T) { + client := grpc_health_v1.NewHealthClient(conn) + list, err := client.List(context.TODO(), &grpc_health_v1.HealthListRequest{}) + require.NoError(t, err) + require.NotNil(t, list) + require.NotEmpty(t, list.Statuses) + require.NotEmpty(t, list.Statuses["weaviate"]) + assert.Equal(t, grpc_health_v1.HealthCheckResponse_SERVING.Enum().Number(), list.Statuses["weaviate"].Status.Number()) + }) + + t.Run("Batch import", func(t *testing.T) { + resp, err := grpcClient.BatchObjects(context.TODO(), &pb.BatchObjectsRequest{ + Objects: books.BatchObjects(), + }) + require.NoError(t, err) + require.NotNil(t, resp) + }) + + propsTests := []struct { + name string + req *pb.SearchRequest + }{ + { + name: "Search with props", 
+ req: &pb.SearchRequest{ + Collection: booksClass.Class, + Properties: &pb.PropertiesRequest{ + NonRefProperties: []string{"title"}, + ObjectProperties: []*pb.ObjectPropertiesRequest{ + { + PropName: "meta", + PrimitiveProperties: []string{"isbn"}, + ObjectProperties: []*pb.ObjectPropertiesRequest{ + { + PropName: "obj", + PrimitiveProperties: []string{"text"}, + }, + { + PropName: "objs", + PrimitiveProperties: []string{"text"}, + }, + }, + }, + {PropName: "reviews", PrimitiveProperties: []string{"tags"}}, + }, + }, + Metadata: &pb.MetadataRequest{ + Uuid: true, + }, + Uses_123Api: true, + Uses_125Api: true, + }, + }, + { + name: "Search without props", + req: &pb.SearchRequest{ + Collection: booksClass.Class, + Metadata: &pb.MetadataRequest{ + Uuid: true, + }, + Uses_123Api: true, + Uses_125Api: true, + }, + }, + } + for _, tt := range propsTests { + t.Run(tt.name, func(t *testing.T) { + scifi := "sci-fi" + resp, err := grpcClient.Search(context.TODO(), tt.req) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Results) + assert.Equal(t, len(books.BatchObjects()), len(resp.Results)) + for i := range resp.Results { + res := resp.Results[i] + id := res.Metadata.Id + + assert.True(t, id == books.Dune.String() || id == books.ProjectHailMary.String() || id == books.TheLordOfTheIceGarden.String()) + titleRaw := res.Properties.NonRefProps.Fields["title"] + require.NotNil(t, titleRaw) + title := titleRaw.GetTextValue() + require.NotNil(t, title) + + metaRaw := res.Properties.NonRefProps.Fields["meta"] + require.NotNil(t, metaRaw) + meta := metaRaw.GetObjectValue() + require.NotNil(t, meta) + isbnRaw := meta.GetFields()["isbn"] + require.NotNil(t, isbnRaw) + isbn := isbnRaw.GetTextValue() + require.NotNil(t, isbn) + + objRaw := meta.GetFields()["obj"] + require.NotNil(t, objRaw) + obj := objRaw.GetObjectValue() + require.NotNil(t, obj) + + objsRaw := meta.GetFields()["objs"] + require.NotNil(t, objsRaw) + objs := 
objsRaw.GetListValue().GetObjectValues() + require.NotNil(t, objs) + objEntry := objs.Values[0] + require.NotNil(t, objEntry) + + reviewsRaw := res.Properties.NonRefProps.Fields["reviews"] + require.NotNil(t, reviewsRaw) + reviews := reviewsRaw.GetListValue().GetObjectValues() + require.NotNil(t, reviews) + require.Len(t, reviews.Values, 1) + + review := reviews.Values[0] + require.NotNil(t, review) + + tags := review.Fields["tags"].GetListValue().GetTextValues() + require.NotNil(t, tags) + + txtTags := tags.Values + + expectedTitle := "" + expectedIsbn := "" + expectedTags := []string{} + if id == books.Dune.String() { + expectedTitle = "Dune" + expectedIsbn = "978-0593099322" + expectedTags = []string{scifi, "epic"} + } + if id == books.ProjectHailMary.String() { + expectedTitle = "Project Hail Mary" + expectedIsbn = "978-0593135204" + expectedTags = []string{scifi} + } + if id == books.TheLordOfTheIceGarden.String() { + expectedTitle = "The Lord of the Ice Garden" + expectedIsbn = "978-8374812962" + expectedTags = []string{scifi, "fantasy"} + } + assert.Equal(t, expectedTitle, title) + assert.Equal(t, expectedIsbn, isbn) + assert.Equal(t, expectedTags, txtTags) + + expectedObj := &pb.Properties{ + Fields: map[string]*pb.Value{ + "text": {Kind: &pb.Value_TextValue{TextValue: "some text"}}, + }, + } + assert.Equal(t, expectedObj, obj) + assert.Equal(t, expectedObj, objEntry) + } + }) + } + + t.Run("Search with hybrid", func(t *testing.T) { + resp, err := grpcClient.Search(context.TODO(), &pb.SearchRequest{ + Collection: booksClass.Class, + HybridSearch: &pb.Hybrid{ + Query: "Dune", + }, + Uses_123Api: true, + Uses_125Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Results) + require.Equal(t, "Dune", resp.Results[0].Properties.NonRefProps.Fields["title"].GetTextValue()) + }) + + t.Run("Search with hybrid and group by", func(t *testing.T) { + resp, err := grpcClient.Search(context.TODO(), &pb.SearchRequest{ + Collection: 
booksClass.Class, + GroupBy: &pb.GroupBy{ + Path: []string{"title"}, + NumberOfGroups: 1, + ObjectsPerGroup: 1, + }, + HybridSearch: &pb.Hybrid{ + Query: "Dune", + }, + Uses_123Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GroupByResults) + require.Len(t, resp.GroupByResults, 1) + }) + + t.Run("Search with near text", func(t *testing.T) { + resp, err := grpcClient.Search(context.TODO(), &pb.SearchRequest{ + Collection: booksClass.Class, + NearText: &pb.NearTextSearch{ + Query: []string{"Dune"}, + }, + Uses_123Api: true, + Uses_125Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Results) + require.Equal(t, "Dune", resp.Results[0].Properties.NonRefProps.Fields["title"].GetTextValue()) + }) + + t.Run("Search with near text and group by", func(t *testing.T) { + resp, err := grpcClient.Search(context.TODO(), &pb.SearchRequest{ + Collection: booksClass.Class, + GroupBy: &pb.GroupBy{ + Path: []string{"title"}, + NumberOfGroups: 1, + ObjectsPerGroup: 1, + }, + NearText: &pb.NearTextSearch{ + Query: []string{"Dune"}, + }, + Uses_123Api: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GroupByResults) + require.Len(t, resp.GroupByResults, 1) + }) + + t.Run("Aggregate", func(t *testing.T) { + resp, err := grpcClient.Aggregate(context.TODO(), &pb.AggregateRequest{ + Collection: booksClass.Class, + ObjectsCount: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.GetSingleResult()) + require.Equal(t, int64(3), resp.GetSingleResult().GetObjectsCount()) + }) + + t.Run("Batch delete", func(t *testing.T) { + resp, err := grpcClient.BatchDelete(context.TODO(), &pb.BatchDeleteRequest{ + Collection: "Books", + Filters: &pb.Filters{Operator: pb.Filters_OPERATOR_EQUAL, TestValue: &pb.Filters_ValueText{ValueText: "Dune"}, Target: &pb.FilterTarget{Target: &pb.FilterTarget_Property{Property: "title"}}}, + DryRun: true, + 
Verbose: true, + }) + require.NoError(t, err) + require.NotNil(t, resp) + require.Equal(t, resp.Matches, int64(1)) + require.Equal(t, resp.Successful, int64(1)) + require.Equal(t, resp.Failed, int64(0)) + require.Equal(t, resp.Objects[0].Uuid, idByte(books.Dune.String())) + }) + + t.Run("gRPC Search removed", func(t *testing.T) { + _, err := grpcClient.Search(context.TODO(), &pb.SearchRequest{}) + require.NotNil(t, err) + }) +} + +func newClient(t *testing.T) (pb.WeaviateClient, *grpc.ClientConn) { + conn, err := helper.CreateGrpcConnectionClient(":50051") + require.NoError(t, err) + require.NotNil(t, conn) + grpcClient := helper.CreateGrpcWeaviateClient(conn) + require.NotNil(t, grpcClient) + return grpcClient, conn +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_test_deprecated.go b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_test_deprecated.go new file mode 100644 index 0000000000000000000000000000000000000000..9ad01315528b8b06a85aebe5f6659008af547859 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/grpc_test_deprecated.go @@ -0,0 +1,58 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/books" + "google.golang.org/grpc/health/grpc_health_v1" +) + +func TestGRPCDeprecated(t *testing.T) { + conn, err := helper.CreateGrpcConnectionClient(":50051") + require.NoError(t, err) + require.NotNil(t, conn) + grpcClient := helper.CreateGrpcWeaviateClient(conn) + require.NotNil(t, grpcClient) + + // create Books class + booksClass := books.ClassContextionaryVectorizer() + helper.CreateClass(t, booksClass) + defer helper.DeleteClass(t, booksClass.Class) + + t.Run("Health Check", func(t *testing.T) { + client := grpc_health_v1.NewHealthClient(conn) + check, err := client.Check(context.TODO(), &grpc_health_v1.HealthCheckRequest{}) + require.NoError(t, err) + require.NotNil(t, check) + assert.Equal(t, grpc_health_v1.HealthCheckResponse_SERVING.Enum().Number(), check.Status.Number()) + }) + + t.Run("Batch import", func(t *testing.T) { + resp, err := grpcClient.BatchObjects(context.TODO(), &pb.BatchObjectsRequest{ + Objects: books.BatchObjects(), + }) + require.NoError(t, err) + require.NotNil(t, resp) + }) + + t.Run("gRPC Search removed", func(t *testing.T) { + _, err := grpcClient.Search(context.TODO(), &pb.SearchRequest{}) + require.NotNil(t, err) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/list_value_return_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/list_value_return_test.go new file mode 100644 index 0000000000000000000000000000000000000000..3e1f3e8ffdfeec7507fe7e7a2d0457499e040f42 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/list_value_return_test.go @@ -0,0 +1,187 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ 
\ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "bytes" + "context" + "encoding/binary" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/byteops" +) + +const ( + collectionNameLVR = "ListValueReturn" +) + +func TestGRPC_ListValueReturn(t *testing.T) { + grpcClient, _ := newClient(t) + helper.DeleteClass(t, collectionNameLVR) + helper.CreateClass(t, &models.Class{ + Class: collectionNameLVR, + Properties: []*models.Property{ + { + Name: "texts", + DataType: []string{"text[]"}, + }, + { + Name: "ints", + DataType: []string{"int[]"}, + }, + { + Name: "bools", + DataType: []string{"boolean[]"}, + }, + { + Name: "numbers", + DataType: []string{"number[]"}, + }, + { + Name: "uuids", + DataType: []string{"uuid[]"}, + }, + { + Name: "dates", + DataType: []string{"date[]"}, + }, + { + Name: "objects", + DataType: []string{"object[]"}, + NestedProperties: []*models.NestedProperty{{ + Name: "texts", + DataType: []string{"text[]"}, + }}, + }, + }, + }) + defer helper.DeleteClass(t, collectionNameLVR) + + var buf bytes.Buffer + err := binary.Write(&buf, binary.LittleEndian, []float64{1.1, 2.2}) + require.Nil(t, err) + numbersBytes := buf.Bytes() + + uuid1 := uuid.NewString() + uuid2 := uuid.NewString() + + batchResp, err := grpcClient.BatchObjects(context.Background(), &pb.BatchObjectsRequest{ + Objects: []*pb.BatchObject{{ + Uuid: uuid.NewString(), + Properties: &pb.BatchObject_Properties{ + TextArrayProperties: []*pb.TextArrayProperties{ + { + PropName: "texts", + Values: []string{"text1", "text2"}, + }, + { + PropName: "uuids", + Values: []string{uuid1, 
uuid2}, + }, + { + PropName: "dates", + Values: []string{ + "2020-01-01T00:00:00Z", + }, + }, + }, + IntArrayProperties: []*pb.IntArrayProperties{{ + PropName: "ints", + Values: []int64{1, 2}, + }}, + BooleanArrayProperties: []*pb.BooleanArrayProperties{{ + PropName: "bools", + Values: []bool{true, false}, + }}, + NumberArrayProperties: []*pb.NumberArrayProperties{{ + PropName: "numbers", + ValuesBytes: numbersBytes, + }}, + ObjectArrayProperties: []*pb.ObjectArrayProperties{{ + PropName: "objects", + Values: []*pb.ObjectPropertiesValue{{ + TextArrayProperties: []*pb.TextArrayProperties{{ + PropName: "texts", + Values: []string{"text1", "text2"}, + }}, + }}, + }}, + }, + Collection: collectionNameLVR, + }}, + }) + require.Nil(t, err) + require.Nil(t, batchResp.Errors) + + // Test the list value return + t.Run("ListValueReturn using >=1.25 API", func(t *testing.T) { + in := pb.SearchRequest{ + Collection: collectionNameLVR, + Properties: &pb.PropertiesRequest{ + NonRefProperties: []string{ + "texts", "ints", "bools", "numbers", "uuids", "dates", + }, + ObjectProperties: []*pb.ObjectPropertiesRequest{{ + PropName: "objects", + PrimitiveProperties: []string{"texts"}, + }}, + }, + Uses_123Api: true, + Uses_125Api: true, + } + searchResp, err := grpcClient.Search(context.Background(), &in) + require.Nil(t, err) + require.Len(t, searchResp.Results, 1) + props := searchResp.Results[0].GetProperties() + require.NotNil(t, props) + nonRefProps := props.GetNonRefProps() + require.NotNil(t, nonRefProps) + + texts := nonRefProps.GetFields()["texts"].GetListValue().GetTextValues() + require.NotNil(t, texts) + require.Equal(t, []string{"text1", "text2"}, texts.GetValues()) + + ints := nonRefProps.GetFields()["ints"].GetListValue().GetIntValues() + require.NotNil(t, ints) + require.Equal(t, []int64{1, 2}, byteops.IntsFromByteVector(ints.GetValues())) + + bools := nonRefProps.GetFields()["bools"].GetListValue().GetBoolValues() + require.NotNil(t, bools) + require.Equal(t, 
[]bool{true, false}, bools.GetValues()) + + numbers := nonRefProps.GetFields()["numbers"].GetListValue().GetNumberValues() + require.NotNil(t, numbers) + require.Equal(t, []float64{1.1, 2.2}, byteops.Fp64SliceFromBytes(numbers.GetValues())) + + uuids := nonRefProps.GetFields()["uuids"].GetListValue().GetUuidValues() + require.NotNil(t, uuids) + require.Equal(t, []string{uuid1, uuid2}, uuids.GetValues()) + + dates := nonRefProps.GetFields()["dates"].GetListValue().GetDateValues() + require.NotNil(t, dates) + require.Equal(t, []string{"2020-01-01T00:00:00Z"}, dates.GetValues()) + + objects := nonRefProps.GetFields()["objects"].GetListValue().GetObjectValues() + require.NotNil(t, objects) + require.Len(t, objects.GetValues(), 1) + object := objects.GetValues()[0] + require.NotNil(t, object) + texts = object.GetFields()["texts"].GetListValue().GetTextValues() + require.NotNil(t, texts) + require.Equal(t, []string{"text1", "text2"}, texts.GetValues()) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/tenants_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/tenants_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c8f5f85f464c399cfed0a54286da202c557179eb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/grpc/tenants_test.go @@ -0,0 +1,124 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "slices" + "testing" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" + + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" +) + +func TestGRPCTenantsGet(t *testing.T) { + grpcClient, _ := newClient(t) + + className := "GRPCTenantsGet" + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantNames := []string{ + "Tenant1", "Tenant2", "Tenant3", + } + + defer func() { + helper.DeleteClass(t, className) + }() + + helper.CreateClass(t, &testClass) + tenants := make([]*models.Tenant, len(tenantNames)) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenantNames[i], ActivityStatus: "HOT"} + } + helper.CreateTenants(t, className, tenants) + + t.Run("Gets tenants of a class", func(t *testing.T) { + resp, err := grpcClient.TenantsGet(context.TODO(), &pb.TenantsGetRequest{ + Collection: className, + }) + if err != nil { + t.Fatalf("error while getting tenants: %v", err) + } + for _, tenant := range resp.Tenants { + require.Equal(t, slices.Contains(tenantNames, tenant.Name), true) + require.Equal(t, tenant.ActivityStatus, pb.TenantActivityStatus_TENANT_ACTIVITY_STATUS_HOT) + } + }) + + t.Run("Gets two tenants by their names", func(t *testing.T) { + resp, err := grpcClient.TenantsGet(context.TODO(), &pb.TenantsGetRequest{ + Collection: className, + Params: &pb.TenantsGetRequest_Names{ + Names: &pb.TenantNames{ + Values: []string{tenantNames[0], tenantNames[2]}, + }, + }, + }) + if err != nil { + t.Fatalf("error while getting tenants: %v", err) + } + require.Equal(t, resp.Tenants, []*pb.Tenant{{ + Name: tenantNames[0], + ActivityStatus: 
pb.TenantActivityStatus_TENANT_ACTIVITY_STATUS_HOT, + }, { + Name: tenantNames[2], + ActivityStatus: pb.TenantActivityStatus_TENANT_ACTIVITY_STATUS_HOT, + }}) + }) + + t.Run("Returns error when tenant names are missing", func(t *testing.T) { + _, err := grpcClient.TenantsGet(context.TODO(), &pb.TenantsGetRequest{ + Collection: className, + Params: &pb.TenantsGetRequest_Names{}, + }) + require.NotNil(t, err) + }) + + t.Run("Returns error when tenant names are specified empty", func(t *testing.T) { + _, err := grpcClient.TenantsGet(context.TODO(), &pb.TenantsGetRequest{ + Collection: className, + Params: &pb.TenantsGetRequest_Names{ + Names: &pb.TenantNames{ + Values: []string{}, + }, + }, + }) + require.NotNil(t, err) + }) + + t.Run("Returns nothing when tenant names are not found", func(t *testing.T) { + resp, err := grpcClient.TenantsGet(context.TODO(), &pb.TenantsGetRequest{ + Collection: className, + Params: &pb.TenantsGetRequest_Names{ + Names: &pb.TenantNames{ + Values: []string{"NonExistentTenant"}, + }, + }, + }) + require.Nil(t, err) + require.Empty(t, resp.Tenants) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/maintenance_mode/maintenance_mode_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/maintenance_mode/maintenance_mode_test.go new file mode 100644 index 0000000000000000000000000000000000000000..6bac5f30ace478168186f82f08f361302468da6f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/maintenance_mode/maintenance_mode_test.go @@ -0,0 +1,105 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package maintenance_mode + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" +) + +// TestMaintenanceMode starts a 3-node cluster and puts the 3rd node in +// maintenance mode. It then verifies that the 3rd node can still respond to +// schema/metadata changes/queries but not to object changes/queries. +func TestMaintenanceMode(t *testing.T) { + ctx := context.Background() + + compose, err := docker.New(). + With3NodeCluster(). + WithWeaviateEnv("MAINTENANCE_NODES", "node3"). + Start(ctx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + testClass := articles.ParagraphsClass() + testClass.ReplicationConfig = &models.ReplicationConfig{Factor: 3} + + helper.SetupClient(compose.GetWeaviate().URI()) + + t.Run("create class", func(t *testing.T) { + helper.CreateClass(t, testClass) + }) + + // The 3rd node is in maintenance mode but should still be able to respond + // to schema/metadata changes/queries + t.Run("verify class exists on node3", func(t *testing.T) { + helper.SetupClient(compose.GetWeaviateNode3().URI()) + helper.GetClass(t, testClass.Class) + }) + + paragraphIDs := []strfmt.UUID{ + strfmt.UUID("3bf331ac-8c86-4f95-b127-2f8f96bbc093"), + strfmt.UUID("47b26ba1-6bc9-41f8-a655-8b9a5b60e1a3"), + } + + t.Run("Add objects with consistency level QUORUM succeeds", func(t *testing.T) { + for idx, id := range paragraphIDs { + o := articles.NewParagraph(). + WithID(id). + WithContents(fmt.Sprintf("paragraph#%d", idx)). 
+ Object() + helper.CreateObjectCL(t, o, "QUORUM") + } + }) + + t.Run("Get objects with consistency level QUORUM succeeds", func(t *testing.T) { + helper.SetupClient(compose.GetWeaviateNode2().URI()) + for idx, id := range paragraphIDs { + res, err := helper.GetObjectCL(t, testClass.Class, id, "QUORUM") + require.Nil(t, err) + require.Equal(t, id, res.ID) + require.Equal(t, helper.ObjectContentsProp(fmt.Sprintf("paragraph#%d", idx)), res.Properties) + } + }) + + t.Run("Add objects with consistency level ALL should fail after timeout", func(t *testing.T) { + t.Helper() + o := articles.NewParagraph(). + WithID(strfmt.UUID("8833b60c-fe16-4b80-beed-440fc4738285")). + WithContents(fmt.Sprintf("paragraph#%d", 42)). + Object() + cls := string("ALL") + params := objects.NewObjectsCreateParamsWithTimeout(2 * time.Second).WithBody(o).WithConsistencyLevel(&cls) + _, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + require.NotNil(t, err, "expected error, got nil") + }) + + t.Run("Get objects with consistency level ALL should fail after timeout", func(t *testing.T) { + cls := string("ALL") + params := objects.NewObjectsClassGetParamsWithTimeout(2 * time.Second).WithID(paragraphIDs[0]).WithClassName(testClass.Class).WithConsistencyLevel(&cls) + _, err = helper.Client(t).Objects.ObjectsClassGet(params, nil) + require.NotNil(t, err, "expected error, got nil") + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_node/bm25_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_node/bm25_test.go new file mode 100644 index 0000000000000000000000000000000000000000..317aabde4efc4b231d795521757192cda46a3dbf --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_node/bm25_test.go @@ -0,0 +1,76 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate 
B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package multi_node + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" +) + +var paragraphs = []string{ + "Some random text", + "Other text", + "completely unrelated", + "this has nothing to do with the rest", +} + +func TestBm25MultiNode(t *testing.T) { + ctx := context.Background() + for i := 0; i < 5; i++ { + t.Run(fmt.Sprintf("iteration: %v", i), func(t *testing.T) { + runBM25MultinodeTest(t, ctx) + }) + } +} + +func runBM25MultinodeTest(t *testing.T, ctx context.Context) { + compose, err := docker.New(). + With3NodeCluster(). + Start(ctx) + require.NoError(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + paragraphClass := articles.ParagraphsClass() + helper.CreateClass(t, paragraphClass) + + for _, par := range paragraphs { + obj := articles.NewParagraph(). + WithContents(par). 
+ Object() + helper.CreateObject(t, obj) + } + + query := ` + { + Get { + Paragraph (bm25:{query:"random"}){ + contents + } + } + } + ` + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + resParagraph := result.Get("Get", "Paragraph").AsSlice() + require.Equal(t, resParagraph[0].(map[string]interface{})["contents"], paragraphs[0]) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_node/collection_naming_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_node/collection_naming_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b8671e39d0028f58646c00891e3516cb32c6a1a8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_node/collection_naming_test.go @@ -0,0 +1,116 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package multi_node + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" +) + +func TestCollectionNamingGQL(t *testing.T) { + ctx := context.Background() + + compose, err := docker.New(). + With3NodeCluster(). 
+ Start(ctx) + require.NoError(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + cases := []struct { + name string + createWith string + deleteWith string + getWith string + shouldFail bool + }{ + { + name: "all-lower", + createWith: "testcollectioncase", + deleteWith: "testcollectioncase", + getWith: "testcollectioncase", + }, + { + name: "create-lower-get-delete-GQL", + createWith: "testcollectioncase", + deleteWith: "Testcollectioncase", + getWith: "Testcollectioncase", + }, + { + name: "create-mixed-get-delete-lower", + createWith: "testCollectionCase", + deleteWith: "testCollectionCase", + getWith: "testCollectionCase", + }, + { + name: "create-mixed-get-delete-GQL", + createWith: "testCollectionCase", + deleteWith: "TestCollectionCase", + getWith: "TestCollectionCase", + }, + { + name: "create-lower-get-delete-nonGQL-should-fail", + createWith: "testCollectionCase", + deleteWith: "TestCoLLectionCase", // not GQL + getWith: "TestCoLLectionCase", // not GQL + shouldFail: true, + }, + } + + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + helper.SetupClient(compose.GetWeaviate().URI()) // node1 + helper.CreateClass(t, &models.Class{ + Class: c.createWith, + Vectorizer: "none", + }) + + if c.shouldFail { + // get should fail + class, err := helper.GetClassWithoutAssert(t, c.getWith) + require.Nil(t, class) + require.Error(t, err) + + helper.SetupClient(compose.GetWeaviateNode2().URI()) // node2 + class, err = helper.GetClassWithoutAssert(t, c.getWith) // try getting it from different node + require.Nil(t, class) + require.Error(t, err) + + // delete should fail + helper.DeleteClass(t, c.deleteWith) // try deleting it from different node + // make sure after delete, collection still exist with original created name + helper.GetClass(t, c.createWith) + + return + } + + helper.GetClass(t, c.getWith) // try to get from same node + + 
helper.SetupClient(compose.GetWeaviateNode2().URI()) // node2 + helper.GetClass(t, c.getWith) // try getting it from different node + helper.DeleteClass(t, c.deleteWith) // try deleting it from different node + + // make sure after delete, collection should not exist + class, err := helper.GetClassWithoutAssert(t, c.deleteWith) + require.Nil(t, class) + require.Error(t, err) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_node/create_while_node_down_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_node/create_while_node_down_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1beea41cde595fd00ac0505c4969da4ba1119a03 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_node/create_while_node_down_test.go @@ -0,0 +1,205 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package multi_node + +import ( + "context" + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" +) + +func TestCreateClassWhileOneNodeIsDown(t *testing.T) { + ctx := context.Background() + + compose, err := docker.New(). + With3NodeCluster(). 
+ Start(ctx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + t.Run("class with MT Enabled", func(t *testing.T) { + testClass := articles.ParagraphsClass() + testClass.MultiTenancyConfig = &models.MultiTenancyConfig{Enabled: true} + testClass.ReplicationConfig = &models.ReplicationConfig{Factor: 3} + + helper.SetupClient(compose.GetWeaviate().URI()) + + t.Run("stop 3rd node", func(t *testing.T) { + require.Nil(t, compose.StopAt(context.Background(), 2, nil)) + }) + + t.Run("create class", func(t *testing.T) { + helper.CreateClass(t, testClass) + }) + + t.Run("bring 3rd node back up", func(t *testing.T) { + require.Nil(t, compose.StartAt(context.Background(), 2)) + }) + + t.Run("verify class exists on the 3rd node", func(t *testing.T) { + helper.SetupClient(compose.GetWeaviateNode3().URI()) + require.NotNil(t, helper.GetClass(t, testClass.Class)) + }) + + t.Run("delete create class", func(t *testing.T) { + helper.DeleteClass(t, testClass.Class) + }) + }) + + t.Run("class with MT disabled", func(t *testing.T) { + testClass := articles.ParagraphsClass() + testClass.MultiTenancyConfig = &models.MultiTenancyConfig{Enabled: false} + testClass.ReplicationConfig = &models.ReplicationConfig{Factor: 3} + + helper.SetupClient(compose.GetWeaviate().URI()) + + t.Run("stop 3rd node", func(t *testing.T) { + require.Nil(t, compose.StopAt(context.Background(), 2, nil)) + }) + + t.Run("create class", func(t *testing.T) { + helper.CreateClass(t, testClass) + }) + + t.Run("bring 3rd node back up", func(t *testing.T) { + require.Nil(t, compose.StartAt(context.Background(), 2)) + }) + + t.Run("verify class exists on the 3rd node", func(t *testing.T) { + helper.SetupClient(compose.GetWeaviateNode3().URI()) + require.NotNil(t, helper.GetClass(t, testClass.Class)) + }) + }) +} + +func TestAddTenantWhileOneNodeIsDown(t *testing.T) { + ctx := context.Background() + + 
compose, err := docker.New(). + With3NodeCluster(). + Start(ctx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + testClass := articles.ParagraphsClass() + testClass.MultiTenancyConfig = &models.MultiTenancyConfig{Enabled: true} + testClass.ReplicationConfig = &models.ReplicationConfig{Factor: 3} + tenants := []*models.Tenant{ + {Name: "Tenant_1", ActivityStatus: models.TenantActivityStatusHOT}, + {Name: "Tenant_2", ActivityStatus: models.TenantActivityStatusCOLD}, + } + + helper.SetupClient(compose.GetWeaviateNode2().URI()) + + t.Run("create class", func(t *testing.T) { + helper.CreateClass(t, testClass) + }) + + t.Run("stop 3rd node", func(t *testing.T) { + require.Nil(t, compose.StopAt(context.Background(), 2, nil)) + }) + + t.Run("Add Tenant", func(t *testing.T) { + helper.CreateTenants(t, testClass.Class, tenants) + }) + + t.Run("bring 3rd node back up", func(t *testing.T) { + require.Nil(t, compose.StartAt(context.Background(), 2)) + }) + + t.Run("verify tenant exists on the 3rd node", func(t *testing.T) { + helper.SetupClient(compose.GetWeaviateNode3().URI()) + helper.TenantExists(t, testClass.Class, tenants[0].Name) + }) +} + +func TestAddObjectsWhileOneNodeIsDown(t *testing.T) { + ctx := context.Background() + + compose, err := docker.New(). + With3NodeCluster(). 
+ Start(ctx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + testClass := articles.ParagraphsClass() + testClass.MultiTenancyConfig = &models.MultiTenancyConfig{Enabled: true} + testClass.ReplicationConfig = &models.ReplicationConfig{Factor: 3} + tenants := []*models.Tenant{ + {Name: "Tenant_1", ActivityStatus: models.TenantActivityStatusHOT}, + {Name: "Tenant_2", ActivityStatus: models.TenantActivityStatusCOLD}, + } + + helper.SetupClient(compose.GetWeaviateNode2().URI()) + + t.Run("create class", func(t *testing.T) { + helper.CreateClass(t, testClass) + }) + + t.Run("stop 3rd node", func(t *testing.T) { + require.Nil(t, compose.StopAt(context.Background(), 2, nil)) + }) + + t.Run("Add Tenant", func(t *testing.T) { + helper.CreateTenants(t, testClass.Class, tenants) + }) + + paragraphIDs := []strfmt.UUID{ + strfmt.UUID("3bf331ac-8c86-4f95-b127-2f8f96bbc093"), + strfmt.UUID("47b26ba1-6bc9-41f8-a655-8b9a5b60e1a3"), + } + + t.Run("Add objects", func(t *testing.T) { + batch := make([]*models.Object, len(paragraphIDs)) + for idx, id := range paragraphIDs { + batch[idx] = articles.NewParagraph(). + WithID(id). + WithContents(fmt.Sprintf("paragraph#%d", idx)). + WithTenant(tenants[0].Name). 
+ Object() + } + helper.CreateObjectsBatch(t, batch) + }) + + t.Run("bring 3rd node back up", func(t *testing.T) { + require.Nil(t, compose.StartAt(context.Background(), 2)) + }) + + t.Run("verify object created the 3rd node", func(t *testing.T) { + helper.SetupClient(compose.GetWeaviateNode3().URI()) + for idx, id := range paragraphIDs { + res, err := helper.TenantObject(t, testClass.Class, id, tenants[0].Name) + require.Nil(t, err) + require.Equal(t, id, res.ID) + require.Equal(t, helper.ObjectContentsProp(fmt.Sprintf("paragraph#%d", idx)), res.Properties) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_node/nodes_api_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_node/nodes_api_test.go new file mode 100644 index 0000000000000000000000000000000000000000..f9c916e5dd48325e320bbb3bc051bb6685c81887 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_node/nodes_api_test.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package multi_node + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/nodes" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/verbosity" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" +) + +func TestNodesMultiNode(t *testing.T) { + ctx := context.Background() + + compose, err := docker.New(). + With3NodeCluster(). 
+ Start(ctx) + require.NoError(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + helper.SetupClient(compose.GetWeaviate().URI()) + + paragraphClass := articles.ParagraphsClass() + helper.DeleteClass(t, paragraphClass.Class) + helper.CreateClass(t, paragraphClass) + articleClass := articles.ArticlesClass() + helper.DeleteClass(t, articleClass.Class) + helper.CreateClass(t, articleClass) + + for i := 0; i < 10; i++ { + require.NoError(t, helper.CreateObject(t, articles.NewArticle().Object())) + require.NoError(t, helper.CreateObject(t, articles.NewParagraph().Object())) + } + + minimal, verbose := verbosity.OutputMinimal, verbosity.OutputVerbose + + t.Run("output without class minimal", func(t *testing.T) { + payload := getNodesPayload(t, minimal, "") + for _, node := range payload.Nodes { + require.Nil(t, node.Shards) + } + }) + + t.Run("output without class verbose", func(t *testing.T) { + payload := getNodesPayload(t, verbose, "") + for _, node := range payload.Nodes { + require.NotNil(t, node.Shards) + require.Len(t, node.Shards, 2) + } + }) + + t.Run("output with class minimal", func(t *testing.T) { + payload := getNodesPayload(t, minimal, articleClass.Class) + for _, node := range payload.Nodes { + require.Nil(t, node.Shards) + } + }) + + t.Run("output with class verbose", func(t *testing.T) { + payload := getNodesPayload(t, verbose, articleClass.Class) + for _, node := range payload.Nodes { + require.NotNil(t, node.Shards) + require.Len(t, node.Shards, 1) + } + }) +} + +func getNodesPayload(t *testing.T, verbosity string, class string) *models.NodesStatusResponse { + params := nodes.NewNodesGetClassParams().WithOutput(&verbosity) + if class != "" { + params.WithClassName(class) + } + body, clientErr := helper.Client(t).Nodes.NodesGetClass(params, nil) + require.NoError(t, clientErr) + payload, err := body.Payload, clientErr + require.NoError(t, err) + 
require.NotNil(t, payload) + require.Len(t, payload.Nodes, 3) + return payload +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/add_tenant_objects_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/add_tenant_objects_test.go new file mode 100644 index 0000000000000000000000000000000000000000..fd57e87cff10fd5489fa3b57ce95e9459fd28666 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/add_tenant_objects_test.go @@ -0,0 +1,246 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func TestAddTenantObjects(t *testing.T) { + className := "MultiTenantClass" + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantNames := []string{ + "Tenant1", "Tenant2", "Tenant3", + } + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: className, + Properties: map[string]interface{}{ + "name": tenantNames[0], + }, + Tenant: tenantNames[0], + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: className, + Properties: map[string]interface{}{ + "name": tenantNames[1], + }, + Tenant: tenantNames[1], + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: className, + Properties: 
map[string]interface{}{ + "name": tenantNames[2], + }, + Tenant: tenantNames[2], + }, + } + + defer func() { + helper.DeleteClass(t, className) + }() + + t.Run("create class with multi-tenancy enabled", func(t *testing.T) { + helper.CreateClass(t, &testClass) + }) + + t.Run("create tenants", func(t *testing.T) { + tenants := make([]*models.Tenant, len(tenantNames)) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenantNames[i]} + } + helper.CreateTenants(t, className, tenants) + }) + + t.Run("add tenant objects", func(t *testing.T) { + for _, obj := range tenantObjects { + assert.Nil(t, helper.CreateObject(t, obj)) + } + }) + + t.Run("verify object creation", func(t *testing.T) { + for i, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantNames[i]) + require.Nil(t, err) + assert.Equal(t, obj.Class, resp.Class) + assert.Equal(t, obj.Properties, resp.Properties) + } + }) +} + +func TestAddTenantObjectsToNonMultiClass(t *testing.T) { + className := "NoTenantClass" + tenantName := "randomTenant" + defer func() { + helper.DeleteClass(t, className) + }() + + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: false}, + } + helper.CreateClass(t, &testClass) + + objWithTenant := &models.Object{ + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: className, + Tenant: tenantName, + } + params := objects.NewObjectsCreateParams().WithBody(objWithTenant) + _, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + require.NotNil(t, err) +} + +func TestAddNonTenantObjectsToMultiClass(t *testing.T) { + className := "TenantClassFail" + defer func() { + helper.DeleteClass(t, className) + }() + + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + } + helper.CreateClass(t, &testClass) + objWithTenant := &models.Object{ + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: className, + } + params := 
objects.NewObjectsCreateParams().WithBody(objWithTenant) + _, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + require.NotNil(t, err) +} + +func TestAddObjectWithNonexistentTenantToMultiClass(t *testing.T) { + className := "TenantClass" + defer func() { + helper.DeleteClass(t, className) + }() + + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + } + helper.CreateClass(t, &testClass) + helper.CreateTenants(t, className, []*models.Tenant{{Name: "randomTenant1"}}) + + objWithTenant := &models.Object{ + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: className, + Tenant: "randomTenant2", + } + params := objects.NewObjectsCreateParams().WithBody(objWithTenant) + _, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + require.NotNil(t, err) +} + +func TestAddTenantObjectsWithAutoTenantCreation(t *testing.T) { + className := "AutoTenantClass" + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantCreation: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantObjectsWithID := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: className, + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: className, + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: className, + }, + } + tenantObjectsNoID := []*models.Object{ + {Class: className}, + {Class: className}, + {Class: className}, + } + objectsToCreate := append(tenantObjectsWithID, tenantObjectsNoID...) 
+ tenantNames := func() []string { + names := make([]string, len(objectsToCreate)) + for i := range objectsToCreate { + names[i] = fmt.Sprintf("NonExistentTenant%d", i) + } + return names + }() + for i := range objectsToCreate { + objectsToCreate[i].Properties = map[string]interface{}{ + "name": fmt.Sprintf("obj-%d", i), + } + objectsToCreate[i].Tenant = tenantNames[i] + } + + defer func() { + helper.DeleteClass(t, className) + }() + + t.Run("create class with multi-tenancy and autoTenantCreation enabled", func(t *testing.T) { + helper.CreateClass(t, &testClass) + }) + + // Don't create the tenants first :) + + t.Run("add tenant objects", func(t *testing.T) { + for i, obj := range objectsToCreate { + res, err := helper.CreateObjectWithResponse(t, obj) + require.Nil(t, err) + if obj.ID == "" { + // Some of the test objects were created without ID + objectsToCreate[i].ID = res.ID + } + } + }) + + t.Run("verify object creation", func(t *testing.T) { + for i, obj := range objectsToCreate { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantNames[i]) + require.Nil(t, err) + assert.Equal(t, obj.Class, resp.Class) + assert.Equal(t, obj.Properties, resp.Properties) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/batch_add_tenant_objects_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/batch_add_tenant_objects_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b3f9528123e920a271f05398427bc0ca8cc25a83 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/batch_add_tenant_objects_test.go @@ -0,0 +1,398 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "errors" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func TestBatchAddTenantObjects(t *testing.T) { + tests := []func(*testing.T, bool){ + testBatchAddTenantObjects, + testBatchAddTenantObjectsWithMixedClasses, + testBatchWithMixedTenants, + testAddNonTenantBatchToMultiClass, + testAddBatchWithNonExistentTenant, + } + + withImplicitTenantCreation(t, tests) + withExplicitTenantCreation(t, tests) +} + +func withImplicitTenantCreation(t *testing.T, tests []func(*testing.T, bool)) { + for _, test := range tests { + t.Run("with implicit tenant creation", func(t *testing.T) { + test(t, true) + }) + } +} + +func withExplicitTenantCreation(t *testing.T, tests []func(*testing.T, bool)) { + for _, test := range tests { + t.Run("with explicit tenant creation", func(t *testing.T) { + test(t, false) + }) + } +} + +func testBatchAddTenantObjects(t *testing.T, implicitTenants bool) { + testClass := models.Class{ + Class: "MultiTenantClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantCreation: implicitTenants, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantName := "Tenant1" + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: 
testClass.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + } + + helper.CreateClass(t, &testClass) + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + if !implicitTenants { + helper.CreateTenants(t, testClass.Class, []*models.Tenant{{Name: tenantName}}) + } + + assertAddedTenantObjects(t, tenantName, tenantObjects) +} + +func testBatchAddTenantObjectsWithMixedClasses(t *testing.T, implicitTenants bool) { + testClass1 := models.Class{ + Class: "MultiTenantClass1", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantCreation: implicitTenants, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + testClass2 := models.Class{ + Class: "MultiTenantClass2", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantCreation: implicitTenants, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantName := "Tenant1" + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass1.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: testClass2.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass1.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + { + ID: "dd5a3cdb-1bba-4a2b-b173-dad4fabd0326", + Class: testClass2.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + } + + helper.CreateClass(t, &testClass1) + helper.CreateClass(t, &testClass2) + defer func() { + helper.DeleteClass(t, testClass1.Class) + helper.DeleteClass(t, testClass2.Class) + }() + + if !implicitTenants { + 
helper.CreateTenants(t, testClass1.Class, []*models.Tenant{{Name: tenantName}}) + helper.CreateTenants(t, testClass2.Class, []*models.Tenant{{Name: tenantName}}) + } + + assertAddedTenantObjects(t, tenantName, tenantObjects) +} + +func testBatchWithMixedTenants(t *testing.T, implicitTenants bool) { + className := "MultiTenantClassMixedBatchFail" + classes := []models.Class{ + { + Class: className + "1", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantCreation: implicitTenants, + }, + }, { + Class: className + "2", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantCreation: implicitTenants, + }, + }, + } + tenants := []string{"tenant1", "tenant2", "tenant3"} + for i := range classes { + helper.CreateClass(t, &classes[i]) + if !implicitTenants { + for k := range tenants { + helper.CreateTenants(t, classes[i].Class, []*models.Tenant{{Name: tenants[k]}}) + } + } + } + defer func() { + for i := range classes { + helper.DeleteClass(t, classes[i].Class) + } + }() + + var tenantObjects []*models.Object + + for i := 0; i < 9; i++ { + tenantObjects = append(tenantObjects, &models.Object{ + ID: strfmt.UUID(uuid.New().String()), + Class: classes[i%2].Class, + Tenant: tenants[i%len(tenants)], + }, + ) + } + helper.CreateObjectsBatch(t, tenantObjects) + + for _, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, obj.Tenant) + require.Nil(t, err) + assert.Equal(t, obj.ID, resp.ID) + assert.Equal(t, obj.Class, resp.Class) + } +} + +func testAddNonTenantBatchToMultiClass(t *testing.T, implicitTenants bool) { + className := "MultiTenantClassBatchFail" + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantCreation: implicitTenants, + }, + } + nonTenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass.Class, + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: 
testClass.Class, + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass.Class, + }, + } + + helper.CreateClass(t, &testClass) + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + if !implicitTenants { + helper.CreateTenants(t, className, []*models.Tenant{{Name: "randomTenant1"}}) + } + params := batch.NewBatchObjectsCreateParams(). + WithBody(batch.BatchObjectsCreateBody{ + Objects: nonTenantObjects, + }) + resp, err := helper.Client(t).Batch.BatchObjectsCreate(params, nil) + if implicitTenants { + require.Nil(t, resp) + require.NotNil(t, err) + batchErr := &batch.BatchObjectsCreateUnprocessableEntity{} + as := errors.As(err, &batchErr) + require.True(t, as) + require.NotNil(t, batchErr.Payload) + require.Len(t, batchErr.Payload.Error, 1) + require.Contains(t, batchErr.Payload.Error[0].Message, "empty tenant name") + } else { + require.NotNil(t, resp) + require.Nil(t, err) + for _, r := range resp.Payload { + require.NotEmpty(t, r.Result.Errors.Error[0].Message) + } + } +} + +func testAddBatchWithNonExistentTenant(t *testing.T, implicitTenants bool) { + className := "MultiTenantClassBatchFail" + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantCreation: implicitTenants, + }, + } + nonTenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass.Class, + Tenant: "something", + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: testClass.Class, + Tenant: "something", + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass.Class, + Tenant: "something", + }, + } + + helper.CreateClass(t, &testClass) + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + if !implicitTenants { + helper.CreateTenants(t, className, []*models.Tenant{{Name: "somethingElse"}}) + } + + params := batch.NewBatchObjectsCreateParams(). 
+ WithBody(batch.BatchObjectsCreateBody{ + Objects: nonTenantObjects, + }) + resp, err := helper.Client(t).Batch.BatchObjectsCreate(params, nil) + require.Nil(t, err) + for i := range resp.Payload { + if !implicitTenants { + require.NotNil(t, resp.Payload[i].Result.Errors) + } else { + require.Nil(t, resp.Payload[i].Result.Errors) + } + } +} + +func TestAddBatchToNonMultiClass(t *testing.T) { + className := "MultiTenantClassBatchFail" + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: false, + }, + } + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass.Class, + Tenant: "something", + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: testClass.Class, + Tenant: "something", + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass.Class, + Tenant: "something", + }, + } + + helper.CreateClass(t, &testClass) + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + params := batch.NewBatchObjectsCreateParams(). 
+ WithBody(batch.BatchObjectsCreateBody{ + Objects: tenantObjects, + }) + resp, err := helper.Client(t).Batch.BatchObjectsCreate(params, nil) + require.Nil(t, err) + for i := range resp.Payload { + require.NotNil(t, resp.Payload[i].Result.Errors) + } +} + +func assertAddedTenantObjects(t *testing.T, tenantName string, objects []*models.Object) { + t.Run("add and get tenant objects", func(t *testing.T) { + helper.CreateObjectsBatch(t, objects) + + for _, obj := range objects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantName) + require.Nil(t, err) + assert.Equal(t, obj.ID, resp.ID) + assert.Equal(t, obj.Class, resp.Class) + assert.Equal(t, obj.Tenant, resp.Tenant) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/batch_add_tenant_references_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/batch_add_tenant_references_test.go new file mode 100644 index 0000000000000000000000000000000000000000..0dc1fd63356af373fa9557e42d79254187258be2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/batch_add_tenant_references_test.go @@ -0,0 +1,450 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "testing" + + "github.com/google/uuid" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/schema/crossref" + "github.com/weaviate/weaviate/test/helper" +) + +func TestBatchAddTenantReferences(t *testing.T) { + className1 := "MultiTenantClass1" + className2 := "MultiTenantClass2" + className3 := "SingleTenantClass1" + className4 := "SingleTenantClass2" + tenantName1 := "Tenant1" + tenantName2 := "Tenant2" + mtRefProp1 := "relatedToMT1" + mtRefProp2 := "relatedToMT2" + stRefProp := "relatedToST" + mtClass1 := models.Class{ + Class: className1, + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + { + Name: mtRefProp1, + DataType: []string{className1}, + }, + { + Name: mtRefProp2, + DataType: []string{className2}, + }, + { + Name: stRefProp, + DataType: []string{className3}, + }, + }, + } + mtClass2 := models.Class{ + Class: className2, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + stClass1 := models.Class{ + Class: className3, + Properties: []*models.Property{ + { + Name: "stringProp", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + stClass2 := models.Class{ + Class: className4, + Properties: []*models.Property{ + { + Name: mtRefProp1, + DataType: []string{className1}, + }, + }, + } + mtObject1 := &models.Object{ + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: className1, + Properties: map[string]interface{}{ + "name": tenantName1, + }, + Tenant: tenantName1, + } + mtObject2DiffTenant := &models.Object{ + ID: 
"af90a7e3-53b3-4eb0-b395-10a04d217263", + Class: className2, + Properties: map[string]interface{}{ + "name": tenantName2, + }, + Tenant: tenantName2, + } + mtObject2SameTenant := &models.Object{ + ID: "4076df6b-0767-43a9-a0a4-2ec153bf262e", + Class: className2, + Properties: map[string]interface{}{ + "name": tenantName1, + }, + Tenant: tenantName1, + } + stObject1 := &models.Object{ + ID: "bea841c7-d689-4526-8af3-56c44b44274a", + Class: className3, + Properties: map[string]interface{}{ + "stringProp": "123", + }, + } + stObject2 := &models.Object{ + ID: "744f869a-7dcb-4fb5-8b0a-73075da1e116", + Class: className4, + } + + delClasses := func() { + helper.DeleteClass(t, className1) + helper.DeleteClass(t, className2) + helper.DeleteClass(t, className3) + helper.DeleteClass(t, className4) + } + delClasses() + defer delClasses() + + t.Run("create classes", func(t *testing.T) { + helper.CreateClass(t, &stClass1) + helper.CreateClass(t, &mtClass2) + helper.CreateClass(t, &mtClass1) + helper.CreateClass(t, &stClass2) + }) + + t.Run("create tenants", func(t *testing.T) { + helper.CreateTenants(t, className1, []*models.Tenant{{Name: tenantName1}}) + helper.CreateTenants(t, className2, []*models.Tenant{{Name: tenantName1}}) + helper.CreateTenants(t, className2, []*models.Tenant{{Name: tenantName2}}) + }) + + t.Run("add tenant objects", func(t *testing.T) { + helper.CreateObject(t, mtObject1) + helper.CreateObject(t, mtObject2DiffTenant) + helper.CreateObject(t, mtObject2SameTenant) + helper.CreateObject(t, stObject1) + helper.CreateObject(t, stObject2) + + t.Run("verify objects creation", func(t *testing.T) { + resp, err := helper.TenantObject(t, mtObject1.Class, mtObject1.ID, tenantName1) + require.Nil(t, err) + require.Equal(t, mtObject1.Class, resp.Class) + require.Equal(t, mtObject1.Properties, resp.Properties) + + resp, err = helper.TenantObject(t, mtObject2DiffTenant.Class, mtObject2DiffTenant.ID, tenantName2) + require.Nil(t, err) + require.Equal(t, 
mtObject2DiffTenant.Class, resp.Class) + require.Equal(t, mtObject2DiffTenant.Properties, resp.Properties) + + resp, err = helper.TenantObject(t, mtObject2SameTenant.Class, mtObject2SameTenant.ID, tenantName1) + require.Nil(t, err) + require.Equal(t, mtObject2SameTenant.Class, resp.Class) + require.Equal(t, mtObject2SameTenant.Properties, resp.Properties) + + resp, err = helper.GetObject(t, stObject1.Class, stObject1.ID) + require.Nil(t, err) + require.Equal(t, stObject1.Class, resp.Class) + + resp, err = helper.GetObject(t, stObject2.Class, stObject2.ID) + require.Nil(t, err) + require.Equal(t, stObject2.Class, resp.Class) + }) + }) + + t.Run("add tenant reference - same class and tenant", func(t *testing.T) { + refs := []*models.BatchReference{ + { + From: strfmt.URI(crossref.NewSource(schema.ClassName(className1), + schema.PropertyName(mtRefProp1), mtObject1.ID).String()), + To: strfmt.URI(crossref.NewLocalhost(className1, mtObject1.ID).String()), + Tenant: tenantName1, + }, + } + resp, err := helper.AddReferences(t, refs) + helper.CheckReferencesBatchResponse(t, resp, err) + + t.Run("verify object references", func(t *testing.T) { + resp, err := helper.TenantObject(t, mtObject1.Class, mtObject1.ID, tenantName1) + require.Nil(t, err) + require.Equal(t, mtObject1.Class, resp.Class) + require.Equal(t, mtObject1.ID, resp.ID) + relatedTo := resp.Properties.(map[string]interface{})[mtRefProp1].([]interface{}) + require.Len(t, relatedTo, 1) + beacon := relatedTo[0].(map[string]interface{})["beacon"].(string) + assert.Equal(t, helper.NewBeacon(className1, mtObject1.ID), strfmt.URI(beacon)) + }) + }) + + t.Run("add tenant reference - different MT class same tenant", func(t *testing.T) { + refs := []*models.BatchReference{ + { + From: strfmt.URI(crossref.NewSource(schema.ClassName(className1), + schema.PropertyName(mtRefProp2), mtObject1.ID).String()), + To: strfmt.URI(crossref.NewLocalhost(className2, mtObject2SameTenant.ID).String()), + Tenant: tenantName1, + }, + } + 
resp, err := helper.AddReferences(t, refs) + helper.CheckReferencesBatchResponse(t, resp, err) + + t.Run("verify object references", func(t *testing.T) { + resp, err := helper.TenantObject(t, mtObject1.Class, mtObject1.ID, tenantName1) + require.Nil(t, err) + require.Equal(t, mtObject1.Class, resp.Class) + require.Equal(t, mtObject1.ID, resp.ID) + relatedTo := resp.Properties.(map[string]interface{})[mtRefProp2].([]interface{}) + require.Len(t, relatedTo, 1) + beacon := relatedTo[0].(map[string]interface{})["beacon"].(string) + assert.Equal(t, helper.NewBeacon(className2, mtObject2SameTenant.ID), strfmt.URI(beacon)) + }) + }) + + t.Run("no references between different tenants", func(t *testing.T) { + refs := []*models.BatchReference{ + { + From: strfmt.URI(crossref.NewSource(schema.ClassName(className1), + schema.PropertyName(mtRefProp2), mtObject1.ID).String()), + To: strfmt.URI(crossref.NewLocalhost(className2, mtObject2DiffTenant.ID).String()), + Tenant: tenantName1, + }, + } + + resp, err := helper.AddReferences(t, refs) + require.Nil(t, err) + require.NotNil(t, resp) + require.Len(t, resp, 1) + require.Empty(t, resp[0].To) + require.Empty(t, resp[0].From) + require.NotNil(t, resp[0].Result) + require.NotNil(t, resp[0].Result.Errors) + require.Len(t, resp[0].Result.Errors.Error, 1) + require.NotNil(t, resp[0].Result.Errors.Error[0]) + expectedMsg := fmt.Sprintf(`target: object %s/%s not found for tenant %q`, className2, mtObject2DiffTenant.ID, tenantName1) + assert.Equal(t, expectedMsg, resp[0].Result.Errors.Error[0].Message) + }) + + t.Run("add tenant reference - from MT class to single tenant class", func(t *testing.T) { + refs := []*models.BatchReference{ + { + From: strfmt.URI(crossref.NewSource(schema.ClassName(className1), + schema.PropertyName(stRefProp), mtObject1.ID).String()), + To: strfmt.URI(crossref.NewLocalhost(className3, stObject1.ID).String()), + Tenant: tenantName1, + }, + } + resp, err := helper.AddReferences(t, refs) + 
helper.CheckReferencesBatchResponse(t, resp, err) + + t.Run("verify object references", func(t *testing.T) { + resp, err := helper.TenantObject(t, mtObject1.Class, mtObject1.ID, tenantName1) + require.Nil(t, err) + require.Equal(t, mtObject1.Class, resp.Class) + require.Equal(t, mtObject1.ID, resp.ID) + relatedTo := resp.Properties.(map[string]interface{})[stRefProp].([]interface{}) + require.Len(t, relatedTo, 1) + beacon := relatedTo[0].(map[string]interface{})["beacon"].(string) + assert.Equal(t, helper.NewBeacon(className3, stObject1.ID), strfmt.URI(beacon)) + }) + }) + + t.Run("no references from single tenant class to MT class", func(t *testing.T) { + refs := []*models.BatchReference{ + { + From: strfmt.URI(crossref.NewSource(schema.ClassName(className4), + schema.PropertyName(mtRefProp1), stObject2.ID).String()), + To: strfmt.URI(crossref.NewLocalhost(className1, mtObject1.ID).String()), + }, + } + + resp, err := helper.AddReferences(t, refs) + require.Nil(t, err) + require.NotNil(t, resp) + require.Len(t, resp, 1) + require.Empty(t, resp[0].To) + require.Empty(t, resp[0].From) + require.NotNil(t, resp[0].Result) + require.NotNil(t, resp[0].Result.Errors) + require.Len(t, resp[0].Result.Errors.Error, 1) + require.NotNil(t, resp[0].Result.Errors.Error[0]) + expectedMsg := "invalid reference: cannot reference a multi-tenant enabled class from a non multi-tenant enabled class" + assert.Equal(t, expectedMsg, resp[0].Result.Errors.Error[0].Message) + }) +} + +func TestAddMultipleTenantsForBatch(t *testing.T) { + tenants := []string{"tenant1", "tenant2"} + classNames := []string{"MultiTenantRefs1", "MultiTenantRefs2", "MultiTenantRefs3"} + refProps := []string{"refPropST", "refPropOtherMT", "refPropSelf"} + classes := []models.Class{ + {Class: classNames[0]}, + { + Class: classNames[1], + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + }, + { + Class: classNames[2], + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + Properties: 
[]*models.Property{ + { + Name: refProps[0], + DataType: []string{classNames[0]}, + }, + { + Name: refProps[1], + DataType: []string{classNames[1]}, + }, + { + Name: refProps[2], + DataType: []string{classNames[2]}, + }, + }, + }, + } + defer func() { + for i := range classes { + helper.DeleteClass(t, classes[i].Class) + } + }() + for i := range classes { + helper.CreateClass(t, &classes[i]) + } + + for _, class := range classes[1:] { + for k := range tenants { + helper.CreateTenants(t, class.Class, []*models.Tenant{{Name: tenants[k]}}) + } + } + + var tenantObjects []*models.Object + objMap := make(map[string][]int) + + for i := 0; i < 9; i++ { + obj := &models.Object{ + ID: strfmt.UUID(uuid.New().String()), + Class: classes[i%len(classes)].Class, + } + if i%len(classes) > 0 { // only for MMT class + obj.Tenant = tenants[i%len(tenants)] + } + tenantObjects = append(tenantObjects, obj) + objMap[obj.Class] = append(objMap[obj.Class], i) + } + helper.CreateObjectsBatch(t, tenantObjects) + + t.Run("refs between same class", func(t *testing.T) { + var refs []*models.BatchReference + for _, objectIndex := range objMap[classNames[2]] { + obj := tenantObjects[objectIndex] + refs = append(refs, &models.BatchReference{ + From: strfmt.URI(crossref.NewSource(schema.ClassName(obj.Class), + schema.PropertyName(refProps[2]), obj.ID).String()), + To: strfmt.URI(crossref.NewLocalhost(classNames[2], obj.ID).String()), + Tenant: obj.Tenant, + }, + ) + } + resp, err := helper.AddReferences(t, refs) + helper.CheckReferencesBatchResponse(t, resp, err) + + // verify refs + for _, objectIndex := range objMap[classNames[2]] { + obj := tenantObjects[objectIndex] + + resp, err := helper.TenantObject(t, classNames[2], obj.ID, obj.Tenant) + require.Nil(t, err) + require.Equal(t, obj.Class, resp.Class) + require.Equal(t, fmt.Sprintf("weaviate://localhost/%s/%v", obj.Class, obj.ID), resp.Properties.(map[string]interface{})[refProps[2]].([]interface{})[0].(map[string]interface{})["beacon"]) + 
require.Equal(t, obj.Tenant, resp.Tenant) + } + }) + + t.Run("refs between multiple classes class", func(t *testing.T) { + var refs []*models.BatchReference + for i, objectIndexClass2 := range objMap[classNames[2]] { + objClass2 := tenantObjects[objectIndexClass2] + // refs between two MMT classes + if len(objMap[classNames[1]]) > i { + objClass1 := tenantObjects[objMap[classNames[1]][i]] + if objClass2.Tenant == objClass1.Tenant { + refs = append(refs, &models.BatchReference{ + From: strfmt.URI(crossref.NewSource(schema.ClassName(classNames[2]), + schema.PropertyName(refProps[1]), objClass2.ID).String()), + To: strfmt.URI(crossref.NewLocalhost(classNames[1], objClass1.ID).String()), + Tenant: objClass2.Tenant, + }) + } + } + + // refs between MMT and non MMT class + if len(objMap[classNames[0]]) > i { + objClass0 := tenantObjects[objMap[classNames[0]][i]] + refs = append(refs, &models.BatchReference{ + From: strfmt.URI(crossref.NewSource(schema.ClassName(classNames[2]), + schema.PropertyName(refProps[0]), objClass2.ID).String()), + To: strfmt.URI(crossref.NewLocalhost(classNames[0], objClass0.ID).String()), + Tenant: objClass2.Tenant, + }) + } + } + resp, err := helper.AddReferences(t, refs) + helper.CheckReferencesBatchResponse(t, resp, err) + + // verify refs + for i, objectIndexClass2 := range objMap[classNames[2]] { + objClass2 := tenantObjects[objectIndexClass2] + // refs between two MMT classes + if len(objMap[classNames[1]]) > i { + objClass1 := tenantObjects[objMap[classNames[1]][i]] + if objClass2.Tenant != objClass1.Tenant { + continue + } + + resp, err := helper.TenantObject(t, classNames[2], objClass2.ID, objClass2.Tenant) + require.Nil(t, err) + require.Equal(t, objClass2.Class, resp.Class) + require.Equal(t, fmt.Sprintf("weaviate://localhost/%s/%v", objClass1.Class, objClass1.ID), resp.Properties.(map[string]interface{})[refProps[1]].([]interface{})[0].(map[string]interface{})["beacon"]) + require.Equal(t, objClass2.Tenant, resp.Tenant) + + } + + // 
refs between MMT and non MMT class + if len(objMap[classNames[0]]) > i { + objClass0 := tenantObjects[objMap[classNames[0]][i]] + refs = append(refs, &models.BatchReference{ + From: strfmt.URI(crossref.NewSource(schema.ClassName(classNames[2]), + schema.PropertyName(refProps[0]), objClass2.ID).String()), + To: strfmt.URI(crossref.NewLocalhost(classNames[0], objClass0.ID).String()), + Tenant: objClass2.Tenant, + }) + } + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/batch_delete_tenant_objects_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/batch_delete_tenant_objects_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1f522bdb35ea43e08732341dddedee31797875e8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/batch_delete_tenant_objects_test.go @@ -0,0 +1,118 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func TestBatchDeleteTenantObjects(t *testing.T) { + className := "MultiTenantClass" + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantName := "Tenant1" + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + } + + defer func() { + helper.DeleteClass(t, className) + }() + + helper.CreateClass(t, &testClass) + helper.CreateTenants(t, className, []*models.Tenant{{Name: tenantName}}) + + t.Run("add tenant objects", func(t *testing.T) { + helper.CreateObjectsBatch(t, tenantObjects) + + t.Run("verify tenant objects", func(t *testing.T) { + for _, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantName) + require.Nil(t, err) + require.Equal(t, obj.ID, resp.ID) + require.Equal(t, obj.Class, resp.Class) + require.Equal(t, obj.Properties, resp.Properties) + } + }) + }) + + t.Run("batch delete tenant objects", func(t *testing.T) { + glob := "*" + where := 
models.WhereFilter{ + Operator: filters.OperatorLike.Name(), + Path: []string{"id"}, + ValueString: &glob, + } + match := models.BatchDeleteMatch{ + Class: className, + Where: &where, + } + batch := models.BatchDelete{Match: &match} + resp, err := helper.DeleteTenantObjectsBatch(t, &batch, tenantName) + require.Nil(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Results) + assert.Nil(t, resp.Results.Objects) + assert.Equal(t, int64(3), resp.Results.Successful) + assert.Equal(t, int64(0), resp.Results.Failed) + + t.Run("verify tenant object deletion", func(t *testing.T) { + for _, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantName) + assert.Nil(t, resp) + assert.NotNil(t, err) + assert.EqualError(t, objects.NewObjectsClassGetNotFound(), err.Error()) + } + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/class_creation_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/class_creation_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8807bb848f51b50f4f4c699a3334252297269691 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/class_creation_test.go @@ -0,0 +1,145 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func TestClassMultiTenancyDisabled(t *testing.T) { + testClass := models.Class{ + Class: "ClassDisableMultiTenancy", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: false, + }, + } + objUUID := strfmt.UUID("0927a1e0-398e-4e76-91fb-04a7a8f0405c") + + helper.CreateClass(t, &testClass) + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + helper.CreateObjectsBatch(t, []*models.Object{{ + ID: objUUID, + Class: testClass.Class, + }}) + + object, err := helper.GetObject(t, testClass.Class, objUUID) + require.Nil(t, err) + require.NotNil(t, object) + require.Equal(t, objUUID, object.ID) +} + +func TestClassMultiTenancyDisabledSchemaPrint(t *testing.T) { + testClass := models.Class{Class: "ClassDisableMultiTenancy"} + helper.CreateClass(t, &testClass) + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + classReturn := helper.GetClass(t, testClass.Class) + require.NotNil(t, classReturn.MultiTenancyConfig) +} + +func TestClassMultiTenancyToggleAutoTenant(t *testing.T) { + createObjectToCheckAutoTenant := func(t *testing.T, object *models.Object) *models.ObjectsGetResponse { + t.Helper() + params := batch.NewBatchObjectsCreateParams(). 
+ WithBody(batch.BatchObjectsCreateBody{ + Objects: []*models.Object{object}, + }) + resp, err := helper.Client(t).Batch.BatchObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + require.NotNil(t, resp) + require.Len(t, resp.Payload, 1) + return resp.Payload[0] + } + + testClass := models.Class{ + Class: "AutoTenantToggle", + MultiTenancyConfig: &models.MultiTenancyConfig{ + AutoTenantCreation: false, + Enabled: true, + }, + } + helper.CreateClass(t, &testClass) + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + t.Run("autotenant not set, fail to create object with nonexistent tenant", func(t *testing.T) { + resp := createObjectToCheckAutoTenant(t, + &models.Object{ + Class: "AutoTenantToggle", + Properties: map[string]interface{}{ + "stringProp": "value", + }, + Tenant: "non-existent", + }, + ) + + require.NotNil(t, resp.Result) + require.NotNil(t, resp.Result.Errors) + require.Len(t, resp.Result.Errors.Error, 1) + assert.Equal(t, resp.Result.Errors.Error[0].Message, `tenant not found: "non-existent"`) + }) + + t.Run("autotenant toggled on, successfully create object", func(t *testing.T) { + fetched := helper.GetClass(t, testClass.Class) + fetched.MultiTenancyConfig.AutoTenantCreation = true + helper.UpdateClass(t, fetched) + + resp := createObjectToCheckAutoTenant(t, + &models.Object{ + Class: "AutoTenantToggle", + Properties: map[string]interface{}{ + "stringProp": "value", + }, + Tenant: "now-exists", + }, + ) + + require.NotNil(t, resp.Result) + require.Nil(t, resp.Result.Errors) + success := "SUCCESS" + assert.EqualValues(t, resp.Result.Status, &success) + }) + + t.Run("autotenant toggled back off, fail to create object with nonexistent tenant", func(t *testing.T) { + fetched := helper.GetClass(t, testClass.Class) + fetched.MultiTenancyConfig.AutoTenantCreation = false + helper.UpdateClass(t, fetched) + + resp := createObjectToCheckAutoTenant(t, + &models.Object{ + Class: "AutoTenantToggle", + Properties: 
map[string]interface{}{ + "stringProp": "value", + }, + Tenant: "still-nonexistent", + }, + ) + + require.NotNil(t, resp.Result) + require.NotNil(t, resp.Result.Errors) + require.Len(t, resp.Result.Errors.Error, 1) + assert.Equal(t, resp.Result.Errors.Error[0].Message, `tenant not found: "still-nonexistent"`) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/create_delete_update_tenants_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/create_delete_update_tenants_test.go new file mode 100644 index 0000000000000000000000000000000000000000..93e2fc582224bd974ae4f273327ad8eca57f53ef --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/create_delete_update_tenants_test.go @@ -0,0 +1,380 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "errors" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/nodes" + eschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/verbosity" + "github.com/weaviate/weaviate/test/helper" + uschema "github.com/weaviate/weaviate/usecases/schema" +) + +var verbose = verbosity.OutputVerbose + +func TestCreateTenants(t *testing.T) { + testClass := models.Class{ + Class: "MultiTenantClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + + t.Run("Create tenant", func(Z *testing.T) { + expectedTenants := []string{ + "Tenant1", "Tenant2", "Tenant3", + } + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + helper.CreateClass(t, &testClass) + + tenants := make([]*models.Tenant, len(expectedTenants)) + for i := range tenants { + tenants[i] = &models.Tenant{ + Name: expectedTenants[i], + ActivityStatus: models.TenantActivityStatusHOT, + } + } + helper.CreateTenants(t, testClass.Class, tenants) + + respGet, errGet := helper.GetTenants(t, testClass.Class) + require.Nil(t, errGet) + require.NotNil(t, respGet) + require.ElementsMatch(t, respGet.Payload, tenants) + + for _, tenant := range expectedTenants { + resp, err := helper.TenantExists(t, testClass.Class, tenant) + require.Nil(t, err) + require.True(t, resp.IsSuccess()) + } + + resp, err := helper.Client(t).Nodes.NodesGet(nodes.NewNodesGetParams().WithOutput(&verbose), nil) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.NotNil(t, resp.Payload.Nodes) + require.Len(t, resp.Payload.Nodes, 1) + require.Len(t, resp.Payload.Nodes[0].Shards, 3) + + var foundTenants []string + for 
_, found := range resp.Payload.Nodes[0].Shards { + assert.Equal(t, testClass.Class, found.Class) + // Creating a tenant alone should not result in a loaded shard. + // This check also ensures that the nods api did not cause a + // force load. + assert.False(t, found.Loaded) + foundTenants = append(foundTenants, found.Name) + } + assert.ElementsMatch(t, expectedTenants, foundTenants) + }) + + t.Run("Create duplicate tenant once", func(t *testing.T) { + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + helper.CreateClass(t, &testClass) + err := helper.CreateTenantsReturnError(t, testClass.Class, []*models.Tenant{{Name: "DoubleTenant"}, {Name: "DoubleTenant"}}) + require.NotNil(t, err) + + err = helper.CreateTenantsReturnError(t, testClass.Class, []*models.Tenant{{Name: "DoubleTenant"}}) + require.Nil(t, err) + // only added once + respGet, errGet := helper.GetTenants(t, testClass.Class) + require.Nil(t, errGet) + require.NotNil(t, respGet) + require.Len(t, respGet.Payload, 1) + }) + + t.Run("Create same tenant multiple times", func(Z *testing.T) { + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + helper.CreateClass(t, &testClass) + helper.CreateTenants(t, testClass.Class, []*models.Tenant{{Name: "AddTenantAgain"}}) + + // idempotent operation + err := helper.CreateTenantsReturnError(t, testClass.Class, []*models.Tenant{{Name: "AddTenantAgain"}}) + require.Nil(t, err) + }) + + t.Run("Fail to create tenant with forbidden activity status:", func(Z *testing.T) { + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + helper.CreateClass(t, &testClass) + + activityStatuses := []string{ + models.TenantActivityStatusFROZEN, + models.TenantActivityStatusFREEZING, + models.TenantActivityStatusUNFREEZING, + } + for _, activityStatus := range activityStatuses { + Z.Run(activityStatus, func(z *testing.T) { + err := helper.CreateTenantsReturnError(t, testClass.Class, []*models.Tenant{{Name: "tenant", ActivityStatus: activityStatus}}) + 
require.NotNil(t, err) + }) + } + }) + + t.Run("Create and update more than 100 tenant", func(Z *testing.T) { + expectedTenants := make([]string, 101) + + for idx := 0; idx < 101; idx++ { + expectedTenants[idx] = fmt.Sprintf("Tenant%d", idx) + } + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + helper.CreateClass(t, &testClass) + + tenants := make([]*models.Tenant, len(expectedTenants)) + + for i := range tenants { + tenants[i] = &models.Tenant{ + Name: expectedTenants[i], + ActivityStatus: models.TenantActivityStatusCOLD, + } + } + + err := helper.CreateTenantsReturnError(t, testClass.Class, tenants) + require.Nil(t, err) + + err = helper.UpdateTenantsReturnError(t, testClass.Class, tenants) + require.NotNil(t, err) + ee := &eschema.TenantsUpdateUnprocessableEntity{} + require.True(t, errors.As(err, &ee)) + require.Equal(t, uschema.ErrMsgMaxAllowedTenants, ee.Payload.Error[0].Message) + }) + + t.Run("Create same tenant with different status", func(Z *testing.T) { + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + helper.CreateClass(t, &testClass) + + err := helper.CreateTenantsReturnError(t, testClass.Class, []*models.Tenant{ + { + Name: "Tenant1", + ActivityStatus: models.TenantActivityStatusCOLD, + }, + { + Name: "Tenant1", + ActivityStatus: models.TenantActivityStatusHOT, + }, + }) + require.NotNil(t, err) + ee := &eschema.TenantsCreateUnprocessableEntity{} + as := errors.As(err, &ee) + require.True(t, as) + require.Contains(t, ee.Payload.Error[0].Message, "existed multiple times") + }) +} + +func TestDeleteTenants(t *testing.T) { + testClass := models.Class{ + Class: "MultiTenantClassDelete", + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + } + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + helper.CreateClass(t, &testClass) + + tenants := []*models.Tenant{ + {Name: "tenant1"}, + {Name: "tenant2"}, + {Name: "tenant3"}, + {Name: "tenant4"}, + } + helper.CreateTenants(t, testClass.Class, 
tenants) + + t.Run("Delete same tenant multiple times", func(t *testing.T) { + err := helper.DeleteTenants(t, testClass.Class, []string{"tenant4"}) + require.Nil(t, err) + + // deleted once + resp, err := helper.Client(t).Nodes.NodesGet(nodes.NewNodesGetParams().WithOutput(&verbose), nil) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.NotNil(t, resp.Payload.Nodes) + require.Len(t, resp.Payload.Nodes, 1) + for _, shard := range resp.Payload.Nodes[0].Shards { + // Creating a tenant alone should not result in a loaded shard. + // This check also ensures that the nods api did not cause a + // force load. + assert.False(t, shard.Loaded) + assert.NotEqual(t, "tenant4", shard.Name) + } + respExist, errExist := helper.TenantExists(t, testClass.Class, "tenant4") + require.Nil(t, respExist) + require.NotNil(t, errExist) + + // idempotent operation + err = helper.DeleteTenants(t, testClass.Class, []string{"tenant4"}) + require.Nil(t, err) + }) + + t.Run("Delete duplicate tenant once", func(Z *testing.T) { + err := helper.DeleteTenants(t, testClass.Class, []string{"tenant1", "tenant1"}) + // idempotent operation + require.Nil(t, err) + + // deleted once + resp, err := helper.Client(t).Nodes.NodesGet(nodes.NewNodesGetParams().WithOutput(&verbose), nil) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.NotNil(t, resp.Payload.Nodes) + require.Len(t, resp.Payload.Nodes, 1) + require.Len(t, resp.Payload.Nodes[0].Shards, 2) + }) + + t.Run("Delete non-existent tenant alongside existing", func(Z *testing.T) { + err := helper.DeleteTenants(t, testClass.Class, []string{"tenant1", "tenant5"}) + require.Nil(t, err) + + // idempotent - deleting multiple times works - tenant1 is removed + resp, err := helper.Client(t).Nodes.NodesGet(nodes.NewNodesGetParams().WithOutput(&verbose), nil) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.NotNil(t, resp.Payload.Nodes) + require.Len(t, resp.Payload.Nodes, 1) + require.Len(t, 
resp.Payload.Nodes[0].Shards, 2) + }) + + t.Run("Delete tenants", func(Z *testing.T) { + err := helper.DeleteTenants(t, testClass.Class, []string{"tenant1", "tenant3"}) + require.Nil(t, err) + + // successfully deleted + resp, err := helper.Client(t).Nodes.NodesGet(nodes.NewNodesGetParams().WithOutput(&verbose), nil) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + require.NotNil(t, resp.Payload.Nodes) + require.Len(t, resp.Payload.Nodes, 1) + require.Len(t, resp.Payload.Nodes[0].Shards, 1) + }) +} + +func TestTenantsNonMultiTenant(t *testing.T) { + testClass := models.Class{ + Class: "TenantsNoMultiClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: false, + }, + } + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + helper.CreateClass(t, &testClass) + + err := helper.CreateTenantsReturnError(t, testClass.Class, []*models.Tenant{{Name: "doesNotMatter"}}) + require.NotNil(t, err) + + _, err = helper.GetTenants(t, testClass.Class) + require.NotNil(t, err) + + err = helper.DeleteTenants(t, testClass.Class, []string{"doesNotMatter"}) + require.NotNil(t, err) +} + +func TestTenantsClassDoesNotExist(t *testing.T) { + err := helper.CreateTenantsReturnError(t, "DoesNotExist", []*models.Tenant{{Name: "doesNotMatter"}}) + require.NotNil(t, err) + + _, err = helper.GetTenants(t, "DoesNotExist") + require.NotNil(t, err) + + _, err = helper.TenantExists(t, "DoesNotExist", "SomeTenant") + require.NotNil(t, err) + + err = helper.DeleteTenants(t, "DoesNotExist", []string{"doesNotMatter"}) + require.NotNil(t, err) +} + +// Testing of tenant updating from HOT/COLD to FROZEN is handled in test/modules/offload-s3 +func TestUpdateTenants(t *testing.T) { + testClass := models.Class{ + Class: "MultiTenantClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + + t.Run("Update tenant to COLD from HOT", func(Z 
*testing.T) { + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + helper.CreateClass(t, &testClass) + + helper.CreateTenants(t, testClass.Class, []*models.Tenant{{Name: "tenant", ActivityStatus: models.TenantActivityStatusHOT}}) + + err := helper.UpdateTenantsReturnError(t, testClass.Class, []*models.Tenant{{Name: "tenant", ActivityStatus: models.TenantActivityStatusCOLD}}) + require.Nil(t, err) + }) + + t.Run("Update tenant to HOT from COLD", func(Z *testing.T) { + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + helper.CreateClass(t, &testClass) + + helper.CreateTenants(t, testClass.Class, []*models.Tenant{{Name: "tenant", ActivityStatus: models.TenantActivityStatusCOLD}}) + + err := helper.UpdateTenantsReturnError(t, testClass.Class, []*models.Tenant{{Name: "tenant", ActivityStatus: models.TenantActivityStatusHOT}}) + require.Nil(t, err) + }) + + t.Run("Fail to update tenant with forbidden activity status:", func(Z *testing.T) { + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + helper.CreateClass(t, &testClass) + + helper.CreateTenants(t, testClass.Class, []*models.Tenant{{Name: "tenant", ActivityStatus: models.TenantActivityStatusHOT}}) + + activityStatuses := []string{ + models.TenantActivityStatusFREEZING, + models.TenantActivityStatusUNFREEZING, + } + for _, activityStatus := range activityStatuses { + Z.Run(activityStatus, func(z *testing.T) { + err := helper.UpdateTenantsReturnError(t, testClass.Class, []*models.Tenant{{Name: "tenant", ActivityStatus: activityStatus}}) + require.NotNil(t, err) + }) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/delete_tenant_objects_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/delete_tenant_objects_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c5d1c87c63f21e3f5ae399c8346ff89c0f09cbeb --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/delete_tenant_objects_test.go @@ -0,0 +1,114 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/cluster/router/types" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func TestDeleteTenantObjects(t *testing.T) { + testClass := models.Class{ + Class: "MultiTenantClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantNames := []string{ + "Tenant1", "Tenant2", "Tenant3", + } + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[0], + }, + Tenant: tenantNames[0], + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[1], + }, + Tenant: tenantNames[1], + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[2], + }, + Tenant: tenantNames[2], + }, + } + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + t.Run("create class with multi-tenancy enabled", func(t *testing.T) { + helper.CreateClass(t, &testClass) + }) + + t.Run("create tenants", func(t *testing.T) { + tenants := make([]*models.Tenant, len(tenantNames)) + for i := range 
tenants { + tenants[i] = &models.Tenant{Name: tenantNames[i]} + } + helper.CreateTenants(t, testClass.Class, tenants) + }) + + t.Run("add tenant objects", func(t *testing.T) { + for _, obj := range tenantObjects { + helper.CreateObject(t, obj) + } + + t.Run("verify tenant objects creation", func(t *testing.T) { + for i, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantNames[i]) + require.Nil(t, err) + require.Equal(t, obj.ID, resp.ID) + require.Equal(t, obj.Class, resp.Class) + require.Equal(t, obj.Properties, resp.Properties) + } + }) + }) + + t.Run("delete tenant objects", func(t *testing.T) { + for i, obj := range tenantObjects { + helper.DeleteTenantObject(t, obj.Class, obj.ID, tenantNames[i], types.ConsistencyLevelAll) + } + }) + + t.Run("assert object deletion", func(t *testing.T) { + for i, obj := range tenantObjects { + _, err := helper.TenantObject(t, obj.Class, obj.ID, tenantNames[i]) + require.NotNil(t, err) + assert.EqualError(t, &objects.ObjectsClassGetNotFound{}, err.Error()) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/get_shards_status_with_tenant_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/get_shards_status_with_tenant_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ca19caed1c82a856a8323b1ca207c36d9560f72b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/get_shards_status_with_tenant_test.go @@ -0,0 +1,81 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func TestGetShardsStatusWithTenant(t *testing.T) { + testClass := models.Class{ + Class: "ClassGetShardsStatusWithTenant", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + } + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + helper.CreateClass(t, &testClass) + helper.CreateTenants(t, testClass.Class, []*models.Tenant{ + { + Name: "tenant1", + }, + { + Name: "tenant2", + }, + }) + + t.Run("get shards status with tenant string", func(t *testing.T) { + tenant := "tenant1" + client := helper.Client(t) + res, err := client.Schema.SchemaObjectsShardsGet( + schema. + NewSchemaObjectsShardsGetParams(). + WithClassName(testClass.Class). + WithTenant(&tenant), + nil, + ) + helper.AssertRequestOk(t, res, err, nil) + }) + + t.Run("get shards status with empty tenant string", func(t *testing.T) { + tenant := "" + client := helper.Client(t) + res, err := client.Schema.SchemaObjectsShardsGet( + schema. + NewSchemaObjectsShardsGetParams(). + WithClassName(testClass.Class). + WithTenant(&tenant), + nil, + ) + helper.AssertRequestOk(t, res, err, nil) + }) + + t.Run("get shards status with nil pointer", func(t *testing.T) { + client := helper.Client(t) + res, err := client.Schema.SchemaObjectsShardsGet( + schema. + NewSchemaObjectsShardsGetParams(). + WithClassName(testClass.Class). 
+ WithTenant(nil), + nil, + ) + helper.AssertRequestOk(t, res, err, nil) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/get_tenant_objects_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/get_tenant_objects_test.go new file mode 100644 index 0000000000000000000000000000000000000000..df6160e819237f1c45011ac04cc320518f59eaed --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/get_tenant_objects_test.go @@ -0,0 +1,474 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func TestGetTenantObjects(t *testing.T) { + testClass := models.Class{ + Class: "MultiTenantClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantNames := []string{ + "Tenant1", "Tenant2", "Tenant3", + } + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[0], + }, + Tenant: tenantNames[0], + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[1], + }, + Tenant: tenantNames[1], + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": 
tenantNames[2], + }, + Tenant: tenantNames[2], + }, + } + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + t.Run("create class with multi-tenancy enabled", func(t *testing.T) { + helper.CreateClass(t, &testClass) + }) + + t.Run("create tenants", func(t *testing.T) { + tenants := make([]*models.Tenant, len(tenantNames)) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenantNames[i]} + } + helper.CreateTenants(t, testClass.Class, tenants) + }) + + t.Run("add tenant objects", func(t *testing.T) { + for _, obj := range tenantObjects { + helper.CreateObject(t, obj) + } + }) + + t.Run("get tenant objects", func(t *testing.T) { + for i, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantNames[i]) + require.Nil(t, err) + assert.Equal(t, obj.ID, resp.ID) + assert.Equal(t, obj.Class, resp.Class) + assert.Equal(t, obj.Properties, resp.Properties) + } + }) + + t.Run("get tenant objects with include", func(t *testing.T) { + for i, obj := range tenantObjects { + resp, err := helper.TenantObjectWithInclude(t, obj.Class, obj.ID, tenantNames[i], "vector") + require.Nil(t, err) + assert.Equal(t, obj.ID, resp.ID) + assert.Equal(t, obj.Class, resp.Class) + assert.Equal(t, obj.Properties, resp.Properties) + } + }) +} + +func TestListTenantObjects(t *testing.T) { + tenantNames := []string{ + "Tenant1", "Tenant2", + } + + classMT_1 := models.Class{ + Class: "MultiTenantClass1", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + classMT_2 := models.Class{ + Class: "MultiTenantClass2", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + classMT_3 := models.Class{ + Class: "SingleTenantClass3", + MultiTenancyConfig: &models.MultiTenancyConfig{ + 
Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + classMT_4 := models.Class{ + Class: "SingleTenantClass4", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + classNonMT_1 := models.Class{ + Class: "NonTenantClass1", + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + classNonMT_2 := models.Class{ + Class: "NonTenantClass2", + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + + objectsMT_T1 := []*models.Object{ + { + ID: "b1d19f8a-2158-4c41-b648-ba77a0ea7074", + Class: classMT_1.Class, + Properties: map[string]interface{}{ + "name": "Obj1_Class1_Tenant1", + }, + Tenant: tenantNames[0], + }, + { + ID: "a95c027c-07fb-4175-b726-4d5cfd55a7cf", + Class: classMT_1.Class, + Properties: map[string]interface{}{ + "name": "Obj2_Class1_Tenant1", + }, + Tenant: tenantNames[0], + }, + { + ID: "026890f5-8623-4d31-b295-b2820a81b85a", + Class: classMT_2.Class, + Properties: map[string]interface{}{ + "name": "Obj3_Class2_Tenant1", + }, + Tenant: tenantNames[0], + }, + { + ID: "d697d6b6-d7e6-47e6-a268-42e917b614e1", + Class: classMT_3.Class, + Properties: map[string]interface{}{ + "name": "Obj4_Class3_Tenant1", + }, + Tenant: tenantNames[0], + }, + } + objectsMT_T2 := []*models.Object{ + { + ID: "7baead88-a42b-4876-a185-e0ccc61c58ca", + Class: classMT_1.Class, + Properties: map[string]interface{}{ + "name": "Obj1_Class1_Tenant2", + }, + Tenant: tenantNames[1], + }, + { + ID: "7fa1fd17-a883-465a-ae22-44f103250b27", + Class: classMT_2.Class, + Properties: map[string]interface{}{ + "name": "Obj2_Class2_Tenant2", + }, + Tenant: tenantNames[1], + }, + { + ID: "fd4ce87a-8034-4e27-8d47-539fa9dde1f3", + Class: classMT_2.Class, + Properties: 
map[string]interface{}{ + "name": "Obj3_Class2_Tenant2", + }, + Tenant: tenantNames[1], + }, + { + ID: "b33d8f4c-30f9-426d-94a5-fa256f3fb5e7", + Class: classMT_4.Class, + Properties: map[string]interface{}{ + "name": "Obj4_Class4_Tenant2", + }, + Tenant: tenantNames[1], + }, + } + objectsNonMT := []*models.Object{ + { + ID: "6f019424-bacf-4539-b1be-fc1d3eccb50a", + Class: classNonMT_1.Class, + Properties: map[string]interface{}{ + "name": "Obj1_NonTenant1", + }, + }, + { + ID: "8d02b16c-478c-4cae-9384-3b686bae0f4e", + Class: classNonMT_1.Class, + Properties: map[string]interface{}{ + "name": "Obj2_NonTenant1", + }, + }, + { + ID: "865a820a-c325-4d10-8d8c-4b991bc43778", + Class: classNonMT_2.Class, + Properties: map[string]interface{}{ + "name": "Obj3_NonTenant2", + }, + }, + } + + defer func() { + helper.DeleteClass(t, classMT_1.Class) + helper.DeleteClass(t, classMT_2.Class) + helper.DeleteClass(t, classMT_3.Class) + helper.DeleteClass(t, classMT_4.Class) + helper.DeleteClass(t, classNonMT_1.Class) + helper.DeleteClass(t, classNonMT_2.Class) + }() + + extractIds := func(objs []*models.Object) []string { + ids := make([]string, len(objs)) + for i, obj := range objs { + ids[i] = obj.ID.String() + } + return ids + } + + t.Run("create MT and non-MT classes", func(t *testing.T) { + helper.CreateClass(t, &classMT_1) + helper.CreateClass(t, &classMT_2) + helper.CreateClass(t, &classMT_3) + helper.CreateClass(t, &classMT_4) + helper.CreateClass(t, &classNonMT_1) + helper.CreateClass(t, &classNonMT_2) + }) + + t.Run("create tenants for MT classes", func(t *testing.T) { + tenants := make([]*models.Tenant, len(tenantNames)) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenantNames[i]} + } + helper.CreateTenants(t, classMT_1.Class, tenants) + helper.CreateTenants(t, classMT_2.Class, tenants) + helper.CreateTenants(t, classMT_3.Class, tenants[:1]) + helper.CreateTenants(t, classMT_4.Class, tenants[1:]) + }) + + t.Run("add objects", func(t *testing.T) { + 
objects := append(objectsMT_T1, objectsMT_T2...) + objects = append(objects, objectsNonMT...) + + helper.CreateObjectsBatch(t, objects) + }) + + t.Run("list objects for tenant 1", func(t *testing.T) { + t.Run("no class", func(t *testing.T) { + res, err := helper.TenantListObjects(t, "", tenantNames[0]) + require.Nil(t, err) + require.NotNil(t, res) + require.Equal(t, int64(4), res.TotalResults) + assert.ElementsMatch(t, []string{ + "b1d19f8a-2158-4c41-b648-ba77a0ea7074", + "a95c027c-07fb-4175-b726-4d5cfd55a7cf", + "026890f5-8623-4d31-b295-b2820a81b85a", + "d697d6b6-d7e6-47e6-a268-42e917b614e1", + }, extractIds(res.Objects)) + }) + t.Run("classMT_T1T2_1", func(t *testing.T) { + res, err := helper.TenantListObjects(t, classMT_1.Class, tenantNames[0]) + + require.Nil(t, err) + require.NotNil(t, res) + require.Equal(t, int64(2), res.TotalResults) + assert.ElementsMatch(t, []string{ + "b1d19f8a-2158-4c41-b648-ba77a0ea7074", + "a95c027c-07fb-4175-b726-4d5cfd55a7cf", + }, extractIds(res.Objects)) + }) + t.Run("classMT_T1T2_2", func(t *testing.T) { + res, err := helper.TenantListObjects(t, classMT_2.Class, tenantNames[0]) + + require.Nil(t, err) + require.NotNil(t, res) + require.Equal(t, int64(1), res.TotalResults) + assert.ElementsMatch(t, []string{ + "026890f5-8623-4d31-b295-b2820a81b85a", + }, extractIds(res.Objects)) + }) + + t.Run("classMT_T1", func(t *testing.T) { + res, err := helper.TenantListObjects(t, classMT_3.Class, tenantNames[0]) + + require.Nil(t, err) + require.NotNil(t, res) + require.Equal(t, int64(1), res.TotalResults) + assert.ElementsMatch(t, []string{ + "d697d6b6-d7e6-47e6-a268-42e917b614e1", + }, extractIds(res.Objects)) + }) + + t.Run("classMT_T2", func(t *testing.T) { + res, err := helper.TenantListObjects(t, classMT_4.Class, tenantNames[0]) + + require.NotNil(t, err) + expErr := &objects.ObjectsListUnprocessableEntity{} + require.ErrorAs(t, err, &expErr) + assert.Contains(t, expErr.Payload.Error[0].Message, tenantNames[0]) + require.Nil(t, res) + 
}) + }) + + t.Run("list objects for tenant 2", func(t *testing.T) { + t.Run("no class", func(t *testing.T) { + res, err := helper.TenantListObjects(t, "", tenantNames[1]) + + require.Nil(t, err) + require.NotNil(t, res) + require.Equal(t, int64(4), res.TotalResults) + assert.ElementsMatch(t, []string{ + "7baead88-a42b-4876-a185-e0ccc61c58ca", + "7fa1fd17-a883-465a-ae22-44f103250b27", + "fd4ce87a-8034-4e27-8d47-539fa9dde1f3", + "b33d8f4c-30f9-426d-94a5-fa256f3fb5e7", + }, extractIds(res.Objects)) + }) + + t.Run("classMT_T1T2_1", func(t *testing.T) { + res, err := helper.TenantListObjects(t, classMT_1.Class, tenantNames[1]) + + require.Nil(t, err) + require.NotNil(t, res) + require.Equal(t, int64(1), res.TotalResults) + assert.ElementsMatch(t, []string{ + "7baead88-a42b-4876-a185-e0ccc61c58ca", + }, extractIds(res.Objects)) + }) + + t.Run("classMT_T1T2_2", func(t *testing.T) { + res, err := helper.TenantListObjects(t, classMT_2.Class, tenantNames[1]) + + require.Nil(t, err) + require.NotNil(t, res) + require.Equal(t, int64(2), res.TotalResults) + assert.ElementsMatch(t, []string{ + "7fa1fd17-a883-465a-ae22-44f103250b27", + "fd4ce87a-8034-4e27-8d47-539fa9dde1f3", + }, extractIds(res.Objects)) + }) + + t.Run("classMT_T1", func(t *testing.T) { + res, err := helper.TenantListObjects(t, classMT_3.Class, tenantNames[1]) + + require.NotNil(t, err) + expErr := &objects.ObjectsListUnprocessableEntity{} + require.ErrorAs(t, err, &expErr) + assert.Contains(t, expErr.Payload.Error[0].Message, tenantNames[1]) + require.Nil(t, res) + }) + + t.Run("classMT_T2", func(t *testing.T) { + res, err := helper.TenantListObjects(t, classMT_4.Class, tenantNames[1]) + + require.Nil(t, err) + require.NotNil(t, res) + require.Equal(t, int64(1), res.TotalResults) + assert.ElementsMatch(t, []string{ + "b33d8f4c-30f9-426d-94a5-fa256f3fb5e7", + }, extractIds(res.Objects)) + }) + }) + + t.Run("list objects no tenant", func(t *testing.T) { + t.Run("no class", func(t *testing.T) { + res, err := 
helper.ListObjects(t, "") + + require.Nil(t, err) + require.NotNil(t, res) + require.Equal(t, int64(3), res.TotalResults) + assert.ElementsMatch(t, []string{ + "6f019424-bacf-4539-b1be-fc1d3eccb50a", + "8d02b16c-478c-4cae-9384-3b686bae0f4e", + "865a820a-c325-4d10-8d8c-4b991bc43778", + }, extractIds(res.Objects)) + }) + + t.Run("classNonMT_1", func(t *testing.T) { + res, err := helper.ListObjects(t, classNonMT_1.Class) + + require.Nil(t, err) + require.NotNil(t, res) + require.Equal(t, int64(2), res.TotalResults) + assert.ElementsMatch(t, []string{ + "6f019424-bacf-4539-b1be-fc1d3eccb50a", + "8d02b16c-478c-4cae-9384-3b686bae0f4e", + }, extractIds(res.Objects)) + }) + + t.Run("classNonMT_2", func(t *testing.T) { + res, err := helper.ListObjects(t, classNonMT_2.Class) + + require.Nil(t, err) + require.NotNil(t, res) + require.Equal(t, int64(1), res.TotalResults) + assert.ElementsMatch(t, []string{ + "865a820a-c325-4d10-8d8c-4b991bc43778", + }, extractIds(res.Objects)) + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/gql_aggregate_tenant_objects_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/gql_aggregate_tenant_objects_test.go new file mode 100644 index 0000000000000000000000000000000000000000..16e05fd30c1a5a9fc938a45a3e65b5024810cc64 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/gql_aggregate_tenant_objects_test.go @@ -0,0 +1,211 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/nodes" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +func TestGQLAggregateTenantObjects(t *testing.T) { + testClass := models.Class{ + Class: "MultiTenantClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantName1 := "Tenant1" + tenantName2 := "Tenant2" + numTenantObjs1 := 5 + numTenantObjs2 := 3 + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + helper.CreateClass(t, &testClass) + + tenants := []*models.Tenant{ + {Name: tenantName1}, + {Name: tenantName2}, + } + helper.CreateTenants(t, testClass.Class, tenants) + + batch1 := makeTenantBatch(batchParams{ + className: testClass.Class, + tenantName: tenantName1, + batchSize: numTenantObjs1, + }) + batch2 := makeTenantBatch(batchParams{ + className: testClass.Class, + tenantName: tenantName2, + batchSize: numTenantObjs2, + }) + + helper.CreateObjectsBatch(t, batch1) + helper.CreateObjectsBatch(t, batch2) + + t.Run("GQL Aggregate tenant objects", func(t *testing.T) { + testAggregateTenantSuccess(t, testClass.Class, tenantName1, numTenantObjs1, "") + testAggregateTenantSuccess(t, testClass.Class, tenantName2, numTenantObjs2, "") + }) + + t.Run("GQL Aggregate tenant objects near object", func(t *testing.T) { + testAggregateTenantSuccess(t, testClass.Class, tenantName1, numTenantObjs1, string(batch1[0].ID)) + testAggregateTenantSuccess(t, testClass.Class, tenantName2, numTenantObjs2, string(batch2[0].ID)) 
+ }) + + t.Run("Get global tenant objects count", func(t *testing.T) { + assert.Eventually(t, func() bool { + params := nodes.NewNodesGetClassParams().WithClassName(testClass.Class).WithOutput(&verbose) + resp, err := helper.Client(t).Nodes.NodesGetClass(params, nil) + require.Nil(t, err) + + payload := resp.GetPayload() + require.NotNil(t, payload) + require.NotNil(t, payload.Nodes) + require.Len(t, payload.Nodes, 1) + + node := payload.Nodes[0] + require.NotNil(t, node) + assert.Equal(t, models.NodeStatusStatusHEALTHY, *node.Status) + assert.True(t, len(node.Name) > 0) + assert.True(t, node.GitHash != "" && node.GitHash != "unknown") + assert.Len(t, node.Shards, 2) + + shardCount := map[string]int64{ + tenantName1: int64(numTenantObjs1), + tenantName2: int64(numTenantObjs2), + } + + for _, shard := range node.Shards { + count, ok := shardCount[shard.Name] + require.True(t, ok, "expected shard %q to be in %+v", + shard.Name, []string{tenantName1, tenantName2}) + + assert.Equal(t, testClass.Class, shard.Class) + if count != shard.ObjectCount { + return false + } + } + + require.NotNil(t, node.Stats) + assert.Equal(t, int64(2), node.Stats.ShardCount) + return int64(numTenantObjs1+numTenantObjs2) == node.Stats.ObjectCount + }, 15*time.Second, 500*time.Millisecond) + }) +} + +func TestGQLAggregateTenantObjects_InvalidTenant(t *testing.T) { + testClass := models.Class{ + Class: "MultiTenantClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantName := "Tenant1" + numTenantObjs := 5 + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + t.Run("setup test data", func(t *testing.T) { + t.Run("create class with multi-tenancy enabled", func(t *testing.T) { + helper.CreateClass(t, &testClass) + }) + + t.Run("create tenants", func(t *testing.T) { + tenants := []*models.Tenant{ + {Name: tenantName}, + } + 
helper.CreateTenants(t, testClass.Class, tenants) + }) + + t.Run("add tenant objects", func(t *testing.T) { + batch := makeTenantBatch(batchParams{ + className: testClass.Class, + tenantName: tenantName, + batchSize: numTenantObjs, + }) + helper.CreateObjectsBatch(t, batch) + }) + }) + + t.Run("non-existent tenant key", func(t *testing.T) { + query := fmt.Sprintf(`{Aggregate{%s(tenant:"DNE"){meta{count}}}}`, testClass.Class) + expected := `"DNE"` + resp, err := graphqlhelper.QueryGraphQL(t, helper.RootAuth, "", query, nil) + require.Nil(t, err) + assert.Nil(t, resp.Data["Aggregate"].(map[string]interface{})[testClass.Class]) + assert.Len(t, resp.Errors, 1) + assert.Contains(t, resp.Errors[0].Message, expected) + }) +} + +type batchParams struct { + className string + tenantName string + batchSize int +} + +func makeTenantBatch(params batchParams) []*models.Object { + batch := make([]*models.Object, params.batchSize) + for i := range batch { + batch[i] = &models.Object{ + ID: strfmt.UUID(uuid.New().String()), + Class: params.className, + Properties: map[string]interface{}{ + "name": params.tenantName, + }, + Tenant: params.tenantName, + } + } + return batch +} + +func testAggregateTenantSuccess(t *testing.T, className, tenantName string, expectedCount int, nearObjectId string) { + nearObject := "" + if nearObjectId != "" { + nearObject = fmt.Sprintf(`nearObject: {id: "%s", certainty: 0.4},`, nearObjectId) + } + + query := fmt.Sprintf(`{Aggregate{%s(%s,tenant:%q){meta{count}}}}`, className, nearObject, tenantName) + resp := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + result := resp.Get("Aggregate", className).AsSlice() + require.Len(t, result, 1) + count := result[0].(map[string]any)["meta"].(map[string]any)["count"].(json.Number) + assert.Equal(t, json.Number(fmt.Sprint(expectedCount)), count) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/gql_get_tenant_objects_test.go 
b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/gql_get_tenant_objects_test.go new file mode 100644 index 0000000000000000000000000000000000000000..879b11fe63adeae3dc24fd8dce24750ebb4b93eb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/gql_get_tenant_objects_test.go @@ -0,0 +1,249 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "strings" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" + graphqlhelper "github.com/weaviate/weaviate/test/helper/graphql" +) + +func TestGQLGetTenantObjects(t *testing.T) { + testClass := models.Class{ + Class: "MultiTenantClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + { + Name: "text", + DataType: schema.DataTypeText.PropString(), + }, + }, + Vectorizer: "text2vec-contextionary", + } + tenant := "Tenant1" + otherTenant := "otherTenant" + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenant, + "text": "meat", + }, + Tenant: tenant, + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenant, + "text": "bananas", + }, + Tenant: tenant, + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": 
tenant, + "text": "kiwi", + }, + Tenant: tenant, + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenant, + "text": "kiwi", + }, + Tenant: otherTenant, + }, + } + + // add more objects for other tenant, won't show up in search + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + helper.CreateClass(t, &testClass) + helper.CreateTenants(t, testClass.Class, []*models.Tenant{{Name: tenant}, {Name: otherTenant}}) + helper.CreateObjectsBatch(t, tenantObjects) + + t.Run("Test tenant objects", func(t *testing.T) { + for _, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, obj.Tenant) + require.Nil(t, err) + assert.Equal(t, obj.ID, resp.ID) + assert.Equal(t, obj.Class, resp.Class) + assert.Equal(t, obj.Properties, resp.Properties) + } + }) + + t.Run("GQL Get tenant objects", func(t *testing.T) { + expectedIDs := map[strfmt.UUID]bool{} + for _, obj := range tenantObjects { + expectedIDs[obj.ID] = false + } + + query := fmt.Sprintf(`{Get{%s(tenant:%q){_additional{id}}}}`, testClass.Class, tenant) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + for _, obj := range result.Get("Get", testClass.Class).AsSlice() { + id := obj.(map[string]any)["_additional"].(map[string]any)["id"].(string) + if _, ok := expectedIDs[strfmt.UUID(id)]; ok { + expectedIDs[strfmt.UUID(id)] = true + } else { + t.Fatalf("found unexpected id %q", id) + } + } + + for id, found := range expectedIDs { + if !found { + t.Fatalf("expected to find id %q, but didn't", id) + } + } + }) + + t.Run("GQL near objects", func(t *testing.T) { + expectedIDs := map[strfmt.UUID]bool{} + for _, obj := range tenantObjects { + expectedIDs[obj.ID] = false + } + + query := fmt.Sprintf(`{Get{%s(nearObject:{id: %q}, tenant:%q){_additional{id}}}}`, testClass.Class, tenantObjects[0].ID, tenant) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + res := 
result.Get("Get", testClass.Class) + require.NotNil(t, res) // objects have no content, so no result + }) + + t.Run("GQL near text", func(t *testing.T) { + expectedIDs := map[strfmt.UUID]bool{} + for _, obj := range tenantObjects { + expectedIDs[obj.ID] = false + } + + query := fmt.Sprintf(`{Get{%s(nearText:{concepts: "apple", moveTo: {concepts: ["fruit"], force: 0.1}, moveAwayFrom: {objects: [{id:"0927a1e0-398e-4e76-91fb-04a7a8f0405c"}], force: 0.1}}, tenant:%q){_additional{id}}}}`, testClass.Class, tenant) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + res := result.Get("Get", testClass.Class) + require.NotNil(t, res) + require.Len(t, res.Result, 3) // don't find object from other tenants + }) + + t.Run("GQL bm25", func(t *testing.T) { + expectedIDs := map[strfmt.UUID]bool{} + for _, obj := range tenantObjects { + expectedIDs[obj.ID] = false + } + + query := fmt.Sprintf(`{Get{%s(bm25:{query: "kiwi"}, tenant:%q){_additional{id}}}}`, testClass.Class, tenant) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + res := result.Get("Get", testClass.Class) + require.NotNil(t, res) + require.Len(t, res.Result, 1) // don't find object from other tenants + }) + + t.Run("GQL hybrid", func(t *testing.T) { + expectedIDs := map[strfmt.UUID]bool{} + for _, obj := range tenantObjects { + expectedIDs[obj.ID] = false + } + + query := fmt.Sprintf(`{Get{%s(hybrid:{query: "kiwi", alpha: 0.1}, tenant:%q, autocut:1){text _additional{id}}}}`, testClass.Class, tenant) + result := graphqlhelper.AssertGraphQL(t, helper.RootAuth, query) + res := result.Get("Get", testClass.Class) + require.NotNil(t, res) + require.Len(t, res.Result, 1) // find only relevant results from tenant + }) +} + +func TestGQLGetTenantObjects_MissingTenant(t *testing.T) { + testClass := models.Class{ + Class: "MultiTenantClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: 
schema.DataTypeText.PropString(), + }, + }, + } + tenantName := "Tenant1" + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + }, + } + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + helper.CreateClass(t, &testClass) + helper.CreateTenants(t, testClass.Class, []*models.Tenant{{Name: tenantName}}) + helper.CreateObjectsBatch(t, tenantObjects) + + for _, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantName) + require.Nil(t, err) + assert.Equal(t, obj.ID, resp.ID) + assert.Equal(t, obj.Class, resp.Class) + assert.Equal(t, obj.Properties, resp.Properties) + } + + query := fmt.Sprintf(`{Get{%s{_additional{id}}}}`, testClass.Class) + result, err := graphqlhelper.QueryGraphQL(t, helper.RootAuth, "", query, nil) + require.Nil(t, err) + require.Len(t, result.Errors, 1) + assert.Nil(t, result.Data["Get"].(map[string]interface{})[testClass.Class]) + msg := fmt.Sprintf(`explorer: list class: search: object search at index %s: `, + strings.ToLower(testClass.Class)) + + fmt.Sprintf(`class %s has multi-tenancy enabled, but request was without tenant`, testClass.Class) + assert.Equal(t, result.Errors[0].Message, msg) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/head_tenant_objects_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/head_tenant_objects_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c95ea766f64a117c974a9774685ab2adbb7ef7dd --- 
/dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/head_tenant_objects_test.go @@ -0,0 +1,96 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func TestHeadTenantObjects(t *testing.T) { + testClass := models.Class{ + Class: "MultiTenantClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantNames := []string{ + "Tenant1", "Tenant2", "Tenant3", + } + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[0], + }, + Tenant: tenantNames[0], + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[1], + }, + Tenant: tenantNames[1], + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[2], + }, + Tenant: tenantNames[2], + }, + } + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + t.Run("create class with multi-tenancy enabled", func(t *testing.T) { + helper.CreateClass(t, &testClass) + }) + + t.Run("create tenants", func(t *testing.T) { + tenants := make([]*models.Tenant, len(tenantNames)) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenantNames[i]} + } + helper.CreateTenants(t, 
testClass.Class, tenants) + }) + + t.Run("add tenant objects", func(t *testing.T) { + for _, obj := range tenantObjects { + helper.CreateObject(t, obj) + } + }) + + t.Run("head tenant objects", func(t *testing.T) { + for i, obj := range tenantObjects { + exists, err := helper.TenantObjectExists(t, obj.Class, obj.ID, tenantNames[i]) + require.Nil(t, err) + assert.True(t, exists) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/patch_tenant_objects_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/patch_tenant_objects_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ea6b57e4ac41a8952f2d4f67cd6d993d69a919b2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/patch_tenant_objects_test.go @@ -0,0 +1,201 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "errors" + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func TestPatchTenantObjects(t *testing.T) { + mutableProp := "mutableProp" + testClass := models.Class{ + Class: "MultiTenantClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, { + Name: mutableProp, + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantNames := []string{ + "Tenant1", "Tenant2", "Tenant3", + } + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[0], + mutableProp: "obj#0", + }, + Tenant: tenantNames[0], + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[1], + mutableProp: "obj#1", + }, + Tenant: tenantNames[1], + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[2], + mutableProp: "obj#2", + }, + Tenant: tenantNames[2], + }, + } + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + t.Run("create class with multi-tenancy enabled", func(t *testing.T) { + helper.CreateClass(t, &testClass) + }) + + t.Run("create tenants", func(t *testing.T) { + tenants := make([]*models.Tenant, len(tenantNames)) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenantNames[i]} + } + helper.CreateTenants(t, testClass.Class, tenants) + }) + + t.Run("add tenant objects", func(t *testing.T) { + for _, obj := range 
tenantObjects { + helper.CreateObject(t, obj) + } + + t.Run("verify tenant object creation", func(t *testing.T) { + for i, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantNames[i]) + require.Nil(t, err) + require.Equal(t, obj.ID, resp.ID) + require.Equal(t, obj.Class, resp.Class) + require.Equal(t, obj.Properties, resp.Properties) + } + }) + }) + + t.Run("patch tenant objects", func(t *testing.T) { + for i, obj := range tenantObjects { + mut := obj.Properties.(map[string]interface{})[mutableProp] + toUpdate := &models.Object{ + Class: testClass.Class, + ID: obj.ID, + Properties: map[string]interface{}{ + "name": tenantNames[i], + mutableProp: fmt.Sprintf("%s--patched", mut), + }, + Tenant: tenantNames[i], + } + helper.PatchObject(t, toUpdate) + } + + t.Run("assert tenant object updates", func(t *testing.T) { + for i, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantNames[i]) + require.Nil(t, err) + require.Equal(t, obj.ID, resp.ID) + require.Equal(t, obj.Class, resp.Class) + expectedProps := obj.Properties.(map[string]interface{}) + expectedProps[mutableProp] = fmt.Sprintf("%s--patched", expectedProps[mutableProp]) + require.Equal(t, expectedProps, resp.Properties) + } + }) + }) +} + +func TestPatchTenantObjects_ChangeTenant(t *testing.T) { + className := "MultiTenantClassPatch" + tenantName := "Tenant1" + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantObject := models.Object{ + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: className, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + } + + defer func() { + helper.DeleteClass(t, className) + }() + + t.Run("create class with multi-tenancy enabled", func(t *testing.T) { + helper.CreateClass(t, 
&testClass) + helper.CreateTenants(t, className, []*models.Tenant{{Name: tenantName}}) + }) + + t.Run("add tenant object", func(t *testing.T) { + params := objects.NewObjectsCreateParams(). + WithBody(&tenantObject) + _, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + require.Nil(t, err) + }) + + t.Run("patch tenant object", func(t *testing.T) { + toUpdate := models.Object{ + Class: testClass.Class, + ID: tenantObject.ID, + Properties: map[string]interface{}{ + "name": "updatedTenantName", + }, + Tenant: "updatedTenantName", + } + params := objects.NewObjectsClassPatchParams().WithClassName(toUpdate.Class). + WithID(toUpdate.ID).WithBody(&toUpdate) + _, err := helper.Client(t).Objects.ObjectsClassPatch(params, nil) + require.NotNil(t, err) // tenant does not exist + var parsedErr *objects.ObjectsClassPatchUnprocessableEntity + require.True(t, errors.As(err, &parsedErr)) + require.NotNil(t, parsedErr.Payload.Error) + require.Len(t, parsedErr.Payload.Error, 1) + assert.Contains(t, err.Error(), fmt.Sprint(http.StatusUnprocessableEntity)) + expected := "\"updatedTenantName\"" + assert.Contains(t, parsedErr.Payload.Error[0].Message, expected) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/tenant_objects_reference_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/tenant_objects_reference_test.go new file mode 100644 index 0000000000000000000000000000000000000000..0a38fdbe1215b76cf1e6bc46b52b646b4c45ee04 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/tenant_objects_reference_test.go @@ -0,0 +1,180 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func TestTenantObjectsReference(t *testing.T) { + className := "MultiTenantClass" + mutableProp := "mutableProp" + refProp := "refProp" + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + { + Name: mutableProp, + DataType: schema.DataTypeText.PropString(), + }, + { + Name: refProp, + DataType: []string{className}, + }, + }, + } + tenantNames := []string{ + "Tenant1", "Tenant2", "Tenant3", + } + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: className, + Properties: map[string]interface{}{ + "name": tenantNames[0], + mutableProp: "obj#0", + }, + Tenant: tenantNames[0], + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: className, + Properties: map[string]interface{}{ + "name": tenantNames[1], + mutableProp: "obj#1", + }, + Tenant: tenantNames[1], + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: className, + Properties: map[string]interface{}{ + "name": tenantNames[2], + mutableProp: "obj#2", + }, + Tenant: tenantNames[2], + }, + } + tenantRefs := []*models.Object{ + { + ID: "169b62a7-ef1c-481d-8fb0-27f11716bde7", + Class: className, + Properties: map[string]interface{}{ + "name": tenantNames[0], + mutableProp: "ref#0", + }, + Tenant: tenantNames[0], + }, + { + ID: "4d78424d-f7bd-479b-bd8a-52510e2db0fd", + Class: className, + Properties: map[string]interface{}{ + "name": tenantNames[1], + mutableProp: "ref#1", + }, + Tenant: tenantNames[1], + }, + { + ID: "c1db0a06-d5f9-4f77-aa3c-08a44f16e358", + Class: className, + 
Properties: map[string]interface{}{
+ "name": tenantNames[2],
+ mutableProp: "ref#2",
+ },
+ Tenant: tenantNames[2],
+ },
+ }
+
+ defer func() {
+ helper.DeleteClass(t, className)
+ }()
+
+ t.Run("create class with multi-tenancy enabled", func(t *testing.T) {
+ helper.CreateClass(t, &testClass)
+ })
+
+ t.Run("create tenants", func(t *testing.T) {
+ tenants := make([]*models.Tenant, len(tenantNames))
+ for i := range tenants {
+ tenants[i] = &models.Tenant{Name: tenantNames[i]}
+ }
+ helper.CreateTenants(t, className, tenants)
+ })
+
+ t.Run("add tenant objects", func(t *testing.T) {
+ for i, obj := range tenantObjects {
+ helper.CreateObject(t, obj)
+ helper.CreateObject(t, tenantRefs[i])
+ }
+
+ t.Run("verify tenant object creation", func(t *testing.T) {
+ for i, obj := range tenantObjects {
+ resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantNames[i])
+ require.Nil(t, err)
+ require.Equal(t, obj.ID, resp.ID)
+ require.Equal(t, obj.Class, resp.Class)
+ require.Equal(t, obj.Properties, resp.Properties)
+ }
+ })
+ })
+
+ t.Run("add tenant object references", func(t *testing.T) {
+ for i, obj := range tenantObjects {
+ ref := &models.SingleRef{Beacon: helper.NewBeacon(className, tenantRefs[i].ID)}
+ helper.AddReferenceTenant(t, obj, ref, refProp, tenantNames[i])
+ }
+
+ t.Run("assert tenant object references", func(t *testing.T) {
+ for i, obj := range tenantObjects {
+ resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantNames[i])
+ require.Nil(t, err)
+ require.Equal(t, obj.ID, resp.ID)
+ require.Equal(t, obj.Class, resp.Class)
+ refs := resp.Properties.(map[string]interface{})[refProp].([]interface{})
+ require.Len(t, refs, 1)
+ expectedBeacon := helper.NewBeacon(className, tenantRefs[i].ID).String()
+ assert.Equal(t, expectedBeacon, refs[0].(map[string]interface{})["beacon"])
+ }
+ })
+ })
+
+ // fix: subtest callback parameter was mistyped as `Z` (unused), which made the
+ // body capture the parent test's `t` and mis-attribute failures; bind `t` like
+ // every other t.Run closure in this file so failures report on the subtest.
+ t.Run("delete tenant object references", func(t *testing.T) {
+ for i, obj := range tenantObjects {
+ ref := &models.SingleRef{Beacon:
helper.NewBeacon(className, tenantRefs[i].ID)} + helper.DeleteReferenceTenant(t, obj, ref, refProp, tenantNames[i]) + } + + t.Run("assert tenant object references", func(t *testing.T) { + for i, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantNames[i]) + require.Nil(t, err) + require.Equal(t, obj.ID, resp.ID) + require.Equal(t, obj.Class, resp.Class) + refs := resp.Properties.(map[string]interface{})[refProp].([]interface{}) + require.Len(t, refs, 0) + } + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/update_tenant_objects_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/update_tenant_objects_test.go new file mode 100644 index 0000000000000000000000000000000000000000..03b33f360926bb5d436c6ee4950714806b85b668 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/update_tenant_objects_test.go @@ -0,0 +1,201 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "errors" + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +func TestUpdateTenantObjects(t *testing.T) { + mutableProp := "mutableProp" + testClass := models.Class{ + Class: "MultiTenantClass", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, { + Name: mutableProp, + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantNames := []string{ + "Tenant1", "Tenant2", "Tenant3", + } + tenantObjects := []*models.Object{ + { + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[0], + mutableProp: "obj#0", + }, + Tenant: tenantNames[0], + }, + { + ID: "831ae1d0-f441-44b1-bb2a-46548048e26f", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[1], + mutableProp: "obj#1", + }, + Tenant: tenantNames[1], + }, + { + ID: "6f3363e0-c0a0-4618-bf1f-b6cad9cdff59", + Class: testClass.Class, + Properties: map[string]interface{}{ + "name": tenantNames[2], + mutableProp: "obj#2", + }, + Tenant: tenantNames[2], + }, + } + + defer func() { + helper.DeleteClass(t, testClass.Class) + }() + + t.Run("create class with multi-tenancy enabled", func(t *testing.T) { + helper.CreateClass(t, &testClass) + }) + + t.Run("create tenants", func(t *testing.T) { + tenants := make([]*models.Tenant, len(tenantNames)) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenantNames[i]} + } + helper.CreateTenants(t, testClass.Class, tenants) + }) + + t.Run("add tenant objects", func(t *testing.T) { + for _, obj := range 
tenantObjects { + helper.CreateObject(t, obj) + } + + t.Run("verify tenant object creation", func(t *testing.T) { + for i, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantNames[i]) + require.Nil(t, err) + require.Equal(t, obj.ID, resp.ID) + require.Equal(t, obj.Class, resp.Class) + require.Equal(t, obj.Properties, resp.Properties) + } + }) + }) + + t.Run("update tenant objects", func(t *testing.T) { + for i, obj := range tenantObjects { + mut := obj.Properties.(map[string]interface{})[mutableProp] + toUpdate := &models.Object{ + Class: testClass.Class, + ID: obj.ID, + Properties: map[string]interface{}{ + "name": tenantNames[i], + mutableProp: fmt.Sprintf("%s--updated", mut), + }, + Tenant: tenantNames[i], + } + helper.UpdateObject(t, toUpdate) + } + + t.Run("assert tenant object updates", func(t *testing.T) { + for i, obj := range tenantObjects { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, tenantNames[i]) + require.Nil(t, err) + require.Equal(t, obj.ID, resp.ID) + require.Equal(t, obj.Class, resp.Class) + expectedProps := obj.Properties.(map[string]interface{}) + expectedProps[mutableProp] = fmt.Sprintf("%s--updated", expectedProps[mutableProp]) + require.Equal(t, expectedProps, resp.Properties) + } + }) + }) +} + +func TestUpdateTenantObjects_UpdateTenant(t *testing.T) { + className := "MultiTenantClass" + tenantName := "Tenant1" + testClass := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + }, + } + tenantObject := models.Object{ + ID: "0927a1e0-398e-4e76-91fb-04a7a8f0405c", + Class: className, + Properties: map[string]interface{}{ + "name": tenantName, + }, + Tenant: tenantName, + } + + defer func() { + helper.DeleteClass(t, className) + }() + + t.Run("create class with multi-tenancy enabled", func(t *testing.T) { + helper.CreateClass(t, 
&testClass) + helper.CreateTenants(t, className, []*models.Tenant{{Name: tenantName}}) + }) + + t.Run("add tenant object", func(t *testing.T) { + params := objects.NewObjectsCreateParams(). + WithBody(&tenantObject) + _, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + require.Nil(t, err) + }) + + t.Run("update tenant object", func(t *testing.T) { + toUpdate := models.Object{ + Class: testClass.Class, + ID: tenantObject.ID, + Properties: map[string]interface{}{ + "name": "updatedTenantName", + }, + Tenant: "updatedTenantName", + } + params := objects.NewObjectsClassPutParams().WithClassName(toUpdate.Class). + WithID(toUpdate.ID).WithBody(&toUpdate) + _, err := helper.Client(t).Objects.ObjectsClassPut(params, nil) + require.NotNil(t, err) // tenant does not exist + var parsedErr *objects.ObjectsClassPutUnprocessableEntity + require.True(t, errors.As(err, &parsedErr)) + require.NotNil(t, parsedErr.Payload.Error) + require.Len(t, parsedErr.Payload.Error, 1) + assert.Contains(t, err.Error(), fmt.Sprint(http.StatusUnprocessableEntity)) + expected := "\"updatedTenantName\"" + assert.Contains(t, parsedErr.Payload.Error[0].Message, expected) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/update_tenant_references_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/update_tenant_references_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c44cfbfa92292ca189f2500115666d5745ab2ec8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/multi_tenancy/update_tenant_references_test.go @@ -0,0 +1,134 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func TestTenantObjectsReferenceUpdates(t *testing.T) { + className := "MultiTenantClass" + refProp := "refProp1" + + class := models.Class{ + Class: className, + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + Properties: []*models.Property{ + {Name: refProp, DataType: []string{className}}, + }, + } + defer func() { + helper.DeleteClass(t, className) + }() + helper.CreateClass(t, &class) + + tenantNames := []string{"Tenant1", "Tenant2", "Tenant3"} + tenants := make([]*models.Tenant, len(tenantNames)) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenantNames[i]} + } + helper.CreateTenants(t, className, tenants) + + objs := make([]*models.Object, 0) + for i := 0; i < len(tenantNames)*4; i++ { + obj := &models.Object{ + ID: strfmt.UUID(uuid.New().String()), + Class: className, + Tenant: tenantNames[i%len(tenantNames)], + } + objs = append(objs, obj) + helper.CreateObject(t, obj) + } + + for _, obj := range objs { + ref := &models.SingleRef{Beacon: helper.NewBeacon(className, obj.ID)} + helper.AddReferenceTenant(t, obj, ref, refProp, obj.Tenant) + helper.AddReferenceTenant(t, obj, ref, refProp, obj.Tenant) + } + + for _, obj := range objs { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, obj.Tenant) + require.Nil(t, err) + require.Equal(t, obj.ID, resp.ID) + require.Equal(t, obj.Class, resp.Class) + refs := resp.Properties.(map[string]interface{})[refProp].([]interface{}) + require.Len(t, refs, 2) + expectedBeacon := helper.NewBeacon(className, obj.ID).String() + assert.Equal(t, expectedBeacon, refs[0].(map[string]interface{})["beacon"]) + } + + t.Run("Update reference - single", func(t *testing.T) { + for i, obj 
:= range objs { + j := (i + len(tenantNames)) % len(objs) // always reference the next object with the same tenant + ref := models.MultipleRef{{Beacon: helper.NewBeacon(className, objs[j].ID)}} + helper.UpdateReferenceTenant(t, obj, ref, refProp, obj.Tenant) + } + + for i, obj := range objs { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, obj.Tenant) + require.Nil(t, err) + require.Equal(t, obj.ID, resp.ID) + require.Equal(t, obj.Class, resp.Class) + refs := resp.Properties.(map[string]interface{})[refProp].([]interface{}) + require.Len(t, refs, 1) + j := (i + len(tenantNames)) % len(objs) // always reference the next object with the same tenant + expectedBeacon := helper.NewBeacon(className, objs[j].ID).String() + assert.Equal(t, expectedBeacon, refs[0].(map[string]interface{})["beacon"]) + } + }) + + t.Run("Update reference - multiple", func(t *testing.T) { + for i, obj := range objs { + ref := models.MultipleRef{} + j := (i + 2*len(tenantNames)) % len(objs) + ref = append(ref, &models.SingleRef{Beacon: helper.NewBeacon(className, objs[j].ID)}) + ref = append(ref, &models.SingleRef{Beacon: helper.NewBeacon(className, objs[j].ID)}) + ref = append(ref, &models.SingleRef{Beacon: helper.NewBeacon(className, objs[j].ID)}) + helper.UpdateReferenceTenant(t, obj, ref, refProp, obj.Tenant) + } + + for i, obj := range objs { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, obj.Tenant) + require.Nil(t, err) + require.Equal(t, obj.ID, resp.ID) + require.Equal(t, obj.Class, resp.Class) + refs := resp.Properties.(map[string]interface{})[refProp].([]interface{}) + require.Len(t, refs, 3) + j := (i + 2*len(tenantNames)) % len(objs) // always reference the next object with the same tenant + expectedBeacon := helper.NewBeacon(className, objs[j].ID).String() + assert.Equal(t, expectedBeacon, refs[0].(map[string]interface{})["beacon"]) + } + }) + + t.Run("Update reference - empty", func(t *testing.T) { + for _, obj := range objs { + ref := models.MultipleRef{} + 
helper.UpdateReferenceTenant(t, obj, ref, refProp, obj.Tenant) + } + + for _, obj := range objs { + resp, err := helper.TenantObject(t, obj.Class, obj.ID, obj.Tenant) + require.Nil(t, err) + require.Equal(t, obj.ID, resp.ID) + require.Equal(t, obj.Class, resp.Class) + refs := resp.Properties.(map[string]interface{})[refProp].([]interface{}) + require.Len(t, refs, 0) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/nodes/nodes_api_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/nodes/nodes_api_test.go new file mode 100644 index 0000000000000000000000000000000000000000..83b9c1af368375cd38c0a696bc7f0bb81740c320 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/nodes/nodes_api_test.go @@ -0,0 +1,461 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/client/meta" + "github.com/weaviate/weaviate/client/nodes" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/verbosity" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/books" + "github.com/weaviate/weaviate/test/helper/sample-schema/documents" + "github.com/weaviate/weaviate/test/helper/sample-schema/multishard" +) + +func Test_NodesAPI(t *testing.T) { + ctx := context.Background() + compose, err := docker.New(). + WithWeaviate(). + WithText2VecContextionary(). + WithWeaviateEnv("PERSISTENCE_MAX_REUSE_WAL_SIZE", "0"). 
+ WithWeaviateEnv("PERSISTENCE_MEMTABLES_FLUSH_DIRTY_AFTER_SECONDS", "2"). // flush fast enough so object counts are correct + Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + defer helper.SetupClient(fmt.Sprintf("%s:%s", helper.ServerHost, helper.ServerPort)) + helper.SetupClient(compose.GetWeaviate().URI()) + + t.Run("empty DB", func(t *testing.T) { + meta, err := helper.Client(t).Meta.MetaGet(meta.NewMetaGetParams(), nil) + require.Nil(t, err) + assert.NotNil(t, meta.GetPayload()) + + assertions := func(t require.TestingT, nodeStatus *models.NodeStatus) { + require.NotNil(t, nodeStatus) + assert.Equal(t, models.NodeStatusStatusHEALTHY, *nodeStatus.Status) + assert.True(t, len(nodeStatus.Name) > 0) + assert.True(t, nodeStatus.GitHash != "" && nodeStatus.GitHash != "unknown") + assert.Equal(t, meta.Payload.Version, nodeStatus.Version) + assert.Empty(t, nodeStatus.Shards) + require.Nil(t, nodeStatus.Stats) + } + + testStatusResponse(t, assertions, nil, "") + }) + + t.Run("DB with Books (1 class ,1 shard configuration, 1 node)", func(t *testing.T) { + booksClass := books.ClassContextionaryVectorizer() + helper.CreateClass(t, booksClass) + defer helper.DeleteClass(t, booksClass.Class) + + for _, book := range books.Objects() { + helper.CreateObject(t, book) + helper.AssertGetObjectEventually(t, book.Class, book.ID) + } + + minimalAssertions := func(t require.TestingT, nodeStatus *models.NodeStatus) { + require.NotNil(t, nodeStatus) + assert.Equal(t, models.NodeStatusStatusHEALTHY, *nodeStatus.Status) + assert.True(t, len(nodeStatus.Name) > 0) + assert.True(t, nodeStatus.GitHash != "" && nodeStatus.GitHash != "unknown") + } + + verboseAssertions := func(t require.TestingT, nodeStatus *models.NodeStatus) { + require.Len(t, nodeStatus.Shards, 1) + shard := nodeStatus.Shards[0] + assert.True(t, len(shard.Name) > 0) + assert.Equal(t, booksClass.Class, shard.Class) + assert.Equal(t, int64(3), shard.ObjectCount) + 
assert.Equal(t, int64(1), shard.ReplicationFactor) + assert.Equal(t, int64(1), shard.NumberOfReplicas) + require.NotNil(t, nodeStatus.Stats) + assert.Equal(t, int64(3), nodeStatus.Stats.ObjectCount) + assert.Equal(t, int64(1), nodeStatus.Stats.ShardCount) + } + + testStatusResponse(t, minimalAssertions, verboseAssertions, "") + }) + + t.Run("DB with MultiShard (1 class, 2 shards configuration, 1 node)", func(t *testing.T) { + multiShardClass := multishard.ClassContextionaryVectorizer() + helper.CreateClass(t, multiShardClass) + defer helper.DeleteClass(t, multiShardClass.Class) + + for _, multiShard := range multishard.Objects() { + helper.CreateObject(t, multiShard) + helper.AssertGetObjectEventually(t, multiShard.Class, multiShard.ID) + } + + minimalAssertions := func(t require.TestingT, nodeStatus *models.NodeStatus) { + require.NotNil(t, nodeStatus) + assert.Equal(t, models.NodeStatusStatusHEALTHY, *nodeStatus.Status) + assert.True(t, len(nodeStatus.Name) > 0) + assert.True(t, nodeStatus.GitHash != "" && nodeStatus.GitHash != "unknown") + } + + verboseAsssertions := func(t require.TestingT, nodeStatus *models.NodeStatus) { + assert.Len(t, nodeStatus.Shards, 2) + for _, shard := range nodeStatus.Shards { + assert.True(t, len(shard.Name) > 0) + assert.Equal(t, multiShardClass.Class, shard.Class) + assert.GreaterOrEqual(t, shard.ObjectCount, int64(0)) + assert.Equal(t, int64(1), shard.ReplicationFactor) + assert.Equal(t, int64(1), shard.NumberOfReplicas) + require.NotNil(t, nodeStatus.Stats) + assert.Equal(t, int64(3), nodeStatus.Stats.ObjectCount) + assert.Equal(t, int64(2), nodeStatus.Stats.ShardCount) + } + } + + testStatusResponse(t, minimalAssertions, verboseAsssertions, "") + }) + + t.Run("with class name: DB with Books and Documents, 1 shard, 1 node", func(t *testing.T) { + booksClass := books.ClassContextionaryVectorizer() + helper.CreateClass(t, booksClass) + defer helper.DeleteClass(t, booksClass.Class) + + t.Run("insert and check books", func(t 
*testing.T) { + for _, book := range books.Objects() { + helper.CreateObject(t, book) + helper.AssertGetObjectEventually(t, book.Class, book.ID) + } + + minimalAssertions := func(t require.TestingT, nodeStatus *models.NodeStatus) {} + verboseAssertions := func(t require.TestingT, nodeStatus *models.NodeStatus) { + require.NotNil(t, nodeStatus.Stats) + assert.Equal(t, int64(3), nodeStatus.Stats.ObjectCount) + assert.Equal(t, int64(1), nodeStatus.Stats.ShardCount) + } + + testStatusResponse(t, minimalAssertions, verboseAssertions, "") + }) + + t.Run("insert and check documents", func(t *testing.T) { + docsClasses := documents.ClassesContextionaryVectorizer(false) + helper.CreateClass(t, docsClasses[0]) + helper.CreateClass(t, docsClasses[1]) + defer helper.DeleteClass(t, docsClasses[0].Class) + defer helper.DeleteClass(t, docsClasses[1].Class) + + for _, doc := range documents.Objects() { + helper.CreateObject(t, doc) + helper.AssertGetObjectEventually(t, doc.Class, doc.ID) + } + + docsClass := docsClasses[0] + + minimalAssertions := func(t require.TestingT, nodeStatus *models.NodeStatus) { + assert.Equal(t, models.NodeStatusStatusHEALTHY, *nodeStatus.Status) + assert.True(t, len(nodeStatus.Name) > 0) + assert.True(t, nodeStatus.GitHash != "" && nodeStatus.GitHash != "unknown") + } + + verboseAssertions := func(t require.TestingT, nodeStatus *models.NodeStatus) { + require.NotNil(t, nodeStatus.Stats) + assert.Equal(t, int64(2), nodeStatus.Stats.ObjectCount) + assert.Equal(t, int64(1), nodeStatus.Stats.ShardCount) + assert.Len(t, nodeStatus.Shards, 1) + shard := nodeStatus.Shards[0] + assert.True(t, len(shard.Name) > 0) + assert.Equal(t, docsClass.Class, shard.Class) + assert.Equal(t, int64(2), shard.ObjectCount) + assert.Equal(t, int64(1), shard.ReplicationFactor) + assert.Equal(t, int64(1), shard.NumberOfReplicas) + } + + testStatusResponse(t, minimalAssertions, verboseAssertions, docsClass.Class) + }) + }) + + // This test prevents a regression of + // 
https://github.com/weaviate/weaviate/issues/2454 + t.Run("validate count with updates", func(t *testing.T) { + booksClass := books.ClassContextionaryVectorizer() + helper.CreateClass(t, booksClass) + defer helper.DeleteClass(t, booksClass.Class) + + _, err := helper.BatchClient(t).BatchObjectsCreate( + batch.NewBatchObjectsCreateParams().WithBody(batch.BatchObjectsCreateBody{ + Objects: []*models.Object{ + { + ID: strfmt.UUID("2D0D3E3B-54B2-48D4-BFE0-4BE2C060110E"), + Class: booksClass.Class, + Properties: map[string]interface{}{ + "title": "A book that changes", + "description": "First iteration", + }, + }, + }, + }), nil) + require.Nil(t, err) + + // Note that this is the same ID as before, so this is an update!! + _, err = helper.BatchClient(t).BatchObjectsCreate( + batch.NewBatchObjectsCreateParams().WithBody(batch.BatchObjectsCreateBody{ + Objects: []*models.Object{ + { + ID: strfmt.UUID("2D0D3E3B-54B2-48D4-BFE0-4BE2C060110E"), + Class: booksClass.Class, + Properties: map[string]interface{}{ + "title": "A book that changes", + "description": "A new (second) iteration", + }, + }, + }, + }), nil) + require.Nil(t, err) + + minimalAssertions := func(t require.TestingT, nodeStatus *models.NodeStatus) {} + verboseAssertions := func(t require.TestingT, nodeStatus *models.NodeStatus) { + require.NotNil(t, nodeStatus.Stats) + assert.Equal(t, int64(1), nodeStatus.Stats.ObjectCount) + } + + testStatusResponse(t, minimalAssertions, verboseAssertions, "") + }) +} + +func TestNodesApi_Compression_AsyncIndexing(t *testing.T) { + ctx := context.Background() + compose, err := docker.New(). + WithWeaviate(). + WithText2VecContextionary(). + WithWeaviateEnv("ASYNC_INDEXING", "true"). + WithWeaviateEnv("ASYNC_INDEXING_STALE_TIMEOUT", "500ms"). + WithWeaviateEnv("QUEUE_SCHEDULER_INTERVAL", "100ms"). 
+ Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + defer helper.SetupClient(fmt.Sprintf("%s:%s", helper.ServerHost, helper.ServerPort)) + helper.SetupClient(compose.GetWeaviate().URI()) + + t.Run("validate flat compression status", func(t *testing.T) { + booksClass := books.ClassContextionaryVectorizer() + booksClass.VectorIndexType = "flat" + booksClass.VectorIndexConfig = map[string]interface{}{ + "bq": map[string]interface{}{ + "enabled": true, + }, + } + helper.CreateClass(t, booksClass) + defer helper.DeleteClass(t, booksClass.Class) + + t.Run("check compressed true", func(t *testing.T) { + checkThunk := func() interface{} { + verbose := "verbose" + params := nodes.NewNodesGetParams().WithOutput(&verbose) + resp, err := helper.Client(t).Nodes.NodesGet(params, nil) + require.Nil(t, err) + + nodeStatusResp := resp.GetPayload() + require.NotNil(t, nodeStatusResp) + + nodes := nodeStatusResp.Nodes + require.NotNil(t, nodes) + require.Len(t, nodes, 1) + + nodeStatus := nodes[0] + require.NotNil(t, nodeStatus) + return nodeStatus.Shards[0].Compressed + } + + helper.AssertEventuallyEqualWithFrequencyAndTimeout(t, true, checkThunk, 100*time.Millisecond, 15*time.Second) + }) + }) + + t.Run("validate hnsw pq async compression", func(t *testing.T) { + booksClass := books.ClassContextionaryVectorizer() + booksClass.VectorIndexConfig = map[string]interface{}{ + "pq": map[string]interface{}{ + "trainingLimit": 256, + "enabled": true, + "segments": 1, + }, + } + helper.CreateClass(t, booksClass) + defer helper.DeleteClass(t, booksClass.Class) + + t.Run("check compressed initially false", func(t *testing.T) { + verbose := "verbose" + params := nodes.NewNodesGetParams().WithOutput(&verbose) + resp, err := helper.Client(t).Nodes.NodesGet(params, nil) + require.Nil(t, err) + + nodeStatusResp := resp.GetPayload() + require.NotNil(t, nodeStatusResp) + + nodes := nodeStatusResp.Nodes + require.NotNil(t, nodes) + 
require.Len(t, nodes, 1) + + nodeStatus := nodes[0] + require.NotNil(t, nodeStatus) + + require.False(t, nodeStatus.Shards[0].Compressed) + }) + + t.Run("load data for pq", func(t *testing.T) { + num := 1024 + objects := make([]*models.Object, num) + + for i := 0; i < num; i++ { + objects[i] = &models.Object{ + Class: booksClass.Class, + Vector: []float32{float32(i % 32), float32(i), 3.0, 4.0}, + } + } + + _, err := helper.BatchClient(t).BatchObjectsCreate( + batch.NewBatchObjectsCreateParams().WithBody(batch.BatchObjectsCreateBody{ + Objects: objects, + }, + ), nil) + require.Nil(t, err) + }) + + t.Run("check eventually compressed if async enabled", func(t *testing.T) { + checkThunk := func() interface{} { + verbose := "verbose" + params := nodes.NewNodesGetParams().WithOutput(&verbose) + resp, err := helper.Client(t).Nodes.NodesGet(params, nil) + require.Nil(t, err) + + nodeStatusResp := resp.GetPayload() + require.NotNil(t, nodeStatusResp) + + nodes := nodeStatusResp.Nodes + require.NotNil(t, nodes) + require.Len(t, nodes, 1) + + nodeStatus := nodes[0] + require.NotNil(t, nodeStatus) + return nodeStatus.Shards[0].Compressed + } + + helper.AssertEventuallyEqualWithFrequencyAndTimeout(t, true, checkThunk, 100*time.Millisecond, 15*time.Second) + }) + }) +} + +func TestNodesApi_Compression_SyncIndexing(t *testing.T) { + ctx := context.Background() + compose, err := docker.New(). + WithWeaviate(). + WithText2VecContextionary(). 
+ Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + defer helper.SetupClient(fmt.Sprintf("%s:%s", helper.ServerHost, helper.ServerPort)) + helper.SetupClient(compose.GetWeaviate().URI()) + t.Run("validate flat compression status", func(t *testing.T) { + booksClass := books.ClassContextionaryVectorizer() + booksClass.VectorIndexType = "flat" + booksClass.VectorIndexConfig = map[string]interface{}{ + "bq": map[string]interface{}{ + "enabled": true, + }, + } + helper.CreateClass(t, booksClass) + defer helper.DeleteClass(t, booksClass.Class) + + t.Run("check compressed true", func(t *testing.T) { + checkThunk := func() interface{} { + verbose := "verbose" + params := nodes.NewNodesGetParams().WithOutput(&verbose) + resp, err := helper.Client(t).Nodes.NodesGet(params, nil) + require.Nil(t, err) + + nodeStatusResp := resp.GetPayload() + require.NotNil(t, nodeStatusResp) + + nodes := nodeStatusResp.Nodes + require.NotNil(t, nodes) + require.Len(t, nodes, 1) + + nodeStatus := nodes[0] + require.NotNil(t, nodeStatus) + return nodeStatus.Shards[0].Compressed + } + + helper.AssertEventuallyEqualWithFrequencyAndTimeout(t, true, checkThunk, 100*time.Millisecond, 10*time.Second) + }) + }) +} + +func testStatusResponse(t *testing.T, minimalAssertions, verboseAssertions func(require.TestingT, *models.NodeStatus), + class string, +) { + minimal, verbose := verbosity.OutputMinimal, verbosity.OutputVerbose + + commonTests := func(resp *nodes.NodesGetOK) { + require.NotNil(t, resp.Payload) + nodes := resp.Payload.Nodes + require.NotNil(t, nodes) + require.Len(t, nodes, 1) + minimalAssertions(t, nodes[0]) + } + + t.Run("minimal", func(t *testing.T) { + payload, err := getNodesStatus(t, minimal, class) + require.Nil(t, err) + commonTests(&nodes.NodesGetOK{Payload: payload}) + }) + + if verboseAssertions != nil { + t.Run("verbose", func(t *testing.T) { + getNodes := func() (*models.NodesStatusResponse, error) { + return 
getNodesStatus(t, verbose, class) + } + assert.EventuallyWithT(t, func(t *assert.CollectT) { + payload, err := getNodes() + require.Nil(t, err) + commonTests(&nodes.NodesGetOK{Payload: payload}) + // If commonTests pass, resp.Nodes[0] != nil + verboseAssertions(t, payload.Nodes[0]) + }, 15*time.Second, 500*time.Millisecond) + }) + } +} + +func getNodesStatus(t *testing.T, output, class string) (payload *models.NodesStatusResponse, err error) { + if class != "" { + params := nodes.NewNodesGetClassParams().WithOutput(&output).WithClassName(class) + body, clientErr := helper.Client(t).Nodes.NodesGetClass(params, nil) + payload, err = body.Payload, clientErr + } else { + params := nodes.NewNodesGetParams().WithOutput(&output) + body, clientErr := helper.Client(t).Nodes.NodesGet(params, nil) + payload, err = body.Payload, clientErr + } + return +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/additional_props_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/additional_props_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9e4fa0549775e03b7d9984e20e380840ae15d2e1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/additional_props_test.go @@ -0,0 +1,95 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func searchNeighbors(t *testing.T) { + listParams := objects.NewObjectsListParams().WithInclude(ptString("nearestNeighbors")) + res, err := helper.Client(t).Objects.ObjectsList(listParams, nil) + require.Nil(t, err, "should not error") + + extractNeighbor := func(in *models.Object) []interface{} { + // marshalling to JSON and back into an untyped map to make sure we assert + // on the actual JSON structure. This way if we accidentally change the + // goswagger generation so it affects both the client and the server in the + // same way, this test should catch it + b, err := json.Marshal(in) + require.Nil(t, err) + + var untyped map[string]interface{} + err = json.Unmarshal(b, &untyped) + require.Nil(t, err) + + return untyped["additional"].(map[string]interface{})["nearestNeighbors"].(map[string]interface{})["neighbors"].([]interface{}) + } + + validateNeighbors(t, extractNeighbor(res.Payload.Objects[0]), extractNeighbor(res.Payload.Objects[1])) +} + +func featureProjection(t *testing.T) { + listParams := objects.NewObjectsListParams().WithInclude(ptString("featureProjection")) + res, err := helper.Client(t).Objects.ObjectsList(listParams, nil) + require.Nil(t, err, "should not error") + + extractProjection := func(in *models.Object) []interface{} { + // marshalling to JSON and back into an untyped map to make sure we assert + // on the actual JSON structure. 
This way if we accidentally change the + // goswagger generation so it affects both the client and the server in the + // same way, this test should catch it + b, err := json.Marshal(in) + require.Nil(t, err) + + var untyped map[string]interface{} + err = json.Unmarshal(b, &untyped) + require.Nil(t, err) + + return untyped["additional"].(map[string]interface{})["featureProjection"].(map[string]interface{})["vector"].([]interface{}) + } + + validateProjections(t, 2, extractProjection(res.Payload.Objects[0]), extractProjection(res.Payload.Objects[1])) +} + +func ptString(in string) *string { + return &in +} + +func validateNeighbors(t *testing.T, neighborsGroups ...[]interface{}) { + for i, group := range neighborsGroups { + if len(group) == 0 { + t.Fatalf("group %d: length of neighbors is 0", i) + } + + for j, neighbor := range group { + asMap := neighbor.(map[string]interface{}) + if len(asMap["concept"].(string)) == 0 { + t.Fatalf("group %d: element %d: concept has length 0", i, j) + } + } + } +} + +func validateProjections(t *testing.T, dims int, vectors ...[]interface{}) { + for _, vector := range vectors { + if len(vector) != dims { + t.Fatalf("expected feature projection vector to have length %d, got: %d", dims, len(vector)) + } + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/auto_schema_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/auto_schema_test.go new file mode 100644 index 0000000000000000000000000000000000000000..873224a94b06f7debe7c54cce5db71c56cc7e47e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/auto_schema_test.go @@ -0,0 +1,271 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +// Acceptance tests for objects.
+ +import ( + "context" + "encoding/json" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/client/schema" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "google.golang.org/protobuf/types/known/structpb" + + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" +) + +const UUID = strfmt.UUID("73f2eb5f-5abf-447a-81ca-74b1dd168241") + +func TestAutoSchemaWithDifferentProperties(t *testing.T) { + // Add two objects with different properties to the same class. With autoschema enabled both should be added and + // the class should have properties form both classes at the end + className := "RandomName234234" + + testCases := []struct { + name string + names []string + }{ + {name: "UpperCase", names: []string{"NonExistingProperty", "OtherNonExistingProperty"}}, + {name: "LowerCase", names: []string{"nonExistingProperty", "otherNonExistingProperty"}}, + } + + for _, test := range testCases { + t.Run(test.name, func(t *testing.T) { + obj1 := &models.Object{ + Class: className, + Properties: map[string]interface{}{ + test.names[0]: "test", + }, + } + params := objects.NewObjectsCreateParams().WithBody(obj1) + resp, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + obj2 := &models.Object{ + Class: className, + Properties: map[string]interface{}{ + test.names[1]: "test", + }, + } + params2 := objects.NewObjectsCreateParams().WithBody(obj2) + resp2, err2 := helper.Client(t).Objects.ObjectsCreate(params2, nil) + helper.AssertRequestOk(t, resp2, err2, nil) + + SchemaParams := schema.NewSchemaDumpParams() + resp3, err3 := helper.Client(t).Schema.SchemaDump(SchemaParams, nil) + helper.AssertRequestOk(t, resp3, err3, nil) + 
assert.Len(t, resp3.Payload.Classes, 1) + class := resp3.Payload.Classes[0] + assert.Len(t, class.Properties, 2) + props := class.Properties + assert.ElementsMatch(t, []string{props[0].Name, props[1].Name}, []string{"nonExistingProperty", "otherNonExistingProperty"}) + deleteObjectClass(t, className) + }) + } +} + +// run from setup_test.go +func autoSchemaObjects(t *testing.T) { + autoSchemaObjectTestCases := []struct { + // the name of the test + name string + // the example object, with non existent classes and properties. + object func() *models.Object + }{ + { + name: "non existing class", + object: func() *models.Object { + return &models.Object{ + ID: "8e2997f2-1972-4ee2-ad35-5fc704f2893e", + Class: "NonExistingClass", + Properties: map[string]interface{}{ + "testString": "test", + "testNumber": json.Number("1"), + "testDate": "2002-10-02T15:00:00Z", + "testBoolean": true, + "testGeoCoordinates": map[string]interface{}{ + "latitude": json.Number("1.01"), + "longitude": json.Number("1.01"), + }, + "testPhoneNumber": map[string]interface{}{ + "input": "020 1234567", + "defaultCountry": "nl", + }, + "textArray": []string{"a", "b", "c"}, + "intArray": []int{1, 2, 3}, + "numberArray": []float64{11.0, 22.0, 33.0}, + }, + } + }, + }, + { + name: "non existing property", + object: func() *models.Object { + return &models.Object{ + Class: "TestObject", + Properties: map[string]interface{}{ + "nonExistingProperty": "test", + }, + } + }, + }, + { + name: "non existing property update class", + object: func() *models.Object { + return &models.Object{ + ID: "8e2997f2-1972-4ee2-ad35-5fc704f2893f", + Class: "TestObject", + Properties: map[string]interface{}{ + "nonExistingDateProperty": "2002-10-02T15:00:00Z", + "nonExistingNumberProperty": json.Number("1"), + }, + } + }, + }, + } + + t.Run("auto schema should create object with missing classes and properties", func(t *testing.T) { + for _, example_ := range autoSchemaObjectTestCases { + t.Run(example_.name, func(t *testing.T)
{ + example := example_ // Needed; example is updated to point to a new test case. + t.Parallel() + + params := objects.NewObjectsCreateParams().WithBody(example.object()) + resp, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + }) + } + }) + + autoSchemaCrossRefTestCases := []struct { + // the name of the test + name string + // the example object, with non existent classes and properties. + object func() *models.Object + }{ + { + name: "non existing cross ref property update class", + object: func() *models.Object { + return &models.Object{ + Class: "TestObject", + Properties: map[string]interface{}{ + "hasNonExistingClass": []interface{}{ + map[string]interface{}{ + "beacon": "weaviate://localhost/8e2997f2-1972-4ee2-ad35-5fc704f2893e", + }, + }, + }, + } + }, + }, + { + name: "non existing cross ref property update class", + object: func() *models.Object { + return &models.Object{ + Class: "TestObject", + Properties: map[string]interface{}{ + "hasNonExistingClassAndTestObject": []interface{}{ + map[string]interface{}{ + "beacon": "weaviate://localhost/8e2997f2-1972-4ee2-ad35-5fc704f2893e", + }, + map[string]interface{}{ + "beacon": "weaviate://localhost/8e2997f2-1972-4ee2-ad35-5fc704f2893f", + }, + }, + }, + } + }, + }, + } + + t.Run("auto schema should create object with missing cross ref properties", func(t *testing.T) { + for _, example_ := range autoSchemaCrossRefTestCases { + t.Run(example_.name, func(t *testing.T) { + example := example_ // Needed; example is updated to point to a new test case. + params := objects.NewObjectsCreateParams().WithBody(example.object()) + resp, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + }) + } + }) +} + +func TestClassCapitalisationInBatchWithAutoSchemaAndExistingClass(t *testing.T) { + ctx := context.Background() + compose, err := docker.New(). + WithWeaviateWithGRPC(). 
+ WithWeaviateEnv("AUTOSCHEMA_ENABLED", "true"). + Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + defer helper.ResetClient() + helper.SetupClient(compose.GetWeaviate().URI()) + helper.SetupGRPCClient(t, compose.GetWeaviate().GrpcURI()) + + helper.CreateClass(t, &models.Class{ + Class: "Test", + Properties: []*models.Property{{ + DataType: []string{"text"}, + Name: "name", + }}, + }) + defer helper.DeleteClass(t, "Test") + + t.Run("batch insert object over grpc with lowercase class name", func(t *testing.T) { + res, err := helper.ClientGRPC(t).BatchObjects(ctx, &pb.BatchObjectsRequest{ + Objects: []*pb.BatchObject{{ + Collection: "test", + Properties: &pb.BatchObject_Properties{ + NonRefProperties: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "name": { + Kind: &structpb.Value_StringValue{ + StringValue: "test", + }, + }, + }, + }, + }, + Uuid: string(UUID), + }}, + }) + require.Nil(t, err) + require.Len(t, res.Errors, 0) + }) + + t.Run("batch insert object over rest with lowercase class name", func(t *testing.T) { + res, err := helper.Client(t).Batch.BatchObjectsCreate(batch.NewBatchObjectsCreateParams().WithBody(batch.BatchObjectsCreateBody{ + Objects: []*models.Object{ + { + Class: "test", + Properties: map[string]interface{}{ + "name": "test", + }, + }, + }, + }), nil) + require.Nil(t, err) + require.Nil(t, res.Payload[0].Result.Errors) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/crefs_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/crefs_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d7000886f02a3d1331eaebbfa6fee3e52443a95a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/crefs_test.go @@ -0,0 +1,991 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ 
|_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "testing" + + "github.com/google/uuid" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/batch" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/client/objects" + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +const ( + beaconStart = "weaviate://localhost/" + pathStart = "/v1/objects/" +) + +func TestRefsWithTenantWithoutToClass(t *testing.T) { + refToClassName := "ReferenceTo" + refFromClassName := "ReferenceFrom" + + toParam := clschema.NewSchemaObjectsCreateParams().WithObjectClass( + &models.Class{Class: refToClassName, MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}}, + ) + respTo, err := helper.Client(t).Schema.SchemaObjectsCreate(toParam, nil) + helper.AssertRequestOk(t, respTo, err, nil) + + fromParam := clschema.NewSchemaObjectsCreateParams().WithObjectClass( + &models.Class{ + Class: refFromClassName, + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + Properties: []*models.Property{ + { + DataType: []string{refToClassName}, + Name: "ref", + }, + }, + }, + ) + respFrom, err := helper.Client(t).Schema.SchemaObjectsCreate(fromParam, nil) + helper.AssertRequestOk(t, respFrom, err, nil) + + defer deleteObjectClass(t, refToClassName) + defer deleteObjectClass(t, refFromClassName) + + tenant := "tenant" + tenants := make([]*models.Tenant, 1) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenant} + } + helper.CreateTenants(t, refToClassName, tenants) + helper.CreateTenants(t, refFromClassName, tenants) + + refToId := strfmt.UUID(uuid.New().String()) + assertCreateObjectWithID(t, refToClassName, tenant, refToId, 
map[string]interface{}{}) + + refFromId1 := strfmt.UUID(uuid.New().String()) + assertCreateObjectWithID(t, refFromClassName, tenant, refFromId1, map[string]interface{}{}) + + // add reference between objects without to class name + postRefParams := objects.NewObjectsClassReferencesCreateParams(). + WithID(refFromId1). + WithPropertyName("ref").WithClassName(refFromClassName). + WithBody(&models.SingleRef{ + Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", refToId.String())), + }).WithTenant(&tenant) + postRefResponse, err := helper.Client(t).Objects.ObjectsClassReferencesCreate(postRefParams, nil) + helper.AssertRequestOk(t, postRefResponse, err, nil) + + // add reference from batch + refFromId2 := strfmt.UUID(uuid.New().String()) + assertCreateObjectWithID(t, refFromClassName, tenant, refFromId2, map[string]interface{}{}) + + // add refs without toClass + batchRefs := []*models.BatchReference{ + {From: strfmt.URI(beaconStart + "ReferenceFrom/" + refFromId2 + "/ref"), To: strfmt.URI(beaconStart + refToId), Tenant: tenant}, + } + postRefBatchParams := batch.NewBatchReferencesCreateParams().WithBody(batchRefs) + postRefBatchResponse, err := helper.Client(t).Batch.BatchReferencesCreate(postRefBatchParams, nil) + helper.AssertRequestOk(t, postRefBatchResponse, err, nil) + require.Nil(t, postRefBatchResponse.Payload[0].Result.Errors) +} + +func TestRefsWithoutToClass(t *testing.T) { + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(&models.Class{Class: "ReferenceTo"}) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + refToClassName := "ReferenceTo" + refFromClassName := "ReferenceFrom" + otherClassMT := "Other" + + paramsMT := clschema.NewSchemaObjectsCreateParams().WithObjectClass( + &models.Class{ + Class: otherClassMT, + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + Properties: []*models.Property{ + { + DataType: []string{refToClassName}, + Name: "ref", + }, + }, 
+ }, + ) + respMT, err := helper.Client(t).Schema.SchemaObjectsCreate(paramsMT, nil) + helper.AssertRequestOk(t, respMT, err, nil) + + tenant := "tenant" + tenants := make([]*models.Tenant, 1) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenant} + } + helper.CreateTenants(t, otherClassMT, tenants) + + refFromClass := &models.Class{ + Class: refFromClassName, + Properties: []*models.Property{ + { + DataType: []string{refToClassName}, + Name: "ref", + }, + }, + } + params2 := clschema.NewSchemaObjectsCreateParams().WithObjectClass(refFromClass) + resp2, err := helper.Client(t).Schema.SchemaObjectsCreate(params2, nil) + helper.AssertRequestOk(t, resp2, err, nil) + + defer deleteObjectClass(t, refToClassName) + defer deleteObjectClass(t, refFromClassName) + defer deleteObjectClass(t, otherClassMT) + + refToId := assertCreateObject(t, refToClassName, map[string]interface{}{}) + assertGetObjectWithClass(t, refToId, refToClassName) + assertCreateObjectWithID(t, otherClassMT, tenant, refToId, map[string]interface{}{}) + refFromId := assertCreateObject(t, refFromClassName, map[string]interface{}{}) + assertGetObjectWithClass(t, refFromId, refFromClassName) + + postRefParams := objects.NewObjectsClassReferencesCreateParams(). + WithID(refFromId). + WithPropertyName("ref").WithClassName(refFromClass.Class). 
+ WithBody(&models.SingleRef{ + Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", refToId.String())), + }) + postRefResponse, err := helper.Client(t).Objects.ObjectsClassReferencesCreate(postRefParams, nil) + helper.AssertRequestOk(t, postRefResponse, err, nil) + + // validate that ref was create for the correct class + objWithRef := func() interface{} { + obj := assertGetObjectWithClass(t, refFromId, refFromClassName) + return obj.Properties + } + helper.AssertEventuallyEqual(t, map[string]interface{}{ + "ref": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf(beaconStart+"%s/%s", refToClassName, refToId.String()), + "href": fmt.Sprintf(pathStart+"%s/%s", refToClassName, refToId.String()), + }, + }, + }, objWithRef) + + // update prop with multiple references + updateRefParams := objects.NewObjectsClassReferencesPutParams(). + WithID(refFromId). + WithPropertyName("ref").WithClassName(refFromClass.Class). + WithBody(models.MultipleRef{ + {Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", refToId.String()))}, + {Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s/%s", refToClassName, refToId.String()))}, + }) + updateRefResponse, err := helper.Client(t).Objects.ObjectsClassReferencesPut(updateRefParams, nil) + helper.AssertRequestOk(t, updateRefResponse, err, nil) + + objWithTwoRef := func() interface{} { + obj := assertGetObjectWithClass(t, refFromId, refFromClassName) + return obj.Properties + } + helper.AssertEventuallyEqual(t, map[string]interface{}{ + "ref": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf(beaconStart+"%s/%s", refToClassName, refToId.String()), + "href": fmt.Sprintf(pathStart+"%s/%s", refToClassName, refToId.String()), + }, + map[string]interface{}{ + "beacon": fmt.Sprintf(beaconStart+"%s/%s", refToClassName, refToId.String()), + "href": fmt.Sprintf(pathStart+"%s/%s", refToClassName, refToId.String()), + }, + }, + }, objWithTwoRef) + + // delete reference without class + deleteRefParams := 
objects.NewObjectsClassReferencesDeleteParams(). + WithID(refFromId). + WithPropertyName("ref").WithClassName(refFromClass.Class). + WithBody(&models.SingleRef{ + Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s", refToId.String())), + }) + deleteRefResponse, err := helper.Client(t).Objects.ObjectsClassReferencesDelete(deleteRefParams, nil) + helper.AssertRequestOk(t, deleteRefResponse, err, nil) + objWithoutRef := func() interface{} { + obj := assertGetObjectWithClass(t, refFromId, refFromClassName) + return obj.Properties + } + helper.AssertEventuallyEqual(t, map[string]interface{}{ + "ref": []interface{}{}, + }, objWithoutRef) +} + +func TestRefsMultiTarget(t *testing.T) { + refToClassName := "ReferenceTo" + refFromClassName := "ReferenceFrom" + defer deleteObjectClass(t, refToClassName) + defer deleteObjectClass(t, refFromClassName) + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(&models.Class{Class: refToClassName}) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + refFromClass := &models.Class{ + Class: refFromClassName, + Properties: []*models.Property{ + { + DataType: []string{refToClassName, refFromClassName}, + Name: "ref", + }, + }, + } + params2 := clschema.NewSchemaObjectsCreateParams().WithObjectClass(refFromClass) + resp2, err := helper.Client(t).Schema.SchemaObjectsCreate(params2, nil) + helper.AssertRequestOk(t, resp2, err, nil) + + refToId := assertCreateObject(t, refToClassName, map[string]interface{}{}) + assertGetObjectEventually(t, refToId) + refFromId := assertCreateObject(t, refFromClassName, map[string]interface{}{}) + assertGetObjectEventually(t, refFromId) + + cases := []struct { + classRef string + id string + }{ + {classRef: "", id: refToId.String()}, + {classRef: refToClassName + "/", id: refToId.String()}, + {classRef: refFromClassName + "/", id: refFromId.String()}, + } + for _, tt := range cases { + postRefParams := 
objects.NewObjectsClassReferencesCreateParams(). + WithID(refFromId). + WithPropertyName("ref").WithClassName(refFromClass.Class). + WithBody(&models.SingleRef{ + Beacon: strfmt.URI(fmt.Sprintf(beaconStart+"%s%s", tt.classRef, tt.id)), + }) + postRefResponse, err := helper.Client(t).Objects.ObjectsClassReferencesCreate(postRefParams, nil) + helper.AssertRequestOk(t, postRefResponse, err, nil) + + // validate that ref was create for the correct class + objWithRef := func() interface{} { + obj := assertGetObjectWithClass(t, refFromId, refFromClassName) + return obj.Properties + } + helper.AssertEventuallyEqual(t, map[string]interface{}{ + "ref": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf(beaconStart+"%s%s", tt.classRef, tt.id), + "href": fmt.Sprintf(pathStart+"%s%s", tt.classRef, tt.id), + }, + }, + }, objWithRef) + + // delete refs + updateRefParams := objects.NewObjectsClassReferencesPutParams(). + WithID(refFromId). + WithPropertyName("ref").WithClassName(refFromClass.Class). 
+ WithBody(models.MultipleRef{}) + updateRefResponse, err := helper.Client(t).Objects.ObjectsClassReferencesPut(updateRefParams, nil) + helper.AssertRequestOk(t, updateRefResponse, err, nil) + } +} + +func TestBatchRefsMultiTarget(t *testing.T) { + refToClassName := "ReferenceTo" + refFromClassName := "ReferenceFrom" + defer deleteObjectClass(t, refToClassName) + defer deleteObjectClass(t, refFromClassName) + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(&models.Class{Class: refToClassName}) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + refFromClass := &models.Class{ + Class: refFromClassName, + Properties: []*models.Property{ + { + DataType: []string{refToClassName, refFromClassName}, + Name: "ref", + }, + }, + } + params2 := clschema.NewSchemaObjectsCreateParams().WithObjectClass(refFromClass) + resp2, err := helper.Client(t).Schema.SchemaObjectsCreate(params2, nil) + helper.AssertRequestOk(t, resp2, err, nil) + + uuidsTo := make([]strfmt.UUID, 10) + uuidsFrom := make([]strfmt.UUID, 10) + for i := 0; i < 10; i++ { + uuidsTo[i] = assertCreateObject(t, refToClassName, map[string]interface{}{}) + assertGetObjectEventually(t, uuidsTo[i]) + uuidsFrom[i] = assertCreateObject(t, refFromClassName, map[string]interface{}{}) + assertGetObjectEventually(t, uuidsFrom[i]) + } + + // add refs without toClass + var batchRefs []*models.BatchReference + for i := range uuidsFrom[:2] { + from := beaconStart + "ReferenceFrom/" + uuidsFrom[i] + "/ref" + to := beaconStart + uuidsTo[i] + batchRefs = append(batchRefs, &models.BatchReference{From: strfmt.URI(from), To: strfmt.URI(to)}) + } + + // add refs with toClass target 1 + for i := range uuidsFrom[2:5] { + j := i + 2 + from := beaconStart + "ReferenceFrom/" + uuidsFrom[j] + "/ref" + to := beaconStart + "ReferenceTo/" + uuidsTo[j] + batchRefs = append(batchRefs, &models.BatchReference{From: strfmt.URI(from), To: strfmt.URI(to)}) + } + + 
// add refs with toClass target 2 + for i := range uuidsFrom[5:] { + j := i + 5 + from := beaconStart + "ReferenceFrom/" + uuidsFrom[j] + "/ref" + to := beaconStart + "ReferenceFrom/" + uuidsTo[j] + batchRefs = append(batchRefs, &models.BatchReference{From: strfmt.URI(from), To: strfmt.URI(to)}) + } + + postRefParams := batch.NewBatchReferencesCreateParams().WithBody(batchRefs) + postRefResponse, err := helper.Client(t).Batch.BatchReferencesCreate(postRefParams, nil) + helper.AssertRequestOk(t, postRefResponse, err, nil) + + // no autodetect for multi-target + for i := range uuidsFrom[:2] { + objWithRef := func() interface{} { + obj := assertGetObjectWithClass(t, uuidsFrom[i], refFromClassName) + return obj.Properties + } + helper.AssertEventuallyEqual(t, map[string]interface{}{ + "ref": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf(beaconStart+"%s", uuidsTo[i].String()), + "href": fmt.Sprintf(pathStart+"%s", uuidsTo[i].String()), + }, + }, + }, objWithRef) + } + + // refs for target 1 + for i := range uuidsFrom[2:5] { + j := i + 2 + objWithRef := func() interface{} { + obj := assertGetObjectWithClass(t, uuidsFrom[j], refFromClassName) + return obj.Properties + } + helper.AssertEventuallyEqual(t, map[string]interface{}{ + "ref": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf(beaconStart+"%s/%s", refToClassName, uuidsTo[j].String()), + "href": fmt.Sprintf(pathStart+"%s/%s", refToClassName, uuidsTo[j].String()), + }, + }, + }, objWithRef) + } + + // refs for target 2 + for i := range uuidsFrom[5:] { + j := i + 5 + objWithRef := func() interface{} { + obj := assertGetObjectWithClass(t, uuidsFrom[j], refFromClassName) + return obj.Properties + } + helper.AssertEventuallyEqual(t, map[string]interface{}{ + "ref": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf(beaconStart+"%s/%s", refFromClassName, uuidsTo[j].String()), + "href": fmt.Sprintf(pathStart+"%s/%s", refFromClassName, uuidsTo[j].String()), + }, + }, + }, 
objWithRef) + } +} + +func TestBatchRefsWithoutFromAndToClass(t *testing.T) { + refToClassName := "ReferenceTo" + refFromClassName := "ReferenceFrom" + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(&models.Class{Class: refToClassName}) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + refFromClass := &models.Class{ + Class: refFromClassName, + Properties: []*models.Property{ + { + DataType: []string{refToClassName}, + Name: "ref", + }, + }, + } + params2 := clschema.NewSchemaObjectsCreateParams().WithObjectClass(refFromClass) + resp2, err := helper.Client(t).Schema.SchemaObjectsCreate(params2, nil) + helper.AssertRequestOk(t, resp2, err, nil) + + defer deleteObjectClass(t, refToClassName) + defer deleteObjectClass(t, refFromClassName) + + uuidsTo := make([]strfmt.UUID, 10) + uuidsFrom := make([]strfmt.UUID, 10) + for i := 0; i < 10; i++ { + uuidsTo[i] = assertCreateObject(t, refToClassName, map[string]interface{}{}) + assertGetObjectWithClass(t, uuidsTo[i], refToClassName) + + uuidsFrom[i] = assertCreateObject(t, refFromClassName, map[string]interface{}{}) + assertGetObjectWithClass(t, uuidsFrom[i], refFromClassName) + } + + // cannot do from urls without class + var batchRefs []*models.BatchReference + for i := range uuidsFrom { + from := beaconStart + uuidsFrom[i] + "/ref" + to := beaconStart + uuidsTo[i] + batchRefs = append(batchRefs, &models.BatchReference{From: strfmt.URI(from), To: strfmt.URI(to)}) + } + + postRefParams := batch.NewBatchReferencesCreateParams().WithBody(batchRefs) + resp3, err := helper.Client(t).Batch.BatchReferencesCreate(postRefParams, nil) + require.Nil(t, err) + require.NotNil(t, resp3) + for i := range resp3.Payload { + require.NotNil(t, resp3.Payload[i].Result.Errors) + } +} + +func TestBatchRefWithErrors(t *testing.T) { + refToClassName := "ReferenceTo" + refFromClassName := "ReferenceFrom" + + params := 
clschema.NewSchemaObjectsCreateParams().WithObjectClass(&models.Class{Class: refToClassName}) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + refFromClass := &models.Class{ + Class: refFromClassName, + Properties: []*models.Property{ + { + DataType: []string{refToClassName}, + Name: "ref", + }, + }, + } + params2 := clschema.NewSchemaObjectsCreateParams().WithObjectClass(refFromClass) + resp2, err := helper.Client(t).Schema.SchemaObjectsCreate(params2, nil) + helper.AssertRequestOk(t, resp2, err, nil) + + defer deleteObjectClass(t, refToClassName) + defer deleteObjectClass(t, refFromClassName) + + uuidsTo := make([]strfmt.UUID, 2) + uuidsFrom := make([]strfmt.UUID, 2) + for i := 0; i < 2; i++ { + uuidsTo[i] = assertCreateObject(t, refToClassName, map[string]interface{}{}) + assertGetObjectWithClass(t, uuidsTo[i], refToClassName) + + uuidsFrom[i] = assertCreateObject(t, refFromClassName, map[string]interface{}{}) + assertGetObjectWithClass(t, uuidsFrom[i], refFromClassName) + } + + var batchRefs []*models.BatchReference + for i := range uuidsFrom { + from := beaconStart + "ReferenceFrom/" + uuidsFrom[i] + "/ref" + to := beaconStart + uuidsTo[i] + batchRefs = append(batchRefs, &models.BatchReference{From: strfmt.URI(from), To: strfmt.URI(to)}) + } + + // append one entry with a non-existent class + batchRefs = append(batchRefs, &models.BatchReference{From: strfmt.URI(beaconStart + "DoesNotExist/" + uuidsFrom[0] + "/ref"), To: strfmt.URI(beaconStart + uuidsTo[0])}) + + // append one entry with a non-existent property for existing class + batchRefs = append(batchRefs, &models.BatchReference{From: strfmt.URI(beaconStart + "ReferenceFrom/" + uuidsFrom[0] + "/doesNotExist"), To: strfmt.URI(beaconStart + uuidsTo[0])}) + + postRefParams := batch.NewBatchReferencesCreateParams().WithBody(batchRefs) + postRefResponse, err := helper.Client(t).Batch.BatchReferencesCreate(postRefParams, nil) + 
helper.AssertRequestOk(t, postRefResponse, err, nil) + + require.NotNil(t, postRefResponse.Payload[2].Result.Errors) + require.Contains(t, postRefResponse.Payload[2].Result.Errors.Error[0].Message, "source class \"DoesNotExist\" not found in schema") + + require.NotNil(t, postRefResponse.Payload[3].Result.Errors) + require.Contains(t, postRefResponse.Payload[3].Result.Errors.Error[0].Message, "property doesNotExist does not exist for class ReferenceFrom") +} + +func TestBatchRefsWithoutToClass(t *testing.T) { + refToClassName := "ReferenceTo" + refFromClassName := "ReferenceFrom" + otherClassMT := "Other" + + // other class has multi-tenancy enabled to make sure that problems trigger an error + paramsMT := clschema.NewSchemaObjectsCreateParams().WithObjectClass( + &models.Class{Class: otherClassMT, MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}}, + ) + respMT, err := helper.Client(t).Schema.SchemaObjectsCreate(paramsMT, nil) + helper.AssertRequestOk(t, respMT, err, nil) + + tenant := "tenant" + tenants := make([]*models.Tenant, 1) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenant} + } + helper.CreateTenants(t, otherClassMT, tenants) + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(&models.Class{Class: refToClassName}) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + refFromClass := &models.Class{ + Class: refFromClassName, + Properties: []*models.Property{ + { + DataType: []string{refToClassName}, + Name: "ref", + }, + }, + } + params2 := clschema.NewSchemaObjectsCreateParams().WithObjectClass(refFromClass) + resp2, err := helper.Client(t).Schema.SchemaObjectsCreate(params2, nil) + helper.AssertRequestOk(t, resp2, err, nil) + + defer deleteObjectClass(t, refToClassName) + defer deleteObjectClass(t, refFromClassName) + defer deleteObjectClass(t, otherClassMT) + + uuidsTo := make([]strfmt.UUID, 10) + uuidsFrom := make([]strfmt.UUID, 10) + for 
i := 0; i < 10; i++ { + uuidsTo[i] = assertCreateObject(t, refToClassName, map[string]interface{}{}) + assertGetObjectWithClass(t, uuidsTo[i], refToClassName) + + // create object with same id in MT class + assertCreateObjectWithID(t, otherClassMT, tenant, uuidsTo[i], map[string]interface{}{}) + + uuidsFrom[i] = assertCreateObject(t, refFromClassName, map[string]interface{}{}) + assertGetObjectWithClass(t, uuidsFrom[i], refFromClassName) + } + + var batchRefs []*models.BatchReference + for i := range uuidsFrom { + from := beaconStart + "ReferenceFrom/" + uuidsFrom[i] + "/ref" + to := beaconStart + uuidsTo[i] + batchRefs = append(batchRefs, &models.BatchReference{From: strfmt.URI(from), To: strfmt.URI(to)}) + } + + postRefParams := batch.NewBatchReferencesCreateParams().WithBody(batchRefs) + postRefResponse, err := helper.Client(t).Batch.BatchReferencesCreate(postRefParams, nil) + helper.AssertRequestOk(t, postRefResponse, err, nil) + + for i := range uuidsFrom { + // validate that ref was create for the correct class + objWithRef := func() interface{} { + obj := assertGetObjectWithClass(t, uuidsFrom[i], refFromClassName) + return obj.Properties + } + helper.AssertEventuallyEqual(t, map[string]interface{}{ + "ref": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf(beaconStart+"%s/%s", refToClassName, uuidsTo[i].String()), + "href": fmt.Sprintf(pathStart+"%s/%s", refToClassName, uuidsTo[i].String()), + }, + }, + }, objWithRef) + } +} + +func TestObjectBatchToClassDetection(t *testing.T) { + // uses same code path as normal object add + refToClassName := "ReferenceTo" + refFromClassName := "ReferenceFrom" + defer deleteObjectClass(t, refToClassName) + defer deleteObjectClass(t, refFromClassName) + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(&models.Class{Class: refToClassName}) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + refFromClass := &models.Class{ + 
Class: refFromClassName, + Properties: []*models.Property{ + { + DataType: []string{refToClassName}, + Name: "ref", + }, + }, + } + params2 := clschema.NewSchemaObjectsCreateParams().WithObjectClass(refFromClass) + resp2, err := helper.Client(t).Schema.SchemaObjectsCreate(params2, nil) + helper.AssertRequestOk(t, resp2, err, nil) + + refs := make([]interface{}, 10) + uuidsTo := make([]strfmt.UUID, 10) + + for i := 0; i < 10; i++ { + uuidTo := assertCreateObject(t, refToClassName, map[string]interface{}{}) + uuidsTo[i] = uuidTo + assertGetObjectEventually(t, uuidTo) + refs[i] = map[string]interface{}{ + "beacon": beaconStart + uuidTo, + } + } + + fromBatch := make([]*models.Object, 10) + for i := 0; i < 10; i++ { + fromBatch[i] = &models.Object{ + Class: refFromClassName, + ID: strfmt.UUID(uuid.New().String()), + Properties: map[string]interface{}{ + "ref": refs[i : i+1], + }, + } + } + paramsBatch := batch.NewBatchObjectsCreateParams().WithBody( + batch.BatchObjectsCreateBody{ + Objects: fromBatch, + }, + ) + res, err := helper.Client(t).Batch.BatchObjectsCreate(paramsBatch, nil) + require.Nil(t, err) + for _, elem := range res.Payload { + assert.Nil(t, elem.Result.Errors) + } + + for i := range fromBatch { + // validate that ref was create for the correct class + objWithRef := func() interface{} { + obj := assertGetObjectWithClass(t, fromBatch[i].ID, refFromClassName) + return obj.Properties + } + helper.AssertEventuallyEqual(t, map[string]interface{}{ + "ref": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf(beaconStart+"%s/%s", refToClassName, uuidsTo[i].String()), + "href": fmt.Sprintf(pathStart+"%s/%s", refToClassName, uuidsTo[i].String()), + }, + }, + }, objWithRef) + } +} + +func TestObjectCrefWithoutToClass(t *testing.T) { + refToClassName := "ReferenceTo" + refFromClassName := "ReferenceFrom" + otherClassMT := "Other" + + // other class has multi-tenancy enabled to make sure that problems trigger an error + paramsMT := 
clschema.NewSchemaObjectsCreateParams().WithObjectClass( + &models.Class{Class: otherClassMT, MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}}, + ) + respMT, err := helper.Client(t).Schema.SchemaObjectsCreate(paramsMT, nil) + helper.AssertRequestOk(t, respMT, err, nil) + + tenant := "tenant" + tenants := make([]*models.Tenant, 1) + for i := range tenants { + tenants[i] = &models.Tenant{Name: tenant} + } + helper.CreateTenants(t, otherClassMT, tenants) + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(&models.Class{Class: refToClassName}) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + refFromClass := &models.Class{ + Class: refFromClassName, + Properties: []*models.Property{ + { + DataType: []string{refToClassName}, + Name: "ref", + }, + }, + } + params2 := clschema.NewSchemaObjectsCreateParams().WithObjectClass(refFromClass) + resp2, err := helper.Client(t).Schema.SchemaObjectsCreate(params2, nil) + helper.AssertRequestOk(t, resp2, err, nil) + + defer deleteObjectClass(t, refToClassName) + defer deleteObjectClass(t, refFromClassName) + defer deleteObjectClass(t, otherClassMT) + + refs := make([]interface{}, 10) + uuids := make([]strfmt.UUID, 10) + for i := 0; i < 10; i++ { + uuidTo := assertCreateObject(t, refToClassName, map[string]interface{}{}) + assertGetObjectWithClass(t, uuidTo, refToClassName) + + // create object with same id in MT class + assertCreateObjectWithID(t, otherClassMT, tenant, uuidTo, map[string]interface{}{}) + + refs[i] = map[string]interface{}{ + "beacon": beaconStart + uuidTo, + } + uuids[i] = uuidTo + } + + uuidFrom := assertCreateObject(t, refFromClassName, map[string]interface{}{"ref": refs}) + assertGetObjectWithClass(t, uuidFrom, refFromClassName) + + objWithRef := assertGetObjectWithClass(t, uuidFrom, refFromClassName) + assert.NotNil(t, objWithRef.Properties) + refsReturned := 
objWithRef.Properties.(map[string]interface{})["ref"].([]interface{}) + for i := range refsReturned { + require.Equal(t, refsReturned[i].(map[string]interface{})["beacon"], string(beaconStart+"ReferenceTo/"+uuids[i])) + } +} + +// This test suite is meant to prevent a regression on +// https://github.com/weaviate/weaviate/issues/868, hence it tries to +// reproduce the steps outlined in there as closely as possible +func Test_CREFWithCardinalityMany_UsingPatch(t *testing.T) { + defer func() { + // clean up so we can run this test multiple times in a row + delCityParams := clschema.NewSchemaObjectsDeleteParams().WithClassName("ReferenceTestCity") + dresp, err := helper.Client(t).Schema.SchemaObjectsDelete(delCityParams, nil) + t.Logf("clean up - delete city \n%v\n %v", dresp, err) + + delPlaceParams := clschema.NewSchemaObjectsDeleteParams().WithClassName("ReferenceTestPlace") + dresp, err = helper.Client(t).Schema.SchemaObjectsDelete(delPlaceParams, nil) + t.Logf("clean up - delete place \n%v\n %v", dresp, err) + }() + + t.Log("1. create ReferenceTestPlace class") + placeClass := &models.Class{ + Class: "ReferenceTestPlace", + Properties: []*models.Property{ + { + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "name", + }, + }, + } + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(placeClass) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + t.Log("2. 
create ReferenceTestCity class with HasPlaces (many) cross-ref") + cityClass := &models.Class{ + Class: "ReferenceTestCity", + Properties: []*models.Property{ + { + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "name", + }, + { + DataType: []string{"ReferenceTestPlace"}, + Name: "HasPlaces", + }, + }, + } + params = clschema.NewSchemaObjectsCreateParams().WithObjectClass(cityClass) + resp, err = helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + t.Log("3. add two places and save their IDs") + place1ID := assertCreateObject(t, "ReferenceTestPlace", map[string]interface{}{ + "name": "Place 1", + }) + place2ID := assertCreateObject(t, "ReferenceTestPlace", map[string]interface{}{ + "name": "Place 2", + }) + assertGetObjectEventually(t, place1ID) + assertGetObjectEventually(t, place2ID) + + t.Log("4. add one city") + cityID := assertCreateObject(t, "ReferenceTestCity", map[string]interface{}{ + "name": "My City", + }) + assertGetObjectEventually(t, cityID) + + t.Log("5. patch city to point to the first place") + patchParams := objects.NewObjectsPatchParams(). + WithID(cityID). + WithBody(&models.Object{ + Class: "ReferenceTestCity", + Properties: map[string]interface{}{ + "hasPlaces": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", place1ID.String()), + }, + }, + }, + }) + patchResp, err := helper.Client(t).Objects.ObjectsPatch(patchParams, nil) + helper.AssertRequestOk(t, patchResp, err, nil) + + t.Log("6. 
verify first cross ref was added") + + actualThunk := func() interface{} { + cityAfterFirstPatch := assertGetObject(t, cityID) + return cityAfterFirstPatch.Properties + } + + helper.AssertEventuallyEqual(t, map[string]interface{}{ + "name": "My City", + "hasPlaces": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", placeClass.Class, place1ID.String()), + "href": fmt.Sprintf("/v1/objects/%s/%s", placeClass.Class, place1ID.String()), + }, + }, + }, actualThunk) + + t.Log("7. patch city to point to the second place") + patchParams = objects.NewObjectsPatchParams(). + WithID(cityID). + WithBody(&models.Object{ + Class: "ReferenceTestCity", + Properties: map[string]interface{}{ + "hasPlaces": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s", place2ID.String()), + }, + }, + }, + }) + patchResp, err = helper.Client(t).Objects.ObjectsPatch(patchParams, nil) + helper.AssertRequestOk(t, patchResp, err, nil) + + actualThunk = func() interface{} { + city := assertGetObject(t, cityID) + return city.Properties.(map[string]interface{})["hasPlaces"].([]interface{}) + } + + t.Log("9. 
verify both cross refs are present") + expectedRefs := []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", placeClass.Class, place1ID.String()), + "href": fmt.Sprintf("/v1/objects/%s/%s", placeClass.Class, place1ID.String()), + }, + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", placeClass.Class, place2ID.String()), + "href": fmt.Sprintf("/v1/objects/%s/%s", placeClass.Class, place2ID.String()), + }, + } + + helper.AssertEventuallyEqual(t, expectedRefs, actualThunk) +} + +// This test suite is meant to prevent a regression on +// https://github.com/weaviate/weaviate/issues/868, hence it tries to +// reproduce the steps outlined in there as closely as possible +func Test_CREFWithCardinalityMany_UsingPostReference(t *testing.T) { + defer func() { + // clean up so we can run this test multiple times in a row + delCityParams := clschema.NewSchemaObjectsDeleteParams().WithClassName("ReferenceTestCity") + dresp, err := helper.Client(t).Schema.SchemaObjectsDelete(delCityParams, nil) + t.Logf("clean up - delete city \n%v\n %v", dresp, err) + + delPlaceParams := clschema.NewSchemaObjectsDeleteParams().WithClassName("ReferenceTestPlace") + dresp, err = helper.Client(t).Schema.SchemaObjectsDelete(delPlaceParams, nil) + t.Logf("clean up - delete place \n%v\n %v", dresp, err) + }() + + t.Log("1. create ReferenceTestPlace class") + placeClass := &models.Class{ + Class: "ReferenceTestPlace", + Properties: []*models.Property{ + { + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "name", + }, + }, + } + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(placeClass) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + t.Log("2. 
create ReferenceTestCity class with HasPlaces (many) cross-ref") + cityClass := &models.Class{ + Class: "ReferenceTestCity", + Properties: []*models.Property{ + { + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "name", + }, + { + DataType: []string{"ReferenceTestPlace"}, + Name: "HasPlaces", + }, + }, + } + params = clschema.NewSchemaObjectsCreateParams().WithObjectClass(cityClass) + resp, err = helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + t.Log("3. add two places and save their IDs") + place1ID := assertCreateObject(t, "ReferenceTestPlace", map[string]interface{}{ + "name": "Place 1", + }) + place2ID := assertCreateObject(t, "ReferenceTestPlace", map[string]interface{}{ + "name": "Place 2", + }) + assertGetObjectEventually(t, place1ID) + assertGetObjectEventually(t, place2ID) + t.Logf("Place 1 ID: %s", place1ID) + t.Logf("Place 2 ID: %s", place2ID) + + t.Log("4. add one city") + cityID := assertCreateObject(t, "ReferenceTestCity", map[string]interface{}{ + "name": "My City", + }) + assertGetObjectEventually(t, cityID) + + t.Log("5. POST /references/ for place 1") + postRefParams := objects.NewObjectsReferencesCreateParams(). + WithID(cityID). + WithPropertyName("hasPlaces"). + WithBody(&models.SingleRef{ + Beacon: strfmt.URI(fmt.Sprintf("weaviate://localhost/%s", place1ID.String())), + }) + postRefResponse, err := helper.Client(t).Objects.ObjectsReferencesCreate(postRefParams, nil) + helper.AssertRequestOk(t, postRefResponse, err, nil) + + actualThunk := func() interface{} { + city := assertGetObject(t, cityID) + return city.Properties + } + t.Log("7. 
verify first cross ref was added") + helper.AssertEventuallyEqual(t, map[string]interface{}{ + "name": "My City", + "hasPlaces": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", placeClass.Class, place1ID.String()), + "href": fmt.Sprintf("/v1/objects/%s/%s", placeClass.Class, place1ID.String()), + }, + }, + }, actualThunk) + + t.Log("8. POST /references/ for place 2") + postRefParams = objects.NewObjectsReferencesCreateParams(). + WithID(cityID). + WithPropertyName("hasPlaces"). + WithBody(&models.SingleRef{ + Beacon: strfmt.URI(fmt.Sprintf("weaviate://localhost/%s", place2ID.String())), + }) + postRefResponse, err = helper.Client(t).Objects.ObjectsReferencesCreate(postRefParams, nil) + helper.AssertRequestOk(t, postRefResponse, err, nil) + + t.Log("9. verify both cross refs are present") + actualThunk = func() interface{} { + city := assertGetObject(t, cityID) + return city.Properties.(map[string]interface{})["hasPlaces"].([]interface{}) + } + + expectedRefs := []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", placeClass.Class, place1ID.String()), + "href": fmt.Sprintf("/v1/objects/%s/%s", placeClass.Class, place1ID.String()), + }, + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", placeClass.Class, place2ID.String()), + "href": fmt.Sprintf("/v1/objects/%s/%s", placeClass.Class, place2ID.String()), + }, + } + + helper.AssertEventuallyEqual(t, expectedRefs, actualThunk) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/crefs_without_waiting_for_refresh_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/crefs_without_waiting_for_refresh_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c0ce671bcec47a8364f299922d2ab5f191f6c7f2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/crefs_without_waiting_for_refresh_test.go @@ -0,0 +1,103 @@ +// _ _ +// 
__ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +// See https://github.com/weaviate/weaviate/issues/980 +func Test_AddingReferenceWithoutWaiting_UsingPostObjects(t *testing.T) { + defer func() { + // clean up so we can run this test multiple times in a row + delCityParams := clschema.NewSchemaObjectsDeleteParams().WithClassName("ReferenceWaitingTestCity") + dresp, err := helper.Client(t).Schema.SchemaObjectsDelete(delCityParams, nil) + t.Logf("clean up - delete city \n%v\n %v", dresp, err) + + delPlaceParams := clschema.NewSchemaObjectsDeleteParams().WithClassName("ReferenceWaitingTestPlace") + dresp, err = helper.Client(t).Schema.SchemaObjectsDelete(delPlaceParams, nil) + t.Logf("clean up - delete place \n%v\n %v", dresp, err) + }() + + t.Log("1. create ReferenceTestPlace class") + placeClass := &models.Class{ + Class: "ReferenceWaitingTestPlace", + Properties: []*models.Property{ + { + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "name", + }, + }, + } + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(placeClass) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + t.Log("2. 
create ReferenceTestCity class with HasPlace cross-ref") + cityClass := &models.Class{ + Class: "ReferenceWaitingTestCity", + Properties: []*models.Property{ + { + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "name", + }, + { + DataType: []string{"ReferenceWaitingTestPlace"}, + Name: "HasPlace", + }, + }, + } + params = clschema.NewSchemaObjectsCreateParams().WithObjectClass(cityClass) + resp, err = helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + t.Log("3. add a places and save the ID") + placeID := assertCreateObject(t, "ReferenceWaitingTestPlace", map[string]interface{}{ + "name": "Place 1", + }) + + t.Log("4. add one city with ref to the place") + cityID := assertCreateObject(t, "ReferenceWaitingTestCity", map[string]interface{}{ + "name": "My City", + "hasPlace": models.MultipleRef{ + &models.SingleRef{ + Beacon: strfmt.URI(fmt.Sprintf("weaviate://localhost/%s", placeID.String())), + }, + }, + }) + + assertGetObjectEventually(t, cityID) + + actualThunk := func() interface{} { + city := assertGetObject(t, cityID) + return city.Properties + } + t.Log("7. 
verify first cross ref was added") + helper.AssertEventuallyEqual(t, map[string]interface{}{ + "name": "My City", + "hasPlace": []interface{}{ + map[string]interface{}{ + "beacon": fmt.Sprintf("weaviate://localhost/%s/%s", "ReferenceWaitingTestPlace", placeID.String()), + "href": fmt.Sprintf("/v1/objects/%s/%s", "ReferenceWaitingTestPlace", placeID.String()), + }, + }, + }, actualThunk) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/custom_vectors.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/custom_vectors.go new file mode 100644 index 0000000000000000000000000000000000000000..38ef1642e8109a174f1bf2246d84eb8a2af7d0c3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/custom_vectors.go @@ -0,0 +1,86 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func customVectors(t *testing.T) { + var id strfmt.UUID + + t.Run("create object", func(t *testing.T) { + params := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + Class: "TestObjectCustomVector", + Properties: map[string]interface{}{"description": "foo"}, + Vector: []float32{0.1, 0.2}, + }) + resp, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + require.Nil(t, err, "creation should succeed") + id = resp.Payload.ID + }) + + t.Run("check custom vector is set", func(t *testing.T) { + include := "vector" + params := objects.NewObjectsGetParams().WithID(id).WithInclude(&include) + resp, err := helper.Client(t).Objects.ObjectsGet(params, nil) + require.Nil(t, err, "get should succeed") + assert.Equal(t, []float32{0.1, 0.2}, []float32(resp.Payload.Vector)) + }) + + t.Run("replace object entirely (update)", func(t *testing.T) { + params := objects.NewObjectsUpdateParams().WithID(id).WithBody(&models.Object{ + ID: id, + Class: "TestObjectCustomVector", + Properties: map[string]interface{}{"description": "foo updated"}, + Vector: []float32{0.1, 0.3}, + }) + _, err := helper.Client(t).Objects.ObjectsUpdate(params, nil) + require.Nil(t, err, "update should succeed") + }) + + t.Run("check custom vector is updated", func(t *testing.T) { + include := "vector" + params := objects.NewObjectsGetParams().WithID(id).WithInclude(&include) + resp, err := helper.Client(t).Objects.ObjectsGet(params, nil) + require.Nil(t, err, "get should succeed") + assert.Equal(t, []float32{0.1, 0.3}, []float32(resp.Payload.Vector)) + }) + + t.Run("replace only vector through merge", func(t *testing.T) { + params := 
objects.NewObjectsPatchParams().WithID(id).WithBody(&models.Object{ + ID: id, + Class: "TestObjectCustomVector", + Properties: map[string]interface{}{}, + Vector: []float32{0.4, 0.3}, + }) + _, err := helper.Client(t).Objects.ObjectsPatch(params, nil) + require.Nil(t, err, "patch should succeed") + }) + + t.Run("check custom vector is updated", func(t *testing.T) { + include := "vector" + params := objects.NewObjectsGetParams().WithID(id).WithInclude(&include) + resp, err := helper.Client(t).Objects.ObjectsGet(params, nil) + require.Nil(t, err, "get should succeed") + assert.Equal(t, []float32{0.4, 0.3}, []float32(resp.Payload.Vector)) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/delete_objects_from_all_classes.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/delete_objects_from_all_classes.go new file mode 100644 index 0000000000000000000000000000000000000000..3d0a4f03502f4d661353eaee220fac6ee2f79cd5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/delete_objects_from_all_classes.go @@ -0,0 +1,98 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +// Acceptance tests for objects. + +import ( + "testing" + + "github.com/go-openapi/strfmt" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +// run from setup_test.go +func deleteAllObjectsFromAllClasses(t *testing.T) { + // We can have a situation that objects in different classes + // have the same ID. 
This test is to ensure that the delete request + // deletes all of the objects with a given ID in all classes + // This test is connected with this issue: + // https://github.com/weaviate/weaviate/issues/1836 + const fakeObjectId strfmt.UUID = "11111111-1111-1111-1111-111111111111" + + t.Run("create objects with a specified id", func(t *testing.T) { + object1 := &models.Object{ + Class: "TestDeleteClassOne", + ID: fakeObjectId, + Properties: map[string]interface{}{ + "text": "Test string 1", + }, + } + object2 := &models.Object{ + Class: "TestDeleteClassTwo", + ID: fakeObjectId, + Properties: map[string]interface{}{ + "text": "Test string 2", + }, + } + + testFields := "ALL" + // generate request body + params := batch.NewBatchObjectsCreateParams().WithBody(batch.BatchObjectsCreateBody{ + Objects: []*models.Object{object1, object2}, + Fields: []*string{&testFields}, + }) + + // perform the request + resp, err := helper.BatchClient(t).BatchObjectsCreate(params, nil) + // ensure that the response is OK + helper.AssertRequestOk(t, resp, err, func() { + objectsCreateResponse := resp.Payload + + // check if the batch response contains two batched responses + assert.Equal(t, 2, len(objectsCreateResponse)) + + for _, elem := range resp.Payload { + assert.Nil(t, elem.Result.Errors) + } + }) + }) + + t.Run("check that object exists", func(t *testing.T) { + // there are actually 2 objects in 2 classes with this ID + params := objects.NewObjectsGetParams().WithID(fakeObjectId) + resp, err := helper.Client(t).Objects.ObjectsGet(params, nil) + require.Nil(t, err, "get should succeed") + assert.NotNil(t, resp.Payload) + }) + + t.Run("delete objects with a given ID from all classes", func(t *testing.T) { + params := objects.NewObjectsDeleteParams().WithID(fakeObjectId) + resp, err := helper.Client(t).Objects.ObjectsDelete(params, nil) + require.Nil(t, err, "delete should succeed") + assert.Equal(t, &objects.ObjectsDeleteNoContent{}, resp) + }) + + t.Run("check that object with 
given ID is removed from all classes", func(t *testing.T) { + params := objects.NewObjectsGetParams().WithID(fakeObjectId) + resp, err := helper.Client(t).Objects.ObjectsGet(params, nil) + require.Equal(t, &objects.ObjectsGetNotFound{}, err) + assert.Nil(t, resp) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/head_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/head_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8b8c3d78bc2ada4a064e54939307e82a7ed01b44 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/head_test.go @@ -0,0 +1,71 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +// Acceptance tests for things. + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +// Test that we can properly check object's existence. +// Create two objects, and check that those objects exist. 
+// Also check one non existent object that it doesn't exist +// This test is run by setup_test.go +func checkObjects(t *testing.T) { + params1 := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + Class: "TestObject", + Properties: map[string]interface{}{}, + }) + resp1, err := helper.Client(t).Objects.ObjectsCreate(params1, nil) + require.Nil(t, err, "creation should succeed") + object1ID := resp1.Payload.ID + + params2 := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + Class: "TestObject", + Properties: map[string]interface{}{}, + }) + resp2, err := helper.Client(t).Objects.ObjectsCreate(params2, nil) + assert.Nil(t, err, "creation should succeed") + object2ID := resp2.Payload.ID + + // wait for both Objects to be indexed + assertGetObjectEventually(t, object1ID) + assertGetObjectEventually(t, object2ID) + + headParams := objects.NewObjectsHeadParams().WithID(object1ID) + resp, err := helper.Client(t).Objects.ObjectsHead(headParams, nil) + + require.Nil(t, err, "should not error") + assert.True(t, resp != nil, "Did not find object 1") + + headParams = objects.NewObjectsHeadParams().WithID("non-existent-object") + resp, err = helper.Client(t).Objects.ObjectsHead(headParams, nil) + + require.NotNil(t, err, "should error") + assert.True(t, resp == nil, "Did find non existent object") + + headParams = objects.NewObjectsHeadParams().WithID(object2ID) + resp, err = helper.Client(t).Objects.ObjectsHead(headParams, nil) + + require.Nil(t, err, "should not error") + assert.True(t, resp != nil, "Did not find object 2") +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/helpers_for_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/helpers_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..08e9ad23e9e4f29361b985227308a54d96b5f47d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/helpers_for_test.go @@ -0,0 +1,101 @@ +// _ _ +// __ _____ __ 
___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func assertCreateObject(t *testing.T, className string, schema map[string]interface{}) strfmt.UUID { + params := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + Class: className, + Properties: schema, + }) + + resp, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + + var objectID strfmt.UUID + + // Ensure that the response is OK + helper.AssertRequestOk(t, resp, err, func() { + objectID = resp.Payload.ID + }) + + return objectID +} + +func assertCreateObjectWithID(t *testing.T, className, tenant string, id strfmt.UUID, schema map[string]interface{}) { + params := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + ID: id, + Class: className, + Properties: schema, + Tenant: tenant, + }) + + resp, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + + // Ensure that the response is OK + helper.AssertRequestOk(t, resp, err, nil) +} + +func assertGetObject(t *testing.T, uuid strfmt.UUID) *models.Object { + getResp, err := helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams().WithID(uuid), nil) + + var object *models.Object + + helper.AssertRequestOk(t, getResp, err, func() { + object = getResp.Payload + }) + + return object +} + +func assertGetObjectWithClass(t *testing.T, uuid strfmt.UUID, class string) *models.Object { + getResp, err := helper.Client(t).Objects.ObjectsClassGet(objects.NewObjectsClassGetParams().WithID(uuid).WithClassName(class), nil) + + var object *models.Object + + helper.AssertRequestOk(t, getResp, err, func() { + 
object = getResp.Payload + }) + + return object +} + +func assertGetObjectEventually(t *testing.T, uuid strfmt.UUID) *models.Object { + var ( + resp *objects.ObjectsGetOK + err error + ) + + checkThunk := func() interface{} { + resp, err = helper.Client(t).Objects.ObjectsGet(objects.NewObjectsGetParams().WithID(uuid), nil) + return err == nil + } + + helper.AssertEventuallyEqual(t, true, checkThunk) + + var object *models.Object + + helper.AssertRequestOk(t, resp, err, func() { + object = resp.Payload + }) + + return object +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/list_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/list_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5a7c64e4d40af5071bf412a6e6f3f433973b9b56 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/list_test.go @@ -0,0 +1,77 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +// Acceptance tests for things. + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +// Test that we can properly list objects. +// Create two objects, and check that the list all contains them all. 
+// This test is run by setup_test.go +func listingObjects(t *testing.T) { + params1 := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + Class: "TestObject", + Properties: map[string]interface{}{ + "testString": "1", + }, + }) + resp1, err := helper.Client(t).Objects.ObjectsCreate(params1, nil) + require.Nil(t, err, "creation should succeed") + object1ID := resp1.Payload.ID + + params2 := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + Class: "TestObject", + Properties: map[string]interface{}{ + "testString": "2", + }, + }) + resp2, err := helper.Client(t).Objects.ObjectsCreate(params2, nil) + assert.Nil(t, err, "creation should succeed") + object2ID := resp2.Payload.ID + + // wait for both Objects to be indexed + assertGetObjectEventually(t, object1ID) + assertGetObjectEventually(t, object2ID) + + listParams := objects.NewObjectsListParams() + resp, err := helper.Client(t).Objects.ObjectsList(listParams, nil) + require.Nil(t, err, "should not error") + + found1 := false + found2 := false + + for _, object := range resp.Payload.Objects { + if object.ID == resp1.Payload.ID { + assert.False(t, found1, "found double ID for object 1!") + found1 = true + } + + if object.ID == resp2.Payload.ID { + assert.False(t, found2, "found double ID for object 2!") + found2 = true + } + } + + assert.True(t, found1, "Did not find object 1") + assert.True(t, found2, "Did not find object 2") +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/network_refs_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/network_refs_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8daaf6a62ede4dacd557993f71ca27e6518dd961 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/network_refs_test.go @@ -0,0 +1,104 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ 
|_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +// func TestCanAddSingleNetworkRef(t *testing.T) { +// networkRefID := "711da979-4b0b-41e2-bcb8-fcc03554c7c8" +// thingID := assertCreateThing(t, "TestThing", map[string]interface{}{ +// "testReference": []interface{}{ +// map[string]interface{}{ +// "beacon": "weaviate://RemoteWeaviateForAcceptanceTest/things/" + networkRefID, +// }, +// }, +// }) + +// t.Run("it can query the resource again to verify the cross ref was added", func(t *testing.T) { +// thing := assertGetThingEventually(t, thingID) +// list := thing.Schema.(map[string]interface{})["testReference"] +// require.NotNil(t, list, "cross-ref is present") +// cref := list.([]interface{})[0].(map[string]interface{}) +// assert.Equal(t, cref["beacon"], "weaviate://RemoteWeaviateForAcceptanceTest/things/"+networkRefID) +// }) + +// t.Run("an implicit schema update has happened, we now include the network ref's class", func(t *testing.T) { +// schema := assertGetSchema(t) +// require.NotNil(t, schema.Things) +// class := assertClassInSchema(t, schema.Things, "TestThing") +// prop := assertPropertyInClass(t, class, "testReference") +// expectedDataType := []string{"TestThingTwo", "RemoteWeaviateForAcceptanceTest/Instruments"} +// assert.Equal(t, expectedDataType, prop.DataType, "prop should have old and newly added dataTypes") +// }) + +// t.Run("it can query the reference through the graphql api", func(t *testing.T) { +// result := graphql.AssertGraphQL(t, helper.RootAuth, +// "{ Get { Things { TestThing { TestReference { ... 
on RemoteWeaviateForAcceptanceTest__Instruments { name } } } } } }") +// things := result.Get("Get", "Things", "TestThing").AsSlice() +// assert.Contains(t, things, parseJSONObj(`{"TestReference":[{"name": "Talkbox"}]}`)) +// }) +// } + +// func TestCanPatchNetworkRef(t *testing.T) { +// t.Parallel() + +// thingID := assertCreateThing(t, "TestThing", nil) +// assertGetThingEventually(t, thingID) +// networkRefID := "711da979-4b0b-41e2-bcb8-fcc03554c7c8" + +// op := "add" +// path := "/schema/testReference" + +// patch := &models.PatchDocument{ +// Op: &op, +// Path: &path, +// Value: []interface{}{ +// map[string]interface{}{ +// "beacon": "weaviate://RemoteWeaviateForAcceptanceTest/things/" + networkRefID, +// }, +// }, +// } + +// t.Run("it can apply the patch", func(t *testing.T) { +// params := things.NewThingsPatchParams(). +// WithBody([]*models.PatchDocument{patch}). +// WithID(thingID) +// patchResp, err := helper.Client(t).Things.ThingsPatch(params, nil) +// helper.AssertRequestOk(t, patchResp, err, nil) +// }) + +// t.Run("it can query the resource again to verify the cross ref was added", func(t *testing.T) { +// patchedThing := assertGetThing(t, thingID) +// list := patchedThing.Schema.(map[string]interface{})["testReference"] +// require.NotNil(t, list, "cross-ref is present") +// cref := list.([]interface{})[0].(map[string]interface{}) +// assert.Equal(t, cref["beacon"], "weaviate://RemoteWeaviateForAcceptanceTest/things/"+networkRefID) +// }) + +// t.Run("an implicit schema update has happened, we now include the network ref's class", func(t *testing.T) { +// schema := assertGetSchema(t) +// require.NotNil(t, schema.Things) +// class := assertClassInSchema(t, schema.Things, "TestThing") +// prop := assertPropertyInClass(t, class, "testReference") +// expectedDataType := []string{"TestThingTwo", "RemoteWeaviateForAcceptanceTest/Instruments"} +// assert.Equal(t, expectedDataType, prop.DataType, "prop should have old and newly added dataTypes") +// }) 
+// } + +// func parseJSONObj(text string) interface{} { +// var result interface{} +// err := json.Unmarshal([]byte(text), &result) + +// if err != nil { +// panic(err) +// } + +// return result +// } diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/objects_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/objects_test.go new file mode 100644 index 0000000000000000000000000000000000000000..0fc65d4e8b6427ae51cd9063eee45e40d2394527 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/objects_test.go @@ -0,0 +1,326 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +// Acceptance tests for objects. + +import ( + "encoding/json" + "errors" + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +// run from setup_test.go +func creatingObjects(t *testing.T) { + const fakeObjectId strfmt.UUID = "11111111-1111-1111-1111-111111111111" + + t.Run("create object with user specified id", func(t *testing.T) { + var ( + id = strfmt.UUID("d47ea61b-0ed7-4e5f-9c05-6d2c0786660f") + className = "TestObject" + // Set all object values to compare + objectTestString = "Test string" + ) + // clean up to make sure we can run this test multiple times in a row + defer func() { + params := objects.NewObjectsDeleteParams().WithID(id) + helper.Client(t).Objects.ObjectsDelete(params, nil) + { + params := objects.NewObjectsClassGetParams() + params.WithClassName(className).WithID(id) + _, err := helper.Client(t).Objects.ObjectsClassGet(params, nil) + if err == 
nil { + t.Errorf("Object %v cannot exist after deletion", id) + } + werr := new(objects.ObjectsClassGetNotFound) + if ok := errors.As(err, &werr); !ok { + t.Errorf("get deleted object err got: %v want: %v", err, werr) + } + } + }() + + params := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + ID: id, + Class: className, + Properties: map[string]interface{}{ + "testString": objectTestString, + }, + }) + + resp, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + + // Ensure that the response is OK + helper.AssertRequestOk(t, resp, err, func() { + object := resp.Payload + _, err := uuid.Parse(object.ID.String()) + assert.NoError(t, err) + + schema, ok := object.Properties.(map[string]interface{}) + if !ok { + t.Fatal("The returned schema is not an JSON object") + } + + // Check whether the returned information is the same as the data added + assert.Equal(t, objectTestString, schema["testString"]) + }) + + // wait for the object to be created + helper.AssertEventuallyEqual(t, id, func() interface{} { + params := objects.NewObjectsClassGetParams() + params.WithClassName(className).WithID(id) + object, err := helper.Client(t).Objects.ObjectsClassGet(params, nil) + if err != nil { + return nil + } + + return object.Payload.ID + }) + // deprecated: is here because of backward compatibility reasons + helper.AssertEventuallyEqual(t, id, func() interface{} { + params := objects.NewObjectsGetParams().WithID(id) + object, err := helper.Client(t).Objects.ObjectsGet(params, nil) + if err != nil { + return nil + } + + return object.Payload.ID + }) + + // Try to create the same object again and make sure it fails + params = objects.NewObjectsCreateParams().WithBody( + &models.Object{ + ID: id, + Class: "TestObject", + Properties: map[string]interface{}{ + "testString": objectTestString, + }, + }) + + resp, err = helper.Client(t).Objects.ObjectsCreate(params, nil) + helper.AssertRequestFail(t, resp, err, func() { + var errResponse 
*objects.ObjectsCreateUnprocessableEntity + if !errors.As(err, &errResponse) { + t.Fatalf("Did not get not found response, but %#v", err) + } + + assert.Equal(t, fmt.Sprintf("id '%s' already exists", id), errResponse.Payload.Error[0].Message) + }) + }) + + // Check if we can create a Object, and that it's properties are stored correctly. + t.Run("creating a object", func(t *testing.T) { + t.Parallel() + // Set all object values to compare + objectTestString := "Test string" + objectTestInt := 1 + objectTestBoolean := true + objectTestNumber := 1.337 + objectTestDate := "2017-10-06T08:15:30+01:00" + objectTestPhoneNumber := map[string]interface{}{ + "input": "0171 11122233", + "defaultCountry": "DE", + } + + params := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + Class: "TestObject", + Properties: map[string]interface{}{ + "testString": objectTestString, + "testWholeNumber": objectTestInt, + "testTrueFalse": objectTestBoolean, + "testNumber": objectTestNumber, + "testDateTime": objectTestDate, + "testPhoneNumber": objectTestPhoneNumber, + }, + }) + + resp, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + + // Ensure that the response is OK + helper.AssertRequestOk(t, resp, err, func() { + object := resp.Payload + _, err := uuid.Parse(object.ID.String()) + assert.NoError(t, err) + + schema, ok := object.Properties.(map[string]interface{}) + if !ok { + t.Fatal("The returned schema is not an JSON object") + } + + testWholeNumber, _ := schema["testWholeNumber"].(json.Number).Int64() + testNumber, _ := schema["testNumber"].(json.Number).Float64() + + expectedParsedPhoneNumber := map[string]interface{}{ + "input": "0171 11122233", + "defaultCountry": "DE", + "countryCode": json.Number("49"), + "internationalFormatted": "+49 171 11122233", + "national": json.Number("17111122233"), + "nationalFormatted": "0171 11122233", + "valid": true, + } + + // Check whether the returned information is the same as the data added + assert.Equal(t, 
objectTestString, schema["testString"]) + assert.Equal(t, objectTestInt, int(testWholeNumber)) + assert.Equal(t, objectTestBoolean, schema["testTrueFalse"]) + assert.Equal(t, objectTestNumber, testNumber) + assert.Equal(t, objectTestDate, schema["testDateTime"]) + assert.Equal(t, expectedParsedPhoneNumber, schema["testPhoneNumber"]) + }) + }) + + // Examples of how a Object can be invalid. + invalidObjectTestCases := []struct { + // What is wrong in this example + mistake string + + // the example object, with a mistake. + // this is a function, so that we can use utility functions like + // helper.GetWeaviateURL(), which might not be initialized yet + // during the static construction of the examples. + object func() *models.Object + + // Enable the option to perform some extra assertions on the error response + errorCheck func(t *testing.T, err *models.ErrorResponse) + }{ + { + mistake: "missing the class", + object: func() *models.Object { + return &models.Object{ + Properties: map[string]interface{}{ + "testString": "test", + }, + } + }, + errorCheck: func(t *testing.T, err *models.ErrorResponse) { + assert.Equal(t, "invalid object: the given class is empty", err.Error[0].Message) + }, + }, + // AUTO_SCHEMA creates classes automatically + // { + // mistake: "non existing class", + // object: func() *models.Object { + // return &models.Object{ + // Class: "NonExistingClass", + // Properties: map[string]interface{}{ + // "testString": "test", + // }, + // } + // }, + // errorCheck: func(t *testing.T, err *models.ErrorResponse) { + // assert.Equal(t, fmt.Sprintf("invalid object: class '%s' not present in schema", "NonExistingClass"), err.Error[0].Message) + // }, + // }, + // AUTO_SCHEMA creates missing properties automatically + // { + // mistake: "non existing property", + // object: func() *models.Object { + // return &models.Object{ + // Class: "TestObject", + // Properties: map[string]interface{}{ + // "nonExistingProperty": "test", + // }, + // } + // }, + 
+		// errorCheck: func(t *testing.T, err *models.ErrorResponse) {
+		// 	assert.Equal(t, fmt.Sprintf("invalid object: "+schema.ErrorNoSuchProperty, "nonExistingProperty", "TestObject"), err.Error[0].Message)
+		// },
+		// },
+		{
+			/* TODO gh-616: don't count nr of elements in validation. Just validate keys, and _also_ generate an error on superfluous keys.
+			   E.g.
+			   var cref *string
+			   var type_ *string
+			   var locationUrl *string
+
+			   for key, val := range(propertyValue) {
+			     switch key {
+			     case "beacon": cref = val
+			     case "type": type_ = val
+			     case "locationUrl": locationUrl = val
+			     default:
+			      return fmt.Errof("Unexpected key %s", key)
+			     }
+			   }
+			   if cref == nil { return fmt.Errorf("beacon missing") }
+			   if type_ == nil { return fmt.Errorf("type missing") }
+			   if locationUrl == nil { return fmt.Errorf("locationUrl missing") }
+
+			   // now everything has a valid state.
+			*/
+			mistake: "invalid cref, property missing locationUrl",
+			object: func() *models.Object {
+				return &models.Object{
+					Class: "TestObject",
+					Properties: map[string]interface{}{
+						"testReference": []interface{}{map[string]interface{}{
+							"beacon": fakeObjectId,
+							"x":      nil,
+							"type":   "Object",
+						}},
+					},
+				}
+			},
+			errorCheck: func(t *testing.T, err *models.ErrorResponse) {
+				assert.NotNil(t, err)
+			},
+		},
+		{
+			mistake: "invalid property; assign int to string",
+			object: func() *models.Object {
+				return &models.Object{
+					Class: "TestObject",
+					Properties: map[string]interface{}{
+						"testString": 2,
+					},
+				}
+			},
+			errorCheck: func(t *testing.T, err *models.ErrorResponse) {
+				// assert.Contains(t, s, substr) checks that s contains substr: the
+				// actual server message must be the first (container) argument.
+				// With the arguments reversed, an empty actual message would pass
+				// vacuously, since "" is a substring of any expected literal.
+				assert.Contains(t,
+					err.Error[0].Message,
+					"invalid object: invalid text property 'testString' on class 'TestObject': not a string, but json.Number")
+			},
+		},
+	}
+
+	// Check that none of the examples of invalid objects can be created.
+	t.Run("cannot create invalid objects", func(t *testing.T) {
+		// invalidObjectTestCases defined below this test.
+ for _, example_ := range invalidObjectTestCases { + t.Run(example_.mistake, func(t *testing.T) { + example := example_ // Needed; example is updated to point to a new test case. + t.Parallel() + + params := objects.NewObjectsCreateParams().WithBody(example.object()) + resp, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + helper.AssertRequestFail(t, resp, err, func() { + var errResponse *objects.ObjectsCreateUnprocessableEntity + if !errors.As(err, &errResponse) { + t.Fatalf("Did not get not found response, but %#v", err) + } + example.errorCheck(t, errResponse.Payload) + }) + }) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/rapid_updates_add_reference_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/rapid_updates_add_reference_test.go new file mode 100644 index 0000000000000000000000000000000000000000..6f7e04e448f8158eea6d2868730a1c6d79afc089 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/rapid_updates_add_reference_test.go @@ -0,0 +1,149 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "fmt" + "strings" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/client/objects" + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +// This aims to prevent a regression on +// https://github.com/weaviate/weaviate/issues/1016 +// The issue was that rapid POST .../references/... 
request in succession would +// overwrite each other due to the eventual consistency nature of the used +// backend (esvector). This bug is considered fix if n items can be rapidly +// added and a subsequent GET request of the source resource contains all +// previously added references. +func Test_RapidlyAddingReferences(t *testing.T) { + sourceClass := "SequenceReferenceTestSource" + targetClass := "SequenceReferenceTestTarget" + + sourceID := strfmt.UUID("96ce03ca-58ed-48e1-a0f1-51f63fa9aa12") + + targetIDs := []strfmt.UUID{ + "ce1a4756-b7ce-44fa-b079-45a7ec400882", + "e1edb4ff-570c-4f0b-a1a1-18af118369aa", + "25d22c70-3df0-4e5c-b8c1-a88d4d2771ef", + "6f2a0708-3e8e-4a68-9763-26c465d8bf83", + "c4dfae47-ebcf-4808-9122-1c67898ec140", + "754bd925-1900-4f93-9f5d-27631eb618bb", + "babba820-e3f5-4e8d-a354-76f2cb13fdba", + "270942da-1999-40cd-a580-a91aa144b6c0", + "a7a06618-6d50-4654-be75-2c9f639a6368", + "47ba1d2b-6b8c-4b3b-92a8-46574a069ae8", + } + + t.Run("adding the required schema", func(t *testing.T) { + t.Run("target class", func(t *testing.T) { + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass( + &models.Class{ + Class: targetClass, + Properties: []*models.Property{ + { + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "name", + }, + }, + }, + ) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + }) + + t.Run("source class", func(t *testing.T) { + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass( + &models.Class{ + Class: sourceClass, + Properties: []*models.Property{ + { + DataType: []string{targetClass}, + Name: "toTarget", + }, + { + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "name", + }, + }, + }, + ) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + }) + }) + + 
t.Run("adding all objects (without referencing)", func(t *testing.T) { + t.Run("source object", func(t *testing.T) { + assertCreateObjectWithID(t, sourceClass, "", sourceID, map[string]interface{}{ + "name": "Source Object", + }) + }) + + t.Run("target objects", func(t *testing.T) { + for i, id := range targetIDs { + assertCreateObjectWithID(t, targetClass, "", id, map[string]interface{}{ + "name": fmt.Sprintf("target object %d", i), + }) + } + }) + }) + + t.Run("waiting for the last added object to be present", func(t *testing.T) { + assertGetObjectEventually(t, targetIDs[len(targetIDs)-1]) + }) + + t.Run("placing all references in succession", func(t *testing.T) { + for _, id := range targetIDs { + params := objects.NewObjectsReferencesCreateParams(). + WithID(sourceID). + WithPropertyName("toTarget"). + WithBody( + &models.SingleRef{ + Beacon: strfmt.URI(fmt.Sprintf("weaviate://localhost/%s", id)), + }, + ) + + res, err := helper.Client(t).Objects.ObjectsReferencesCreate(params, nil) + helper.AssertRequestOk(t, res, err, nil) + } + }) + + t.Run("checking which refs were set", func(t *testing.T) { + source := assertGetObject(t, sourceID) + + var foundIDs []strfmt.UUID + // extract IDs + for _, ref := range source.Properties.(map[string]interface{})["toTarget"].([]interface{}) { + beacon := ref.(map[string]interface{})["beacon"].(string) + chunks := strings.Split(beacon, "/") + foundIDs = append(foundIDs, strfmt.UUID(chunks[len(chunks)-1])) + } + + assert.ElementsMatch(t, targetIDs, foundIDs) + }) + + // cleanup + helper.Client(t).Schema.SchemaObjectsDelete( + clschema.NewSchemaObjectsDeleteParams().WithClassName(sourceClass), nil) + helper.Client(t).Schema.SchemaObjectsDelete( + clschema.NewSchemaObjectsDeleteParams().WithClassName(targetClass), nil) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/setup_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/setup_test.go new file mode 100644 index 
0000000000000000000000000000000000000000..6bdd7d5d77f1ae71357cb172f9a1076a33debfa3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/setup_test.go @@ -0,0 +1,298 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/weaviate/weaviate/test/docker" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/objects" + + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/helper" +) + +// Tests that sort parameters are validated with the correct class +func TestSort(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: "ClassToSort", + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + }) + defer deleteObjectClass(t, "ClassToSort") + + createObjectClass(t, &models.Class{ + Class: "OtherClass", + Properties: []*models.Property{ + { + Name: "ref", + DataType: []string{"ClassToSort"}, + }, + }, + }) + defer deleteObjectClass(t, "OtherClass") + + listParams := objects.NewObjectsListParams() + nameClass := "ClassToSort" + nameProp := "name" + limit := int64(5) + listParams.Class = &nameClass + listParams.Sort = &nameProp + listParams.Limit = &limit + + _, err := helper.Client(t).Objects.ObjectsList(listParams, nil) + require.Nil(t, err, "should not error") +} + +func TestObjects_AsyncIndexing(t *testing.T) { + ctx := context.Background() + compose, err := docker.New(). + WithWeaviate(). 
+ WithText2VecContextionary(). + WithWeaviateEnv("ASYNC_INDEXING", "true"). + WithWeaviateEnv("ASYNC_INDEXING_STALE_TIMEOUT", "1s"). + WithWeaviateEnv("API_BASED_MODULES_DISABLED", "true"). + Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + defer helper.SetupClient(fmt.Sprintf("%s:%s", helper.ServerHost, helper.ServerPort)) + helper.SetupClient(compose.GetWeaviate().URI()) + + testObjects(t) + asyncTestObjects(t) +} + +// Tests for allocChecker nil error on dynamic indexes during shard load +func TestObjects_AsyncIndexing_LoadShard(t *testing.T) { + ctx := context.Background() + compose, err := docker.New(). + WithWeaviate(). + WithWeaviateEnv("ASYNC_INDEXING", "true"). + WithWeaviateEnv("PERSISTENCE_MIN_MMAP_SIZE", "20MB"). + WithWeaviateEnv("API_BASED_MODULES_DISABLED", "true"). + Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + defer helper.SetupClient(fmt.Sprintf("%s:%s", helper.ServerHost, helper.ServerPort)) + helper.SetupClient(compose.GetWeaviate().URI()) + + className := "Dynamic" + createObjectClass(t, &models.Class{ + Class: className, + Vectorizer: "none", + VectorIndexType: "dynamic", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + Properties: []*models.Property{ + { + Name: "description", + DataType: []string{"text"}, + }, + }, + }) + + tenantName := "tenant0" + helper.CreateTenants(t, className, []*models.Tenant{{Name: tenantName, ActivityStatus: "ACTIVE"}}) + + for i := 0; i < 1000; i++ { + helper.AssertCreateObjectTenantVector(t, className, map[string]interface{}{ + "description": fmt.Sprintf("Test string %d", i), + }, tenantName, []float32{0.0, 0.1}) + } + time.Sleep(3 * time.Second) + helper.UpdateTenants(t, className, []*models.Tenant{{Name: tenantName, ActivityStatus: "INACTIVE"}}) + + time.Sleep(3 * time.Second) + helper.UpdateTenants(t, className, []*models.Tenant{{Name: tenantName, ActivityStatus: 
"ACTIVE"}}) + + deleteObjectClass(t, className) +} + +func TestObjects_SyncIndexing(t *testing.T) { + testObjects(t) +} + +func testObjects(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: "TestObject", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + Properties: []*models.Property{ + { + Name: "testString", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "testWholeNumber", + DataType: []string{"int"}, + }, + { + Name: "testReference", + DataType: []string{"TestObject"}, + }, + { + Name: "testNumber", + DataType: []string{"number"}, + }, + { + Name: "testDateTime", + DataType: []string{"date"}, + }, + { + Name: "testTrueFalse", + DataType: []string{"boolean"}, + }, + { + Name: "testPhoneNumber", + DataType: []string{"phoneNumber"}, + }, + }, + }) + createObjectClass(t, &models.Class{ + Class: "TestObjectCustomVector", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "description", + DataType: []string{"text"}, + }, + }, + }) + createObjectClass(t, &models.Class{ + Class: "TestDeleteClassOne", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "text", + DataType: []string{"text"}, + }, + }, + }) + createObjectClass(t, &models.Class{ + Class: "TestDeleteClassTwo", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "text", + DataType: []string{"text"}, + }, + }, + }) + + // tests + t.Run("listing objects", listingObjects) + t.Run("searching for neighbors", searchNeighbors) + t.Run("running a feature projection", featureProjection) + t.Run("creating objects", creatingObjects) + + t.Run("custom vector journey", customVectors) + t.Run("auto schema", autoSchemaObjects) + t.Run("checking object's existence", checkObjects) + t.Run("delete request deletes all objects with a given ID", deleteAllObjectsFromAllClasses) + + // tear down + deleteObjectClass(t, 
"TestObject") + deleteObjectClass(t, "TestObjectCustomVector") + deleteObjectClass(t, "NonExistingClass") + deleteObjectClass(t, "TestDeleteClassOne") + deleteObjectClass(t, "TestDeleteClassTwo") +} + +func asyncTestObjects(t *testing.T) { + className := "Dynamic" + createObjectClass(t, &models.Class{ + Class: className, + Vectorizer: "none", + VectorIndexType: "dynamic", + VectorIndexConfig: map[string]interface{}{ + "threshold": 999, + "hnsw": map[string]interface{}{ + "ef": 123, + }, + }, + Properties: []*models.Property{ + { + Name: "description", + DataType: []string{"text"}, + }, + }, + }) + + t.Run("update dynamic hnsw ef", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams(). + WithClassName(className) + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + + class := res.Payload + if vectorIndexConfig, ok := class.VectorIndexConfig.(map[string]interface{}); ok { + if hnsw, ok := vectorIndexConfig["hnsw"].(map[string]interface{}); ok { + if ef, ok := hnsw["ef"].(json.Number); ok { + efFloat, err := ef.Float64() + require.Nil(t, err) + require.Equal(t, 123.0, efFloat) + hnsw["ef"] = 1234.0 + } else { + t.Errorf("type assertion failure 'ef' to json.Number") + } + } else { + t.Errorf("type assertion failure 'hnsw' to map[string]interface{}") + } + } else { + t.Errorf("type assertion failure 'vectorIndexConfig' to map[string]interface{}") + } + + updateParams := clschema.NewSchemaObjectsUpdateParams(). + WithClassName(className). 
+ WithObjectClass(class) + _, err = helper.Client(t).Schema.SchemaObjectsUpdate(updateParams, nil) + assert.Nil(t, err) + }) + deleteObjectClass(t, className) +} + +func createObjectClass(t *testing.T, class *models.Class) { + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(class) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) +} + +func deleteObjectClass(t *testing.T, class string) { + delParams := clschema.NewSchemaObjectsDeleteParams().WithClassName(class) + delRes, err := helper.Client(t).Schema.SchemaObjectsDelete(delParams, nil) + helper.AssertRequestOk(t, delRes, err, nil) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/objects/skip_vector_index_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/skip_vector_index_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d216e0cdeb6d0a087e61cae1d6aae061cb9c39d0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/objects/skip_vector_index_test.go @@ -0,0 +1,69 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func TestSkipVectorIndex(t *testing.T) { + // Import a class with vectorizer 'none' and 'skipVectorIndex: true', import + // objects without vectors. 
+ + t.Run("create schema", func(t *testing.T) { + createObjectClass(t, &models.Class{ + Class: "TestSkipVectorIndex", + VectorIndexConfig: map[string]interface{}{ + "skip": true, + }, + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "name", + DataType: []string{"text"}, + }, + }, + }) + }) + + id := strfmt.UUID("d1d58565-3c9b-4ca6-ac7f-43f739700a1d") + + t.Run("create object", func(t *testing.T) { + params := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + ID: id, + Class: "TestSkipVectorIndex", + Properties: map[string]interface{}{"name": "Jane Doe"}, + }) + _, err := helper.Client(t).Objects.ObjectsCreate(params, nil) + require.Nil(t, err, "creation should succeed") + }) + + t.Run("get obj by ID", func(t *testing.T) { + params := objects.NewObjectsGetParams().WithID(id) + obj, err := helper.Client(t).Objects.ObjectsGet(params, nil) + require.Nil(t, err, "object can be retrieved by id") + + assert.Equal(t, "Jane Doe", obj.Payload.Properties.(map[string]interface{})["name"].(string)) + }) + + t.Run("tear down", func(t *testing.T) { + deleteObjectClass(t, "TestSkipVectorIndex") + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/recovery/network_isolation_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/recovery/network_isolation_test.go new file mode 100644 index 0000000000000000000000000000000000000000..0a654a168aa4f992e2dba7fecfed154fad6f4a1c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/recovery/network_isolation_test.go @@ -0,0 +1,100 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package recovery + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/nodes" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" +) + +func TestNetworkIsolationSplitBrain(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + compose, err := docker.New(). + With3NodeCluster(). + WithText2VecContextionary(). + Start(ctx) + require.Nil(t, err) + + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + + verbose := "verbose" + params := nodes.NewNodesGetParams().WithOutput(&verbose) + t.Run("verify nodes are healthy", func(t *testing.T) { + resp, err := helper.Client(t).Nodes.NodesGet(params, nil) + require.Nil(t, err) + + nodeStatusResp := resp.GetPayload() + require.NotNil(t, nodeStatusResp) + + nodes := nodeStatusResp.Nodes + require.NotNil(t, nodes) + require.Len(t, nodes, 3) + }) + + t.Run("disconnect node 3 from the network", func(t *testing.T) { + err = compose.DisconnectFromNetwork(ctx, 3) + require.Nil(t, err) + // this sleep is to make sure the network is disconnected + time.Sleep(3 * time.Second) + }) + + t.Run("verify 2 nodes are healthy", func(t *testing.T) { + assert.Eventually(t, func() bool { + resp, err := helper.Client(t).Nodes.NodesGet(params, nil) + assert.Nil(t, err) + + nodeStatusResp := resp.GetPayload() + assert.NotNil(t, nodeStatusResp) + + nodes := nodeStatusResp.Nodes + assert.NotNil(t, nodes) + return len(nodes) == 2 + }, 30*time.Second, 500*time.Millisecond) + }) + + t.Run("reconnect node 3 to the network", func(t *testing.T) { + err = compose.ConnectToNetwork(ctx, 3) + require.Nil(t, err) + // this sleep is to make sure the network is connected + time.Sleep(3 * 
time.Second) + }) + + t.Run("verify nodes are healthy and 3rd node successfully rejoined", func(t *testing.T) { + assert.Eventually(t, func() bool { + resp, err := helper.Client(t).Nodes.NodesGet(params, nil) + assert.Nil(t, err) + + nodeStatusResp := resp.GetPayload() + assert.NotNil(t, nodeStatusResp) + + nodes := nodeStatusResp.Nodes + assert.NotNil(t, nodes) + return len(nodes) == 3 + }, 90*time.Second, 500*time.Millisecond) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/recovery/recovery_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/recovery/recovery_test.go new file mode 100644 index 0000000000000000000000000000000000000000..20df1ff5eada817bce89a99e11d7ab2274b77851 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/recovery/recovery_test.go @@ -0,0 +1,69 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package recovery + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/test/docker" + "golang.org/x/sync/errgroup" +) + +// TODO-RAFT current tests don't force containers to change their IPs +// we need to add a test where the actual container ip changes on stop if possible with testcontainer +// if not we need to terminate the whole container to pick up new IP and copy the old container filesystem +// to the new one to force recovery +func TestRecovery(t *testing.T) { + t.Setenv("TEST_WEAVIATE_IMAGE", "weaviate/test-server") + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + compose, err := docker.New(). + With3NodeCluster(). + WithText2VecContextionary(). 
+ Start(ctx) + require.Nil(t, err) + + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + container1Ip := compose.ContainerURI(1) + container2Ip := compose.ContainerURI(2) + container3Ip := compose.ContainerURI(3) + + <-time.After(3 * time.Second) // wait for memberlist + + eg := errgroup.Group{} + for idx := 1; idx <= 3; idx++ { + require.Nil(t, compose.StopAt(ctx, idx, nil)) + i := idx // catch idx for eg + if i > 1 { + time.Sleep(2 * time.Second) + } + eg.Go(func() error { + require.Nil(t, compose.StartAt(ctx, i)) + return nil + }) + } + + eg.Wait() + // ips shouldn't be equal + require.NotEqual(t, container1Ip, compose.ContainerURI(1)) + require.NotEqual(t, container2Ip, compose.ContainerURI(2)) + require.NotEqual(t, container3Ip, compose.ContainerURI(3)) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/recovery/restart_after_faulty_update_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/recovery/restart_after_faulty_update_test.go new file mode 100644 index 0000000000000000000000000000000000000000..94982d49da7d9a397c82b5487ea46930ec84427c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/recovery/restart_after_faulty_update_test.go @@ -0,0 +1,144 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package recovery + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/acceptance/replication/common" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" +) + +func TestUpdatePropertyFieldFailureWithRestart(t *testing.T) { + className := "C2" + propName := "p1" + nestedPropName := "np1" + ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute) + defer cancel() + + compose, err := docker.New(). + With1NodeCluster(). + Start(ctx) + require.Nil(t, err) + + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + + delete := func() { + params := schema.NewSchemaObjectsDeleteParams().WithClassName(className) + _, err := helper.Client(t).Schema.SchemaObjectsDelete(params, nil) + assert.Nil(t, err) + } + defer delete() + + params := schema.NewSchemaObjectsCreateParams().WithObjectClass(&models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: propName, + DataType: []string{"object"}, + NestedProperties: []*models.NestedProperty{{ + Name: nestedPropName, + DataType: []string{"text"}, + }}, + }, + }, + }) + + _, err = helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + assert.Nil(t, err) + + newDescription := "its updated description" + + t.Run("update property and nested property data type and shall fail", func(t *testing.T) { + params := schema.NewSchemaObjectsGetParams(). 
+ WithClassName(className) + + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + assert.Equal(t, "", res.Payload.Properties[0].Description) + + prop := res.Payload.Properties[0] + prop.Description = newDescription + prop.NestedProperties[0].Description = newDescription + prop.NestedProperties[0].Name = "faulty-np2" + prop.NestedProperties[0].DataType = []string{"boolean"} + updateParams := schema.NewSchemaObjectsUpdateParams(). + WithClassName(className). + WithObjectClass(&models.Class{ + Class: className, + Properties: []*models.Property{prop}, + }) + _, err = helper.Client(t).Schema.SchemaObjectsUpdate(updateParams, nil) + require.Error(t, err) + + helper.AssertRequestFail(t, nil, err, func() { + var errResponse *schema.SchemaObjectsUpdateUnprocessableEntity + require.True(t, errors.As(err, &errResponse)) + require.Contains(t, errResponse.Payload.Error[0].Message, "property fields other than description cannot be updated through updating the class") + }) + }) + + t.Run("restart node", func(t *testing.T) { + common.StopNodeAt(ctx, t, compose, 0) + common.StartNodeAt(ctx, t, compose, 0) + }) + + t.Run("verify node is running after faulty schema update", func(t *testing.T) { + helper.SetupClient(compose.GetWeaviate().URI()) + require.NotNil(t, helper.GetClass(t, className)) + }) + + t.Run("create new class to make sure schema updates work even with previous failure", func(t *testing.T) { + helper.SetupClient(compose.GetWeaviate().URI()) + newClass := &models.Class{ + Class: "NewClass", + Properties: []*models.Property{ + { + Name: "p1", + DataType: []string{"text"}, + }, + }, + } + helper.CreateClass(t, newClass) + returnedClass := helper.GetClass(t, newClass.Class) + require.NotNil(t, returnedClass) + require.Equal(t, newClass.Class, returnedClass.Class) + require.Equal(t, newClass.Properties[0].Name, returnedClass.Properties[0].Name) + require.Equal(t, newClass.Properties[0].DataType, returnedClass.Properties[0].DataType) + 
}) + + t.Run("restart node again", func(t *testing.T) { + common.StopNodeAt(ctx, t, compose, 0) + common.StartNodeAt(ctx, t, compose, 0) + }) + + t.Run("verify node is running after faulty and valid schema update", func(t *testing.T) { + helper.SetupClient(compose.GetWeaviate().URI()) + require.NotNil(t, helper.GetClass(t, className)) + require.NotNil(t, helper.GetClass(t, "NewClass")) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/schema/add_class_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/add_class_test.go new file mode 100644 index 0000000000000000000000000000000000000000..76a79294a26693beb54c1d5b477e069b4be56ef5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/add_class_test.go @@ -0,0 +1,604 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "errors" + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/config" +) + +// this test prevents a regression on +// https://github.com/weaviate/weaviate/issues/981 +func TestInvalidDataTypeInProperty(t *testing.T) { + t.Parallel() + className := t.Name() + + t.Run("asserting that this class does not exist yet", func(t *testing.T) { + assert.NotContains(t, GetObjectClassNames(t), className) + }) + + t.Run("trying to import empty string as data type", func(t *testing.T) { + c := &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "someProperty", + DataType: []string{""}, + }, + }, + } + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestFail(t, resp, err, func() { + var parsed *clschema.SchemaObjectsCreateUnprocessableEntity + require.True(t, errors.As(err, &parsed), "error should be unprocessable entity") + assert.Equal(t, "property 'someProperty': invalid dataType: []: dataType cannot be an empty string", + parsed.Payload.Error[0].Message) + }) + }) +} + +func TestInvalidPropertyName(t *testing.T) { + t.Parallel() + className := t.Name() + + t.Run("asserting that this class does not exist yet", func(t *testing.T) { + assert.NotContains(t, GetObjectClassNames(t), className) + }) + + t.Run("trying to create class with invalid property name", func(t *testing.T) { + c := &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "some-property", + DataType: 
schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + } + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestFail(t, resp, err, func() { + var parsed *clschema.SchemaObjectsCreateUnprocessableEntity + require.True(t, errors.As(err, &parsed), "error should be unprocessable entity") + assert.Equal(t, "'some-property' is not a valid property name. Property names in Weaviate "+ + "are restricted to valid GraphQL names, which must be “/[_A-Za-z][_0-9A-Za-z]{0,230}/”", + parsed.Payload.Error[0].Message) + }) + }) +} + +func TestAddAndRemoveObjectClass(t *testing.T) { + randomObjectClassName := "YellowCars" + + // Ensure that this name is not in the schema yet. + t.Log("Asserting that this class does not exist yet") + assert.NotContains(t, GetObjectClassNames(t), randomObjectClassName) + + tc := &models.Class{ + Class: randomObjectClassName, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + } + + t.Log("Creating class") + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(tc) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + t.Log("Asserting that this class is now created") + assert.Contains(t, GetObjectClassNames(t), randomObjectClassName) + + t.Run("pure http - without the auto-generated client", testGetSchemaWithoutClient) + + // Now clean up this class. + t.Log("Remove the class") + delParams := clschema.NewSchemaObjectsDeleteParams().WithClassName(randomObjectClassName) + delResp, err := helper.Client(t).Schema.SchemaObjectsDelete(delParams, nil) + helper.AssertRequestOk(t, delResp, err, nil) + + // And verify that the class does not exist anymore. 
+ assert.NotContains(t, GetObjectClassNames(t), randomObjectClassName) +} + +// This test prevents a regression on +// https://github.com/weaviate/weaviate/issues/1799 +// +// This was related to adding ref props. For example in the case of a circular +// dependency (A<>B), users would typically add A without refs, then add B with +// a reference back to A, finally update A with a ref to B. +// +// This last update that would set the ref prop on an existing class was missing +// module-specific defaults. So when comparing to-be-updated to existing we would +// find differences in the properties, thus triggering the above error. +func TestUpdateHNSWSettingsAfterAddingRefProps(t *testing.T) { + className := "RefUpdateIssueClass" + + t.Run("asserting that this class does not exist yet", func(t *testing.T) { + assert.NotContains(t, GetObjectClassNames(t), className) + }) + + defer func(t *testing.T) { + params := clschema.NewSchemaObjectsDeleteParams().WithClassName(className) + _, err := helper.Client(t).Schema.SchemaObjectsDelete(params, nil) + assert.Nil(t, err) + if err != nil { + var typed *clschema.SchemaObjectsDeleteBadRequest + if errors.As(err, &typed) { + fmt.Println(typed.Payload.Error[0].Message) + } + } + }(t) + + t.Run("initially creating the class", func(t *testing.T) { + c := &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "string_prop", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + } + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + _, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + assert.Nil(t, err) + }) + + t.Run("adding a ref prop after the fact", func(t *testing.T) { + params := clschema.NewSchemaObjectsPropertiesAddParams(). + WithClassName(className). 
+ WithBody(&models.Property{ + DataType: []string{className}, + Name: "ref_prop", + }) + _, err := helper.Client(t).Schema.SchemaObjectsPropertiesAdd(params, nil) + assert.Nil(t, err) + }) + + t.Run("obtaining the class, making an innocent change and trying to update it", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams(). + WithClassName(className) + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + + class := res.Payload + + class.VectorIndexConfig.(map[string]interface{})["ef"] = float64(1234) + + updateParams := clschema.NewSchemaObjectsUpdateParams(). + WithClassName(className). + WithObjectClass(class) + _, err = helper.Client(t).Schema.SchemaObjectsUpdate(updateParams, nil) + assert.Nil(t, err) + }) + + t.Run("obtaining the class, making a change to IndexNullState (immutable) property and update", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams(). + WithClassName(className) + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + + class := res.Payload + + // IndexNullState cannot be updated during runtime + class.InvertedIndexConfig.IndexNullState = true + updateParams := clschema.NewSchemaObjectsUpdateParams(). + WithClassName(className). + WithObjectClass(class) + _, err = helper.Client(t).Schema.SchemaObjectsUpdate(updateParams, nil) + assert.NotNil(t, err) + }) + + t.Run("obtaining the class, making a change to IndexPropertyLength (immutable) property and update", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams(). + WithClassName(className) + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + + class := res.Payload + + // IndexPropertyLength cannot be updated during runtime + class.InvertedIndexConfig.IndexPropertyLength = true + updateParams := clschema.NewSchemaObjectsUpdateParams(). + WithClassName(className). 
+ WithObjectClass(class) + _, err = helper.Client(t).Schema.SchemaObjectsUpdate(updateParams, nil) + assert.NotNil(t, err) + }) +} + +// This test prevents a regression of +// https://github.com/weaviate/weaviate/issues/2692 +// +// In this issue, any time a class had no vector index set, any other update to +// the class would be blocked +func TestUpdateClassWithoutVectorIndex(t *testing.T) { + className := "IAintGotNoVectorIndex" + + t.Run("asserting that this class does not exist yet", func(t *testing.T) { + assert.NotContains(t, GetObjectClassNames(t), className) + }) + + defer func(t *testing.T) { + params := clschema.NewSchemaObjectsDeleteParams().WithClassName(className) + _, err := helper.Client(t).Schema.SchemaObjectsDelete(params, nil) + assert.Nil(t, err) + if err != nil { + var typed *clschema.SchemaObjectsDeleteBadRequest + if errors.As(err, &typed) { + fmt.Println(typed.Payload.Error[0].Message) + } + } + }(t) + + t.Run("initially creating the class", func(t *testing.T) { + c := &models.Class{ + Class: className, + InvertedIndexConfig: &models.InvertedIndexConfig{ + Stopwords: &models.StopwordConfig{ + Preset: "en", + }, + UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND, + }, + Properties: []*models.Property{ + { + Name: "text_prop", + DataType: []string{"text"}, + }, + }, + VectorIndexConfig: map[string]interface{}{ + "skip": true, + }, + } + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + _, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + assert.Nil(t, err) + }) + + t.Run("obtaining the class, making an innocent change and trying to update it", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams(). + WithClassName(className) + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + + class := res.Payload + + class.InvertedIndexConfig.Stopwords.Preset = "none" + + updateParams := clschema.NewSchemaObjectsUpdateParams(). + WithClassName(className). 
+ WithObjectClass(class) + _, err = helper.Client(t).Schema.SchemaObjectsUpdate(updateParams, nil) + assert.Nil(t, err) + }) +} + +// This test prevents a regression of +// https://github.com/weaviate/weaviate/issues/3177 +// +// This test ensures that distance belongs to the immutable properties, i.e. no changes to it are possible after creating the class. +func TestUpdateDistanceSettings(t *testing.T) { + className := "Cosine_Class" + + t.Run("asserting that this class does not exist yet", func(t *testing.T) { + assert.NotContains(t, GetObjectClassNames(t), className) + }) + + defer func(t *testing.T) { + params := clschema.NewSchemaObjectsDeleteParams().WithClassName(className) + _, err := helper.Client(t).Schema.SchemaObjectsDelete(params, nil) + assert.Nil(t, err) + if err != nil { + var typed *clschema.SchemaObjectsDeleteBadRequest + if errors.As(err, &typed) { + fmt.Println(typed.Payload.Error[0].Message) + } + } + }(t) + + t.Run("initially creating the class", func(t *testing.T) { + c := &models.Class{ + Class: className, + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + }, + VectorIndexConfig: map[string]interface{}{ + "distance": "cosine", + }, + } + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + _, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + assert.Nil(t, err) + }) + + t.Run("Trying to change the distance measurement", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams(). + WithClassName(className) + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + + class := res.Payload + + class.VectorIndexConfig.(map[string]interface{})["distance"] = "l2-squared" + + updateParams := clschema.NewSchemaObjectsUpdateParams(). + WithClassName(className). 
+ WithObjectClass(class) + _, err = helper.Client(t).Schema.SchemaObjectsUpdate(updateParams, nil) + assert.NotNil(t, err) + }) +} + +// TODO: https://github.com/weaviate/weaviate/issues/973 +// // This test prevents a regression on the fix for this bug: +// // https://github.com/weaviate/weaviate/issues/831 +// func TestDeleteSingleProperties(t *testing.T) { +// t.Parallel() + +// randomObjectClassName := "RedShip" + +// // Ensure that this name is not in the schema yet. +// t.Log("Asserting that this class does not exist yet") +// assert.NotContains(t, GetThingClassNames(t), randomThingClassName) + +// tc := &models.Class{ +// Class: randomThingClassName, +// Properties: []*models.Property{ +// &models.Property{ +// DataType: schema.DataTypeText.PropString(), +// Tokenization: models.PropertyTokenizationWhitespace, +// Name: "name", +// }, +// &models.Property{ +// DataType: schema.DataTypeText.PropString(), +// Tokenization: models.PropertyTokenizationWhitespace, +// Name: "description", +// }, +// }, +// } + +// t.Log("Creating class") +// params := clschema.NewSchemaThingsCreateParams().WithThingClass(tc) +// resp, err := helper.Client(t).Schema.SchemaThingsCreate(params, nil) +// helper.AssertRequestOk(t, resp, err, nil) + +// t.Log("Asserting that this class is now created") +// assert.Contains(t, GetThingClassNames(t), randomThingClassName) + +// t.Log("adding an instance of this particular class that uses both properties") +// instanceParams := things.NewThingsCreateParams().WithBody( +// &models.Thing{ +// Class: randomThingClassName, +// Schema: map[string]interface{}{ +// "name": "my name", +// "description": "my description", +// }, +// }) +// instanceRes, err := helper.Client(t).Things.ThingsCreate(instanceParams, nil) +// assert.Nil(t, err, "adding a class instance should not error") + +// t.Log("delete a single property of the class") +// deleteParams := clschema.NewSchemaThingsPropertiesDeleteParams(). +// WithClassName(randomThingClassName). 
+// WithPropertyName("description") +// _, err = helper.Client(t).Schema.SchemaThingsPropertiesDelete(deleteParams, nil) +// assert.Nil(t, err, "deleting the property should not error") + +// t.Log("retrieve the class and make sure the property is gone") +// thing := assertGetThingEventually(t, instanceRes.Payload.ID) +// expectedSchema := map[string]interface{}{ +// "name": "my name", +// } +// assert.Equal(t, expectedSchema, thing.Schema) + +// t.Log("verifying that we can still retrieve the thing through graphQL") +// result := gql.AssertGraphQL(t, helper.RootAuth, "{ Get { Things { RedShip { name } } } }") +// ships := result.Get("Get", "Things", "RedShip").AsSlice() +// expectedShip := map[string]interface{}{ +// "name": "my name", +// } +// assert.Contains(t, ships, expectedShip) + +// t.Log("verifying other GQL/REST queries still work") +// gql.AssertGraphQL(t, helper.RootAuth, "{ Meta { Things { RedShip { name { count } } } } }") +// gql.AssertGraphQL(t, helper.RootAuth, `{ Aggregate { Things { RedShip(groupBy: ["name"]) { name { count } } } } }`) +// _, err = helper.Client(t).Things.ThingsList(things.NewThingsListParams(), nil) +// assert.Nil(t, err, "listing things should not error") + +// t.Log("verifying we could re-add the property with the same name") +// readdParams := clschema.NewSchemaThingsPropertiesAddParams(). +// WithClassName(randomThingClassName). +// WithBody(&models.Property{ +// Name: "description", +// DataType: schema.DataTypeText.PropString(), +// Tokenization: models.PropertyTokenizationWhitespace, +// }) + +// _, err = helper.Client(t).Schema.SchemaThingsPropertiesAdd(readdParams, nil) +// assert.Nil(t, err, "adding the previously deleted property again should not error") + +// // Now clean up this class. 
+// t.Log("Remove the class") +// delParams := clschema.NewSchemaThingsDeleteParams().WithClassName(randomThingClassName) +// delResp, err := helper.Client(t).Schema.SchemaThingsDelete(delParams, nil) +// helper.AssertRequestOk(t, delResp, err, nil) + +// // And verify that the class does not exist anymore. +// assert.NotContains(t, GetThingClassNames(t), randomThingClassName) +// } +func TestMaximumAllowedCollectionsCount(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + t.Run("with limit of 1", func(t *testing.T) { + // Start a single node with collection limit of 1 + compose, err := docker.New().With3NodeCluster(). + WithWeaviateEnv("MAXIMUM_ALLOWED_COLLECTIONS_COUNT", "1"). + Start(ctx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + className := "TestCollection1" + c1 := &models.Class{ + Class: className, + Vectorizer: "none", + } + + // First class should succeed + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c1) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + + // Second class should fail + className2 := "TestCollection2" + c2 := &models.Class{ + Class: className2, + Vectorizer: "none", + } + + params = clschema.NewSchemaObjectsCreateParams().WithObjectClass(c2) + resp, err = helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestFail(t, resp, err, func() { + var parsed *clschema.SchemaObjectsCreateUnprocessableEntity + require.True(t, errors.As(err, &parsed), "error should be unprocessable entity") + assert.Contains(t, parsed.Payload.Error[0].Message, "maximum number of collections") + }) + }) + + t.Run("with default limit (unlimited collections)", func(t *testing.T) { + // Start a single node 
with unlimited collections + compose, err := docker.New().With3NodeCluster(). + Start(ctx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + // Create multiple classes (more than default limit) + classNames := []string{} + for i := 0; i < 102; i++ { + className := fmt.Sprintf("TestCollection_%d", i) + classNames = append(classNames, className) + + c := &models.Class{ + Class: className, + Vectorizer: "none", + } + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + } + + // Verify all classes exist + for _, className := range classNames { + assert.Contains(t, GetObjectClassNames(t), className) + } + }) + + t.Run("with limit (100)", func(t *testing.T) { + // Start a single node with default limit + compose, err := docker.New().With3NodeCluster(). + WithWeaviateEnv("MAXIMUM_ALLOWED_COLLECTIONS_COUNT", "100"). 
+ Start(ctx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + // Create classes up to the limit + classNames := []string{} + for i := 0; i < 100; i++ { + className := fmt.Sprintf("TestCollection_%d", i) + classNames = append(classNames, className) + + c := &models.Class{ + Class: className, + Vectorizer: "none", + } + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + } + + // Attempt to create one more class (should fail) + c := &models.Class{ + Class: "TestCollectionExtra", + Vectorizer: "none", + } + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + resp, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestFail(t, resp, err, func() { + var parsed *clschema.SchemaObjectsCreateUnprocessableEntity + require.True(t, errors.As(err, &parsed), "error should be unprocessable entity") + assert.Contains(t, parsed.Payload.Error[0].Message, "maximum number of collections") + }) + + // Delete one class and verify we can create a new one + deleteParams := clschema.NewSchemaObjectsDeleteParams().WithClassName(classNames[0]) + delResp, err := helper.Client(t).Schema.SchemaObjectsDelete(deleteParams, nil) + helper.AssertRequestOk(t, delResp, err, nil) + + // Should now be able to create a new class + params = clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + resp, err = helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + helper.AssertRequestOk(t, resp, err, nil) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/schema/default_compression_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/default_compression_test.go new file mode 100644 
index 0000000000000000000000000000000000000000..e6f22c38246739a61508aaf439f309257ec06a82 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/default_compression_test.go @@ -0,0 +1,390 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + //"context" + "context" + "encoding/json" + "fmt" + "testing" + "time" + + "github.com/weaviate/weaviate/usecases/config/runtime" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/vectorindex/hnsw" + + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/articles" +) + +func TestAssignDynamic(t *testing.T) { + d := runtime.NewDynamicValue("rq") + require.Equal(t, "rq", d.Get()) +} + +func TestDefaultCompressionRQ8(t *testing.T) { + mainCtx := context.Background() + + compose, err := docker.New(). + WithWeaviateCluster(3). + WithWeaviateEnv("DEFAULT_QUANTIZATION", "rq-8"). 
+ Start(mainCtx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(mainCtx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + + cls := articles.ParagraphsClass() + cls.ReplicationConfig = &models.ReplicationConfig{ + Factor: 1, + } + cls.MultiTenancyConfig = &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantActivation: true, + AutoTenantCreation: true, + } + + // Create the class + t.Log("Creating class", cls.Class) + helper.DeleteClass(t, cls.Class) + helper.CreateClass(t, cls) + + // Load data + t.Log("Loading data into tenant...") + tenantName := "tenant" + batch := make([]*models.Object, 0, 100000) + start := time.Now() + for j := 0; j < 10; j++ { + batch = append(batch, (*models.Object)(articles.NewParagraph(). + WithContents(fmt.Sprintf("paragraph#%d", j)). + WithTenant(tenantName). + Object())) + if len(batch) == 50000 { + helper.CreateObjectsBatch(t, batch) + t.Logf("Loaded %d objects", len(batch)) + batch = batch[:0] + } + } + if len(batch) > 0 { + helper.CreateObjectsBatch(t, batch) + t.Logf("Loaded remaining %d objects", len(batch)) + } + t.Logf("Data loading took %s", time.Since(start)) + + nodes, err := helper.Client(t).Nodes.NodesGet(nil, nil) + require.Nil(t, err) + + nodeNames := make([]string, len(nodes.GetPayload().Nodes)) + for i, node := range nodes.GetPayload().Nodes { + nodeNames[i] = node.Name + } + + // Get the schema + + t.Log("Getting schema") + schema, err := helper.Client(t).Schema.SchemaDump(nil, nil) + fmt.Printf("Schema: %+v\n", schema.GetPayload()) + require.Nil(t, err) + require.NotNil(t, schema) + payload := schema.GetPayload() + require.NotNil(t, payload) + viconfig := payload.Classes[0].VectorIndexConfig + require.NotNil(t, viconfig) + rq := viconfig.(map[string]interface{})["rq"] + require.NotNil(t, rq) + enabled := rq.(map[string]interface{})["enabled"].(bool) + require.Equal(t, true, enabled) + jsonBits := 
rq.(map[string]interface{})["bits"].(json.Number) + bits, err := jsonBits.Int64() + require.Nil(t, err) + require.Equal(t, int64(8), bits) + jsonRescoreLimit := rq.(map[string]interface{})["rescoreLimit"].(json.Number) + rescoreLimit, err := jsonRescoreLimit.Int64() + require.Nil(t, err) + require.Equal(t, int64(hnsw.DefaultRQRescoreLimit), rescoreLimit) + skipDefaultQuantization := viconfig.(map[string]interface{})["skipDefaultQuantization"].(bool) + require.Equal(t, false, skipDefaultQuantization) + trackDefaultQuantization := viconfig.(map[string]interface{})["trackDefaultQuantization"].(bool) + require.Equal(t, true, trackDefaultQuantization) +} + +func TestDefaultCompressionRQ1(t *testing.T) { + mainCtx := context.Background() + + compose, err := docker.New(). + WithWeaviateCluster(3). + WithWeaviateEnv("DEFAULT_QUANTIZATION", "rq-1"). + Start(mainCtx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(mainCtx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + + cls := articles.ParagraphsClass() + cls.ReplicationConfig = &models.ReplicationConfig{ + Factor: 1, + } + cls.MultiTenancyConfig = &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantActivation: true, + AutoTenantCreation: true, + } + + // Create the class + t.Log("Creating class", cls.Class) + helper.DeleteClass(t, cls.Class) + helper.CreateClass(t, cls) + + // Load data + t.Log("Loading data into tenant...") + tenantName := "tenant" + batch := make([]*models.Object, 0, 100000) + start := time.Now() + for j := 0; j < 10; j++ { + batch = append(batch, (*models.Object)(articles.NewParagraph(). + WithContents(fmt.Sprintf("paragraph#%d", j)). + WithTenant(tenantName). 
+ Object())) + if len(batch) == 50000 { + helper.CreateObjectsBatch(t, batch) + t.Logf("Loaded %d objects", len(batch)) + batch = batch[:0] + } + } + if len(batch) > 0 { + helper.CreateObjectsBatch(t, batch) + t.Logf("Loaded remaining %d objects", len(batch)) + } + t.Logf("Data loading took %s", time.Since(start)) + + nodes, err := helper.Client(t).Nodes.NodesGet(nil, nil) + require.Nil(t, err) + + nodeNames := make([]string, len(nodes.GetPayload().Nodes)) + for i, node := range nodes.GetPayload().Nodes { + nodeNames[i] = node.Name + } + + // Get the schema + + t.Log("Getting schema") + schema, err := helper.Client(t).Schema.SchemaDump(nil, nil) + fmt.Printf("Schema: %+v\n", schema.GetPayload()) + require.Nil(t, err) + require.NotNil(t, schema) + payload := schema.GetPayload() + require.NotNil(t, payload) + viconfig := payload.Classes[0].VectorIndexConfig + require.NotNil(t, viconfig) + rq := viconfig.(map[string]interface{})["rq"] + require.NotNil(t, rq) + enabled := rq.(map[string]interface{})["enabled"].(bool) + require.Equal(t, true, enabled) + jsonBits := rq.(map[string]interface{})["bits"].(json.Number) + bits, err := jsonBits.Int64() + require.Nil(t, err) + require.Equal(t, int64(1), bits) + jsonRescoreLimit := rq.(map[string]interface{})["rescoreLimit"].(json.Number) + rescoreLimit, err := jsonRescoreLimit.Int64() + require.Nil(t, err) + require.Equal(t, int64(hnsw.DefaultBRQRescoreLimit), rescoreLimit) + skipDefaultQuantization := viconfig.(map[string]interface{})["skipDefaultQuantization"].(bool) + require.Equal(t, false, skipDefaultQuantization) + trackDefaultQuantization := viconfig.(map[string]interface{})["trackDefaultQuantization"].(bool) + require.Equal(t, true, trackDefaultQuantization) +} + +func TestDefaultCompressionWithSkipDefaultQuantization(t *testing.T) { + mainCtx := context.Background() + + compose, err := docker.New(). + WithWeaviateCluster(3). + WithWeaviateEnv("DEFAULT_QUANTIZATION", "rq-8"). 
+ Start(mainCtx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(mainCtx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + + cls := articles.ParagraphsClass() + cls.ReplicationConfig = &models.ReplicationConfig{ + Factor: 1, + } + cls.MultiTenancyConfig = &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantActivation: true, + AutoTenantCreation: true, + } + cls.VectorIndexConfig = map[string]interface{}{ + "skipDefaultQuantization": true, + } + + // Create the class + t.Log("Creating class", cls.Class) + helper.DeleteClass(t, cls.Class) + helper.CreateClass(t, cls) + + // Load data + t.Log("Loading data into tenant...") + tenantName := "tenant" + batch := make([]*models.Object, 0, 100000) + start := time.Now() + for j := 0; j < 10; j++ { + batch = append(batch, (*models.Object)(articles.NewParagraph(). + WithContents(fmt.Sprintf("paragraph#%d", j)). + WithTenant(tenantName). 
+ Object())) + if len(batch) == 50000 { + helper.CreateObjectsBatch(t, batch) + t.Logf("Loaded %d objects", len(batch)) + batch = batch[:0] + } + } + if len(batch) > 0 { + helper.CreateObjectsBatch(t, batch) + t.Logf("Loaded remaining %d objects", len(batch)) + } + t.Logf("Data loading took %s", time.Since(start)) + + nodes, err := helper.Client(t).Nodes.NodesGet(nil, nil) + require.Nil(t, err) + + nodeNames := make([]string, len(nodes.GetPayload().Nodes)) + for i, node := range nodes.GetPayload().Nodes { + nodeNames[i] = node.Name + } + + // Get the schema + + t.Log("Getting schema") + schema, err := helper.Client(t).Schema.SchemaDump(nil, nil) + fmt.Printf("Schema: %+v\n", schema.GetPayload()) + require.Nil(t, err) + require.NotNil(t, schema) + payload := schema.GetPayload() + require.NotNil(t, payload) + viconfig := payload.Classes[0].VectorIndexConfig + require.NotNil(t, viconfig) + rq := viconfig.(map[string]interface{})["rq"] + require.NotNil(t, rq) + enabled := rq.(map[string]interface{})["enabled"].(bool) + require.Equal(t, false, enabled) + skipDefaultQuantization := viconfig.(map[string]interface{})["skipDefaultQuantization"].(bool) + require.Equal(t, true, skipDefaultQuantization) + trackDefaultQuantization := viconfig.(map[string]interface{})["trackDefaultQuantization"].(bool) + require.Equal(t, false, trackDefaultQuantization) +} + +func TestDefaultCompressionOverride(t *testing.T) { + mainCtx := context.Background() + + compose, err := docker.New(). + WithWeaviateCluster(3). + WithWeaviateEnv("DEFAULT_QUANTIZATION", "rq-8"). 
+ Start(mainCtx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(mainCtx); err != nil { + t.Fatalf("failed to terminate test containers: %s", err.Error()) + } + }() + + helper.SetupClient(compose.GetWeaviate().URI()) + + cls := articles.ParagraphsClass() + cls.ReplicationConfig = &models.ReplicationConfig{ + Factor: 1, + } + cls.MultiTenancyConfig = &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantActivation: true, + AutoTenantCreation: true, + } + + cfg := hnsw.NewDefaultUserConfig() + cfg.BQ.Enabled = true + cls.VectorIndexConfig = cfg + + // Create the class + t.Log("Creating class", cls.Class) + helper.DeleteClass(t, cls.Class) + helper.CreateClass(t, cls) + + // Load data + t.Log("Loading data into tenant...") + tenantName := "tenant" + batch := make([]*models.Object, 0, 100000) + start := time.Now() + for j := 0; j < 10; j++ { + batch = append(batch, (*models.Object)(articles.NewParagraph(). + WithContents(fmt.Sprintf("paragraph#%d", j)). + WithTenant(tenantName). 
+ Object())) + if len(batch) == 50000 { + helper.CreateObjectsBatch(t, batch) + t.Logf("Loaded %d objects", len(batch)) + batch = batch[:0] + } + } + if len(batch) > 0 { + helper.CreateObjectsBatch(t, batch) + t.Logf("Loaded remaining %d objects", len(batch)) + } + t.Logf("Data loading took %s", time.Since(start)) + + nodes, err := helper.Client(t).Nodes.NodesGet(nil, nil) + require.Nil(t, err) + + nodeNames := make([]string, len(nodes.GetPayload().Nodes)) + for i, node := range nodes.GetPayload().Nodes { + nodeNames[i] = node.Name + } + + // Get the schema + + t.Log("Getting schema") + schema, err := helper.Client(t).Schema.SchemaDump(nil, nil) + fmt.Printf("Schema: %+v\n", schema.GetPayload()) + require.Nil(t, err) + require.NotNil(t, schema) + payload := schema.GetPayload() + require.NotNil(t, payload) + viconfig := payload.Classes[0].VectorIndexConfig + require.NotNil(t, viconfig) + rq := viconfig.(map[string]interface{})["rq"] + require.NotNil(t, rq) + require.False(t, rq.(map[string]interface{})["enabled"].(bool)) + bq := viconfig.(map[string]interface{})["bq"] + require.NotNil(t, bq) + enabled := bq.(map[string]interface{})["enabled"].(bool) + require.Equal(t, true, enabled) + skipDefaultQuantization := viconfig.(map[string]interface{})["skipDefaultQuantization"].(bool) + require.Equal(t, false, skipDefaultQuantization) + trackDefaultQuantization := viconfig.(map[string]interface{})["trackDefaultQuantization"].(bool) + require.Equal(t, false, trackDefaultQuantization) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/schema/get_class_consistency_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/get_class_consistency_test.go new file mode 100644 index 0000000000000000000000000000000000000000..569f4b328f0389866d57a4893b8bf2d197cf79f3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/get_class_consistency_test.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ 
_` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" +) + +func TestGetClassWithConsistency(t *testing.T) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + // 3 Node cluster so that we can verify that the proxy to leader feature work + compose, err := docker.New().WithWeaviateCluster(3). + WithText2VecContextionary(). + Start(ctx) + require.Nil(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + helper.SetupClient(compose.GetWeaviateNode2().URI()) + defer helper.ResetClient() + + className := t.Name() + + t.Run("asserting that this class does not exist yet", func(t *testing.T) { + assert.NotContains(t, GetObjectClassNames(t), className) + }) + + c := &models.Class{ + Class: className, + } + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + _, err = helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + assert.Nil(t, err) + + truePtr := true + paramsGet := clschema.NewSchemaObjectsGetParams().WithClassName(className).WithConsistency(&truePtr) + res, err := helper.Client(t).Schema.SchemaObjectsGet(paramsGet, nil) + require.Nil(t, err) + // Check only the `Class` as the returned class from the server has all the defaults config initialized + require.Equal(t, c.Class, res.Payload.Class) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/schema/get_schema_without_client_test.go 
b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/get_schema_without_client_test.go new file mode 100644 index 0000000000000000000000000000000000000000..bde5f3bfc1f3cc871fb91c83b93ac7cda37e8299 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/get_schema_without_client_test.go @@ -0,0 +1,136 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "encoding/json" + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/vectorindex/hnsw" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/config" +) + +func testGetSchemaWithoutClient(t *testing.T) { + res, err := http.Get(fmt.Sprintf("%s%s", helper.GetWeaviateURL(), "/v1/schema")) + require.Nil(t, err) + + defer res.Body.Close() + var body map[string]interface{} + err = json.NewDecoder(res.Body).Decode(&body) + require.Nil(t, err) + + expected := map[string]interface{}{ + "classes": []interface{}{ + map[string]interface{}{ + "class": "YellowCars", + "properties": (interface{})(nil), + "vectorIndexType": "hnsw", // from default + "vectorIndexConfig": map[string]interface{}{ // from default + "skip": false, + "cleanupIntervalSeconds": float64(300), + "efConstruction": float64(128), + "flatSearchCutoff": float64(40000), + "ef": float64(-1), + "maxConnections": float64(32), + "vectorCacheMaxObjects": float64(1e12), + "dynamicEfMin": float64(100), + "dynamicEfMax": float64(500), + "dynamicEfFactor": float64(8), + "distance": "cosine", + "bq": map[string]interface{}{ + "enabled": false, + }, + "pq": map[string]interface{}{ + "bitCompression": false, + "centroids": float64(256), + "enabled": 
false, + "encoder": map[string]interface{}{ + "distribution": "log-normal", + "type": hnsw.PQEncoderTypeKMeans, + }, + "segments": float64(0), + "trainingLimit": float64(100000), + }, + "sq": map[string]interface{}{ + "enabled": false, + "trainingLimit": float64(100000), + "rescoreLimit": float64(20), + }, + "rq": map[string]interface{}{ + "enabled": false, + "bits": float64(8), + "rescoreLimit": float64(20), + }, + "filterStrategy": "sweeping", + "multivector": map[string]interface{}{ + "enabled": false, + "aggregation": "maxSim", + "muvera": map[string]interface{}{ + "dprojections": float64(16), + "enabled": false, + "ksim": float64(4), + "repetitions": float64(10), + }, + }, + "skipDefaultQuantization": false, + "trackDefaultQuantization": false, + }, + "shardingConfig": map[string]interface{}{ + "actualCount": float64(1), + "actualVirtualCount": float64(128), + "desiredCount": float64(1), + "desiredVirtualCount": float64(128), + "function": "murmur3", + "strategy": "hash", + "key": "_id", + "virtualPerPhysical": float64(128), + }, + "replicationConfig": map[string]interface{}{ + "asyncEnabled": false, + "factor": float64(1), + "deletionStrategy": "NoAutomatedResolution", + }, + "vectorizer": "text2vec-contextionary", // global default from env var, see docker-compose-test.yml + "invertedIndexConfig": map[string]interface{}{ + "cleanupIntervalSeconds": float64(60), + "bm25": map[string]interface{}{ + "k1": float64(1.2), + "b": float64(0.75), + }, + "stopwords": map[string]interface{}{ + "preset": "en", + "additions": nil, + "removals": nil, + }, + "usingBlockMaxWAND": config.DefaultUsingBlockMaxWAND, + }, + "moduleConfig": map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + "multiTenancyConfig": map[string]interface{}{ + "enabled": false, + "autoTenantCreation": false, + "autoTenantActivation": false, + }, + }, + }, + } + + assert.Equal(t, expected, body) +} diff --git 
a/platform/dbops/binaries/weaviate-src/test/acceptance/schema/helper.go b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/helper.go new file mode 100644 index 0000000000000000000000000000000000000000..e9e2779500b4c2f9522dd382665a47f702aafe9b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/helper.go @@ -0,0 +1,48 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "testing" + + "github.com/weaviate/weaviate/test/helper" +) + +// Helper function to get all the names of Object classes. +func GetObjectClassNames(t *testing.T) []string { + resp, err := helper.Client(t).Schema.SchemaDump(nil, nil) + var names []string + + // Extract all names + helper.AssertRequestOk(t, resp, err, func() { + for _, class := range resp.Payload.Classes { + names = append(names, class.Class) + } + }) + + return names +} + +// Helper function to get all the names of Action classes. 
+// func GetActionClassNames(t *testing.T) []string { +// resp, err := helper.Client(t).Schema.SchemaDump(nil, nil) +// var names []string + +// // Extract all names +// helper.AssertRequestOk(t, resp, err, func() { +// for _, class := range resp.Payload.Actions.Classes { +// names = append(names, class.Class) +// } +// }) + +// return names +// } diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/schema/update_class_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/update_class_test.go new file mode 100644 index 0000000000000000000000000000000000000000..db0dbbb2e6f5dadb2492b7f7034c19acbf2745cf --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/update_class_test.go @@ -0,0 +1,190 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + clschema "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/helper" +) + +func TestUpdateClassDescription(t *testing.T) { + className := "C1" + + t.Run("delete class if exists", func(t *testing.T) { + params := clschema.NewSchemaObjectsDeleteParams().WithClassName(className) + _, err := helper.Client(t).Schema.SchemaObjectsDelete(params, nil) + assert.Nil(t, err) + }) + + t.Run("initially creating the class", func(t *testing.T) { + c := &models.Class{ + Class: className, + } + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + _, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + assert.Nil(t, err) + }) + + newDescription := "it's updated description" + + t.Run("update class description", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams(). + WithClassName(className) + + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + assert.Equal(t, res.Payload.Description, "") + + class := res.Payload + class.Description = newDescription + updateParams := clschema.NewSchemaObjectsUpdateParams(). + WithClassName(className). + WithObjectClass(class) + _, err = helper.Client(t).Schema.SchemaObjectsUpdate(updateParams, nil) + assert.Nil(t, err) + }) + + t.Run("assert update class description", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams(). 
+ WithClassName(className) + + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + assert.Equal(t, res.Payload.Description, newDescription) + }) +} + +func TestUpdatePropertyDescription(t *testing.T) { + className := "C2" + propName := "p1" + nestedPropName := "np1" + + delete := func() { + params := clschema.NewSchemaObjectsDeleteParams().WithClassName(className) + _, err := helper.Client(t).Schema.SchemaObjectsDelete(params, nil) + assert.Nil(t, err) + } + defer delete() + + delete() + c := &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: propName, + DataType: []string{"object"}, + NestedProperties: []*models.NestedProperty{{ + Name: nestedPropName, + DataType: []string{"text"}, + }}, + }, + }, + } + + params := clschema.NewSchemaObjectsCreateParams().WithObjectClass(c) + _, err := helper.Client(t).Schema.SchemaObjectsCreate(params, nil) + assert.Nil(t, err) + + newDescription := "its updated description" + + t.Run("update property and nested property descriptions", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams(). + WithClassName(className) + + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + assert.Equal(t, "", res.Payload.Properties[0].Description) + + prop := res.Payload.Properties[0] + prop.Description = newDescription + prop.NestedProperties[0].Description = newDescription + updateParams := clschema.NewSchemaObjectsUpdateParams(). + WithClassName(className). 
+ WithObjectClass(&models.Class{ + Class: className, + Properties: []*models.Property{prop}, + }) + _, err = helper.Client(t).Schema.SchemaObjectsUpdate(updateParams, nil) + assert.Nil(t, err) + + params = clschema.NewSchemaObjectsGetParams().WithClassName(className) + res, err = helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + assert.Equal(t, newDescription, res.Payload.Properties[0].Description) + assert.Equal(t, newDescription, res.Payload.Properties[0].NestedProperties[0].Description) + }) + + t.Run("assert updated descriptions", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams(). + WithClassName(className) + + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + assert.Equal(t, newDescription, res.Payload.Properties[0].Description) + assert.Equal(t, newDescription, res.Payload.Properties[0].NestedProperties[0].Description) + }) + + t.Run("update field other than description", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams(). + WithClassName(className) + + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + + prop := res.Payload.Properties[0] + prop.DataType = []string{"int"} + updateParams := clschema.NewSchemaObjectsUpdateParams(). + WithClassName(className). + WithObjectClass(&models.Class{ + Class: className, + Properties: []*models.Property{prop}, + }) + _, err = helper.Client(t).Schema.SchemaObjectsUpdate(updateParams, nil) + assert.NotNil(t, err) + var parsed *clschema.SchemaObjectsUpdateUnprocessableEntity + require.ErrorAs(t, err, &parsed) + if errors.As(err, &parsed) { + require.Contains(t, parsed.Payload.Error[0].Message, "property fields other than description cannot be updated through updating the class") + } + }) + + t.Run("update field other than description in nested", func(t *testing.T) { + params := clschema.NewSchemaObjectsGetParams(). 
+ WithClassName(className) + + res, err := helper.Client(t).Schema.SchemaObjectsGet(params, nil) + require.Nil(t, err) + + prop := res.Payload.Properties[0] + prop.NestedProperties[0].DataType = []string{"int"} + updateParams := clschema.NewSchemaObjectsUpdateParams(). + WithClassName(className). + WithObjectClass(&models.Class{ + Class: className, + Properties: []*models.Property{prop}, + }) + _, err = helper.Client(t).Schema.SchemaObjectsUpdate(updateParams, nil) + assert.NotNil(t, err) + var parsed *clschema.SchemaObjectsUpdateUnprocessableEntity + require.ErrorAs(t, err, &parsed) + if errors.As(err, &parsed) { + require.Contains(t, parsed.Payload.Error[0].Message, "property fields other than description cannot be updated through updating the class") + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/schema/update_with_vectorizer_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/update_with_vectorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..96c9dcf1bbed05337c46619ec37eb2722f14f11a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/schema/update_with_vectorizer_test.go @@ -0,0 +1,57 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/test/helper/sample-schema/books" +) + +func Test_UpdateClassWithText2VecOpenAI(t *testing.T) { + ctx := context.Background() + compose, err := docker.New(). + WithText2VecOpenAI("", "", ""). + WithWeaviate(). 
+ Start(ctx) + require.NoError(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + endpoint := compose.GetWeaviate().URI() + helper.SetupClient(endpoint) + defer helper.ResetClient() + + t.Run("update description of legacy vectorizer class", func(t *testing.T) { + cls := books.ClassOpenAIWithOptions() + helper.CreateClass(t, cls) + defer helper.DeleteClass(t, cls.Class) + + cls.Description = "updated description" + helper.UpdateClass(t, cls) + }) + + t.Run("update description of named vectors class", func(t *testing.T) { + cls := books.ClassNamedOpenAIWithOptions() + helper.CreateClass(t, cls) + defer helper.DeleteClass(t, cls.Class) + + cls.Description = "updated description" + helper.UpdateClass(t, cls) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/snapshots/raft_snapshot_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/snapshots/raft_snapshot_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d95a39dd01064d88e0b263b270c5e75c2fcd3482 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/snapshots/raft_snapshot_test.go @@ -0,0 +1,208 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package recovery + +import ( + "context" + "fmt" + "os/exec" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/testcontainers/testcontainers-go" + + "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/test/docker" + "github.com/weaviate/weaviate/test/helper" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func TestSchemaSnapshotRecovery(t *testing.T) { + ctx := context.Background() + // Start a 3-node cluster with a low snapshot threshold + compose, err := docker.New(). + WithWeaviateCluster(3). + WithWeaviateEnv("RAFT_SNAPSHOT_THRESHOLD", "1"). // Force snapshot after every change + WithWeaviateEnv("RAFT_SNAPSHOT_INTERVAL", "1"). // Force snapshot every second + WithWeaviateEnv("RAFT_TRAILING_LOGS", "1"). // Keep one trailing logs + Start(ctx) + require.NoError(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + // Stop node 3 directly to make sure it doesn't get any added classes + t.Run("stop node 3", func(t *testing.T) { + require.NoError(t, compose.StopAt(ctx, 2, nil)) + }) + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + // Create classes while node 3 is down + t.Run("create classes while node 3 is down", func(t *testing.T) { + // Create multiple classes + for idx := 0; idx < 100; idx++ { + className := fmt.Sprintf("TestClass_%d", idx) + class := &models.Class{ + Class: className, + } + helper.CreateClass(t, class) + } + + // Verify classes exist on running nodes + for idx := 0; idx < 100; idx++ { + className := fmt.Sprintf("TestClass_%d", idx) + class := helper.GetClass(t, className) + require.NotNil(t, class) + require.Equal(t, className, class.Class) + } + }) + + // Start node 3 back up + t.Run("start node 3", func(t 
*testing.T) { + require.NoError(t, compose.StartAt(ctx, 2)) + helper.SetupClient(compose.GetWeaviateNode3().URI()) + }) + + // Verify all classes exist on recovered node + t.Run("verify classes on recovered node", func(t *testing.T) { + // Wait for node 3 to be ready and verify schema matches + assert.Eventually(t, func() bool { + // Get schema from all nodes + helper.SetupClient(compose.GetWeaviate().URI()) + schema1, err := helper.Client(t).Schema.SchemaDump(schema.NewSchemaDumpParams().WithConsistency(Bool(false)), nil) + assert.NoError(t, err) + + helper.SetupClient(compose.GetWeaviateNode2().URI()) + schema2, err := helper.Client(t).Schema.SchemaDump(schema.NewSchemaDumpParams().WithConsistency(Bool(false)), nil) + assert.NoError(t, err) + + helper.SetupClient(compose.GetWeaviateNode3().URI()) + schema3, err := helper.Client(t).Schema.SchemaDump(schema.NewSchemaDumpParams().WithConsistency(Bool(false)), nil) + assert.NoError(t, err) + + // All schemas should have the same number of classes + return len(schema1.Payload.Classes) == len(schema2.Payload.Classes) && + len(schema1.Payload.Classes) == len(schema3.Payload.Classes) && + len(schema1.Payload.Classes) == 100 + }, 90*time.Second, 1*time.Second, "Schema should match across all nodes") + }) +} + +func TestRBACSnapshotRecovery(t *testing.T) { + // Set up test users and roles + adminUser := "admin-user" + adminKey := "admin-key" + testRole := "test_role" + + ctx := context.Background() + // Start a 3-node cluster with RBAC enabled and a low snapshot threshold + compose, err := docker.New(). + WithWeaviateCluster(3). + WithApiKey(). + WithUserApiKey(adminUser, adminKey). + WithRBAC(). + WithRbacRoots(adminUser). + WithWeaviateEnv("RAFT_SNAPSHOT_THRESHOLD", "1"). // Force snapshot after every change + WithWeaviateEnv("RAFT_SNAPSHOT_INTERVAL", "1"). // Force snapshot every second + WithWeaviateEnv("RAFT_TRAILING_LOGS", "1"). 
// Keep one trailing logs + Start(ctx) + require.NoError(t, err) + defer func() { + if err := compose.Terminate(ctx); err != nil { + t.Fatalf("failed to terminate test containers: %v", err) + } + }() + + // Stop node 3 directly to make sure it doesn't get any added roles + t.Run("stop node 3", func(t *testing.T) { + require.NoError(t, compose.StopAt(ctx, 2, nil)) + }) + + helper.SetupClient(compose.GetWeaviate().URI()) + defer helper.ResetClient() + + // Create all roles while node 3 is down + t.Run("create roles while node 3 is down", func(t *testing.T) { + // Create roles + for idx := 0; idx < 100; idx++ { + roleName := fmt.Sprintf("%s_while_down_%d", testRole, idx) + helper.CreateRole(t, adminKey, &models.Role{ + Name: &roleName, + Permissions: []*models.Permission{{ + Action: String(authorization.CreateCollections), + Collections: &models.PermissionCollections{ + Collection: String("*"), + }, + }}, + }) + } + + for idx := 0; idx < 100; idx++ { + roleName := fmt.Sprintf("%s_while_down_%d", testRole, idx) + role := helper.GetRoleByName(t, adminKey, roleName) + require.NotNil(t, role) + require.Equal(t, roleName, *role.Name) + } + }) + + // Start node 3 back up + t.Run("start node 3", func(t *testing.T) { + require.NoError(t, compose.StartAt(ctx, 2)) + helper.SetupClient(compose.GetWeaviateNode3().URI()) + }) + + // Verify all roles exist on recovered node + t.Run("verify roles on recovered node", func(t *testing.T) { + // Wait for node 3 to be ready and verify checksums match + assert.Eventually(t, func() bool { + checksum1 := getPolicyChecksum(t, compose.GetWeaviate().Container()) + checksum2 := getPolicyChecksum(t, compose.GetWeaviateNode2().Container()) + checksum3 := getPolicyChecksum(t, compose.GetWeaviateNode3().Container()) + // All checksums should match + return checksum1 != "" && checksum2 != "" && checksum3 != "" && + checksum1 == checksum2 && checksum1 == checksum3 + }, 90*time.Second, 1*time.Second, "Policy checksums should match across all nodes") + 
+	})
+}
+
+// getPolicyChecksum returns the md5 checksum of the sorted RBAC policy file
+// inside the given container, or "" if it cannot be read. Sorting first makes
+// the checksum independent of line ordering, so it can be compared across
+// nodes.
+func getPolicyChecksum(t *testing.T, container testcontainers.Container) string {
+	// Run sort | md5sum on the policy file directly in the container
+	cmd := exec.Command("docker", "exec", container.GetContainerID(), "sh", "-c", "sort data/raft/rbac/policy.csv | md5sum")
+	output, err := cmd.CombinedOutput()
+	if err != nil {
+		// Best effort: returning "" makes the caller's Eventually loop retry
+		// (e.g. while the container is still starting up).
+		t.Logf("Failed to get policy checksum: %v", err)
+		return ""
+	}
+
+	// Extract the checksum from the output (md5sum prints "<checksum>  -")
+	parts := strings.Fields(string(output))
+	if len(parts) < 1 {
+		return ""
+	}
+	return parts[0]
+}
+
+// Bool returns a pointer to b (helper for optional API parameters).
+func Bool(b bool) *bool {
+	return &b
+}
+
+// String returns a pointer to s (helper for optional API parameters).
+func String(s string) *string {
+	return &s
+}
diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/stress_tests/concurrent_batches_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance/stress_tests/concurrent_batches_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..d49f9fdbbb542d02aff80e81d099e6adfcb9acdb
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/test/acceptance/stress_tests/concurrent_batches_test.go
@@ -0,0 +1,94 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+// +// CONTACT: hello@weaviate.io +// + +package stress_tests + +import ( + "encoding/json" + "fmt" + "sync" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" +) + +type batch struct { + Objects []*models.Object +} + +const class = "TestClass" + +func Test_AddConcurrentSchemas_sameObject(t *testing.T) { + url := "http://localhost:8080/v1/" + batch := batch{createObject(class)} + parallelReqs := 10 + wg := sync.WaitGroup{} + wg.Add(parallelReqs) + + // Add schema and object + c := createHttpClient() + clearExistingObjects(c, url) + requestSchema := createSchemaRequest(url, class, false) + performRequest(c, requestSchema) + + for i := 0; i < parallelReqs; i++ { + go func(j int) { + defer wg.Done() + c := createHttpClient() + performRequest(c, createRequest(url+"batch/objects", "POST", batch)) + }(i) + } + wg.Wait() + + requestRead := createRequest(url+"objects?limit="+fmt.Sprint(10)+"&class="+class, "GET", nil) + _, body, _ := performRequest(c, requestRead) + var result map[string]interface{} + json.Unmarshal(body, &result) + assert.Equal(t, 1, int(result["totalResults"].(float64))) +} + +func Test_AddConcurrentBatches_differentObjects(t *testing.T) { + url := "http://localhost:8080/v1/" + + parallelReqs := 150 + wg := sync.WaitGroup{} + wg.Add(parallelReqs) + + // Add schema and object + c := createHttpClient() + clearExistingObjects(c, url) + requestSchema := createSchemaRequest(url, class, false) + performRequest(c, requestSchema) + batch1 := batch{createObject(class)} + batch2 := batch{createObject(class)} + + for i := 0; i < parallelReqs; i++ { + go func(j int) { + defer wg.Done() + + c := createHttpClient() + if j%2 == 0 { + performRequest(c, createRequest(url+"batch/objects", "POST", batch1)) + } else { + performRequest(c, createRequest(url+"batch/objects", "POST", batch2)) + } + }(i) + } + wg.Wait() + + requestRead := createRequest(url+"objects?limit="+fmt.Sprint(10)+"&class="+class, "GET", nil) + _, body, 
_ := performRequest(c, requestRead) + var result map[string]interface{} + json.Unmarshal(body, &result) + assert.Equal(t, 2, int(result["totalResults"].(float64))) + clearExistingObjects(c, url) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance/stress_tests/stress.go b/platform/dbops/binaries/weaviate-src/test/acceptance/stress_tests/stress.go new file mode 100644 index 0000000000000000000000000000000000000000..7c56f89e7290efe48e9c97f863f7267aafa62e27 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance/stress_tests/stress.go @@ -0,0 +1,179 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package stress_tests + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net" + "net/http" + "strings" + "time" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +// If there is already a schema present, clear it out +func clearExistingObjects(c *http.Client, url string) { + checkSchemaRequest := createRequest(url+"schema", "GET", nil) + checkSchemaResponseCode, body, err := performRequest(c, checkSchemaRequest) + if err != nil { + panic(errors.Wrap(err, "perform request")) + } + if checkSchemaResponseCode != 200 { + return + } + + var dump models.Schema + if err := json.Unmarshal(body, &dump); err != nil { + panic(errors.Wrap(err, "Could not unmarshal read response")) + } + for _, classObj := range dump.Classes { + requestDelete := createRequest(url+"schema/"+classObj.Class, "DELETE", nil) + responseDeleteCode, _, err := performRequest(c, requestDelete) + if err != nil { + panic(errors.Wrap(err, "Could delete schema")) + } + if responseDeleteCode != 200 { + 
panic(fmt.Sprintf("Could not delete schema, code: %v", responseDeleteCode)) + } + } +} + +func createHttpClient() *http.Client { + httpT := &http.Transport{ + Proxy: http.ProxyFromEnvironment, + DialContext: (&net.Dialer{ + Timeout: 500 * time.Second, + KeepAlive: 120 * time.Second, + }).DialContext, + MaxIdleConnsPerHost: 100, + MaxIdleConns: 100, + IdleConnTimeout: 90 * time.Second, + TLSHandshakeTimeout: 10 * time.Second, + ExpectContinueTimeout: 1 * time.Second, + } + return &http.Client{Transport: httpT} +} + +// createRequest creates requests +func createRequest(url string, method string, payload interface{}) *http.Request { + var body io.Reader = nil + if payload != nil { + jsonBody, err := json.Marshal(payload) + if err != nil { + panic(errors.Wrap(err, "Could not marshal request")) + } + body = bytes.NewBuffer(jsonBody) + } + request, err := http.NewRequest(method, url, body) + if err != nil { + panic(errors.Wrap(err, "Could not create request")) + } + request.Header.Add("Content-Type", "application/json") + request.Header.Add("Accept", "application/json") + + return request +} + +// performRequest runs requests +func performRequest(c *http.Client, request *http.Request) (int, []byte, error) { + for { + response, err := c.Do(request) + if err != nil { + return 0, nil, err + } + + body, err := io.ReadAll(response.Body) + response.Body.Close() + if err != nil { + return 0, nil, err + } + + if response.StatusCode == 200 { + return response.StatusCode, body, nil + } + time.Sleep(time.Millisecond * 10) + var result map[string]interface{} + json.Unmarshal(body, &result) + message := result["error"].([]interface{})[0].(map[string]interface{})["message"].(string) + + if strings.Contains(message, "concurrent transaction") { + time.Sleep(time.Millisecond * 10) + continue + } + return response.StatusCode, body, nil + } +} + +func createSchemaRequest(url string, class string, multiTenantcy bool) *http.Request { + classObj := &models.Class{ + Class: class, + 
Description: "Dummy class for benchmarking purposes", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: multiTenantcy, + }, + Properties: []*models.Property{ + { + DataType: []string{"int"}, + Description: "The value of the counter in the dataset", + Name: "counter", + }, + { + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Description: "The value of the counter in the dataset", + Name: "name", + }, + }, + } + request := createRequest(url+"schema", "POST", classObj) + return request +} + +func createObject(class string) []*models.Object { + objects := []*models.Object{ + { + Class: class, + ID: strfmt.UUID(uuid.New().String()), + Vector: models.C11yVector([]float32{1.0, 2, 534, 324, 0.0001}), + Properties: map[string]interface{}{ + "counter": 50, + "counter2": 45, + "something": "JustSlammedMyKeyboardahudghoig", + }, + }, + } + return objects +} + +func createBatch(class string, batchSize int, tenants []models.Tenant) []*models.Object { + objects := make([]*models.Object, 0, batchSize) + for i := 0; i < batchSize; i++ { + objects = append(objects, &models.Object{ + Class: class, + ID: strfmt.UUID(uuid.New().String()), + Properties: map[string]interface{}{ + "counter": i, + "name": tenants[i%len(tenants)].Name, + }, + Tenant: tenants[i%len(tenants)].Name, + }) + } + return objects +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_lsmkv/data_integrity_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_lsmkv/data_integrity_test.go new file mode 100644 index 0000000000000000000000000000000000000000..602dced1efe231e6bedfee69b088daed639c7a33 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_lsmkv/data_integrity_test.go @@ -0,0 +1,105 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate 
B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package main + +import ( + "bytes" + "context" + "fmt" + "os" + "path" + "testing" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/adapters/repos/db/lsmkv" + "github.com/weaviate/weaviate/entities/cyclemanager" +) + +func TestLSMKV_ChecksumRoundtrip(t *testing.T) { + for _, enableChecksumsInitially := range []bool{true, false} { + t.Run(fmt.Sprintf("enableChecksumsInitially=%v", enableChecksumsInitially), func(t *testing.T) { + var ( + key = []byte("primary_key") + val = []byte("some_value") + dataDir = t.TempDir() + ) + + bucket, err := newTestBucket(dataDir, enableChecksumsInitially) + require.NoError(t, err) + + require.NoError(t, bucket.Put(key, val)) + + // verify that you can read the value + res, err := bucket.Get(key) + require.NoError(t, err) + require.Equal(t, val, res) + + // flush the segment to disk + require.NoError(t, bucket.Shutdown(context.Background())) + + // verify that you can boostrap from the data on disk when checksums are enabled + bucket, err = newTestBucket(dataDir, true) + require.NoError(t, err) + + res, err = bucket.Get(key) + require.Nil(t, err) + require.Equal(t, val, res) + + require.NoError(t, bucket.Shutdown(context.Background())) + }) + } +} + +func TestLSMKV_ChecksumsCatchCorruptedFiles(t *testing.T) { + var ( + key = []byte("primary_key") + val = []byte("some_value") + dataDir = t.TempDir() + ) + + // create a bucket with checksums enabled and flush some data to disk + bucket, err := newTestBucket(dataDir, true) + require.NoError(t, err) + require.NoError(t, bucket.Put(key, val)) + require.NoError(t, bucket.FlushAndSwitch()) + require.NoError(t, bucket.Shutdown(context.Background())) + + entries, err := os.ReadDir(dataDir) + require.NoError(t, err) + require.Len(t, entries, 3, "segment files should be created") + + segmentPath := path.Join(dataDir, entries[2].Name()) + fileContent, err := 
os.ReadFile(segmentPath) + require.NoError(t, err) + + valueOffset := bytes.Index(fileContent, val) + require.NotEqual(t, -1, valueOffset, "value was not find in the segment file") + + // corrupt the file contents + fileContent[valueOffset] = 0xFF + require.NoError(t, os.WriteFile(segmentPath, fileContent, os.ModePerm)) + + _, err = newTestBucket(dataDir, true) + require.ErrorContains(t, err, "invalid checksum") +} + +func newTestBucket(dataPath string, checkSumEnabled bool) (*lsmkv.Bucket, error) { + log, _ := test.NewNullLogger() + return lsmkv.NewBucketCreator(). + NewBucket(context.Background(), dataPath, "", log, nil, + cyclemanager.NewCallbackGroupNoop(), + cyclemanager.NewCallbackGroupNoop(), + lsmkv.WithSegmentsChecksumValidationEnabled(checkSumEnabled), + lsmkv.WithCalcCountNetAdditions(true), + ) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_lsmkv/replace_bucket_acceptance_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_lsmkv/replace_bucket_acceptance_test.go new file mode 100644 index 0000000000000000000000000000000000000000..3d7b88e1e8b209d01636b3ee7ea43666698ec4a1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_lsmkv/replace_bucket_acceptance_test.go @@ -0,0 +1,214 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package main + +import ( + "context" + "fmt" + "math/rand" + "path/filepath" + "runtime" + "sync" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/adapters/repos/db/lsmkv" + "github.com/weaviate/weaviate/adapters/repos/db/priorityqueue" + "github.com/weaviate/weaviate/entities/cyclemanager" +) + +func TestLSMKV_ReplaceBucket(t *testing.T) { + putThreshold := 100 * time.Millisecond + getThreshold := 100 * time.Millisecond + trackWorstQueries := 10 + workers := 4 + + dir := t.TempDir() + ctx := context.Background() + c := lsmkv.NewBucketCreator() + + logger := logrus.New() + logger.SetLevel(logrus.DebugLevel) + + if n := runtime.GOMAXPROCS(0); n < workers { + workers = n + logger.Infof("reducing workers to %d", workers) + } + + flushCallbacks := cyclemanager.NewCallbackGroup("flush", logger, 1) + compactionCallbacks := cyclemanager.NewCallbackGroup("compaction", logger, 1) + flushCycle := cyclemanager.NewManager(cyclemanager.MemtableFlushCycleTicker(), flushCallbacks.CycleCallback, logger) + flushCycle.Start() + compactionCycle := cyclemanager.NewManager(cyclemanager.CompactionCycleTicker(), compactionCallbacks.CycleCallback, logger) + compactionCycle.Start() + + bucket, err := c.NewBucket(ctx, filepath.Join(dir, "my-bucket"), "", logger, nil, + compactionCallbacks, flushCallbacks, + lsmkv.WithPread(true), + lsmkv.WithDynamicMemtableSizing(1, 2, 1, 4), + ) + if err != nil { + panic(err) + } + + defer bucket.Shutdown(ctx) + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + + results := make([]result, workers) + wg := sync.WaitGroup{} + for workerID := 0; workerID < workers; workerID++ { + wg.Add(1) + go worker(ctx, t, &wg, workerID, bucket, logger, putThreshold, getThreshold, trackWorstQueries, results) + } + + wg.Wait() + logger.WithField("concurrency", workers).Infof("%d workers completed", workers) + + var putOutsideThreshold []float32 + var 
getOutsideThreshold []float32 + + totalIngested := 0 + totalSpotChecks := 0 + + for _, r := range results { + totalIngested += r.ingested + totalSpotChecks += r.getSpotChecks + + for r.worstPutQueries.Len() > 0 { + tookMs := r.worstPutQueries.Pop().Dist * 1000 + if tookMs > float32(putThreshold.Milliseconds()) { + putOutsideThreshold = append(putOutsideThreshold, tookMs) + } + } + + for r.worstGetQueries.Len() > 0 { + tookMs := r.worstGetQueries.Pop().Dist * 1000 + if tookMs > float32(getThreshold.Milliseconds()) { + getOutsideThreshold = append(getOutsideThreshold, tookMs) + } + } + } + + if len(putOutsideThreshold) > 0 { + t.Errorf("%d put queries outside threshold (%s): %v", len(putOutsideThreshold), putThreshold, putOutsideThreshold) + } else { + logger.Infof("all put queries were within threshold (%s)", putThreshold) + } + + if len(getOutsideThreshold) > 0 { + t.Errorf("%d get queries outside threshold (%s) : %v", len(getOutsideThreshold), getThreshold, getOutsideThreshold) + } else { + logger.Infof("all get queries were within threshold (%s)", getThreshold) + } + + // This a sanity check to make sure the test actually ran. The expected total + // is a lot more, but if the test were to just block for 60s and do nothing, + // this sanity check should catch it. 
+ if totalIngested < 500_000 { + t.Errorf("expected at least 500k entries but got %d", totalIngested) + } else { + logger.Infof("ingested %d entries", totalIngested) + } + if totalSpotChecks < 250_000 { + t.Errorf("expected at least 250k spot checks but got %d", totalSpotChecks) + } else { + logger.Infof("performed %d spot checks", totalSpotChecks) + } +} + +type result struct { + workerID int + worstPutQueries *priorityqueue.Queue[float32] + worstGetQueries *priorityqueue.Queue[float32] + ingested int + getSpotChecks int +} + +func worker(ctx context.Context, t *testing.T, wg *sync.WaitGroup, workerID int, bucket *lsmkv.Bucket, logger logrus.FieldLogger, + putThreshold time.Duration, getThreshold time.Duration, trackWorstQueries int, results []result, +) { + defer wg.Done() + + logger = logger.WithField("worker_id", workerID) + worstPutQueries := priorityqueue.NewMin[float32](trackWorstQueries) + worstGetQueries := priorityqueue.NewMin[float32](trackWorstQueries) + + i := 0 + totalAsserted := 0 + for { + if ctx.Err() != nil { + break + } + before := time.Now() + bucket.Put([]byte(fmt.Sprintf("worker-%d-key-%d", workerID, i)), []byte(fmt.Sprintf("value-%d", i))) + took := time.Since(before) + trackWorstQuery(worstPutQueries, i, took, trackWorstQueries) + if took > putThreshold { + logger.Warnf("put took too long: %s", time.Since(before)) + } + + // perform spot checks every 10000 iterations + // the spot checks pick a random increment between 0 and 100 between ids + if i > 100 && i%10000 == 0 { + j := 0 + for j < i { + before := time.Now() + val, err := bucket.Get([]byte(fmt.Sprintf("worker-%d-key-%d", workerID, j))) + if err != nil { + t.Errorf("failed to get key-%d: %s", j, err) + return + } + took := time.Since(before) + if took > getThreshold { + logger.Warnf("get took too long: %s", time.Since(before)) + } + + if string(val) != fmt.Sprintf("value-%d", j) { + t.Errorf("expected value-%d but got %s", j, val) + } + + trackWorstQuery(worstGetQueries, j, took, 
trackWorstQueries) + + totalAsserted++ + j += rand.Intn(100) + } + } + + if i%100_000 == 0 { + logger.WithField("current_id", i).Infof("worker %d inserted %d entries", workerID, i) + } + + i++ + } + + results[workerID] = result{ + workerID: workerID, + worstPutQueries: worstPutQueries, + worstGetQueries: worstGetQueries, + ingested: i, + getSpotChecks: totalAsserted, + } + + logger.WithField("imported", i).WithField("get_spot_checks", totalAsserted).Infof("completed worker") +} + +func trackWorstQuery(heap *priorityqueue.Queue[float32], i int, took time.Duration, trackWorstQueries int) { + if heap.Len() < trackWorstQueries { + heap.Insert(uint64(i), float32(took.Seconds())) + } else if heap.Top().Dist < float32(took.Seconds()) { + heap.Pop() + heap.Insert(uint64(i), float32(took.Seconds())) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_lsmkv_long_running/.gitignore b/platform/dbops/binaries/weaviate-src/test/acceptance_lsmkv_long_running/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..8fce603003c1e5857013afec915ace9fc8bcdb8d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_lsmkv_long_running/.gitignore @@ -0,0 +1 @@ +data/ diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_lsmkv_long_running/replace_bucket_acceptance_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_lsmkv_long_running/replace_bucket_acceptance_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ed39f9834e81178d0e7358cdcc67438c6417ed0f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_lsmkv_long_running/replace_bucket_acceptance_test.go @@ -0,0 +1,286 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +//go:build manual +// +build manual + +package main + +import ( + "context" + "fmt" + "math/rand" + "path/filepath" + "runtime" + "sync" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/adapters/repos/db/lsmkv" + "github.com/weaviate/weaviate/adapters/repos/db/priorityqueue" + "github.com/weaviate/weaviate/entities/cyclemanager" +) + +func TestLSMKV_ReplaceBucket(t *testing.T) { + putThreshold := 1000 * time.Millisecond + getThreshold := 500 * time.Millisecond + + writeDuration := time.Minute + readDuration := time.Minute + + trackWorstQueries := 10 + workers := 3 + + dir := "./data" + ctx := context.Background() + c := lsmkv.NewBucketCreator() + + logger := logrus.New() + logger.SetLevel(logrus.DebugLevel) + + if n := runtime.GOMAXPROCS(0); n < workers { + workers = n + logger.Infof("reducing workers to %d", workers) + } + + flushCallbacks := cyclemanager.NewCallbackGroup("flush", logger, 1) + compactionCallbacks := cyclemanager.NewCallbackGroup("compaction", logger, 1) + flushCycle := cyclemanager.NewManager(cyclemanager.MemtableFlushCycleTicker(), flushCallbacks.CycleCallback, logger) + flushCycle.Start() + compactionCycle := cyclemanager.NewManager(cyclemanager.CompactionCycleTicker(), compactionCallbacks.CycleCallback, logger) + compactionCycle.Start() + + logger.Info("loading bucket") + bucket, err := c.NewBucket(ctx, filepath.Join(dir, "my-bucket"), "", logger, nil, + compactionCallbacks, flushCallbacks, + lsmkv.WithPread(true), + ) + if err != nil { + panic(err) + } + + logger.Info("bucket is ready") + + defer bucket.Shutdown(ctx) + + ctx, cancel := context.WithTimeout(context.Background(), 15*time.Minute) + defer cancel() + + results := make([]result, workers) + wg := sync.WaitGroup{} + + mode := newMode(logger, writeDuration, readDuration) + + for workerID := 0; workerID < workers; workerID++ { + wg.Add(1) + go worker(ctx, t, mode, &wg, workerID, bucket, logger, putThreshold, 
getThreshold, trackWorstQueries, results) + } + + modeCtx, cancelMode := context.WithCancel(context.Background()) + go mode.alternate(modeCtx) + + wg.Wait() + cancelMode() + + logger.WithField("concurrency", workers).Infof("%d workers completed", workers) + + var putOutsideThreshold []float32 + var getOutsideThreshold []float32 + + totalIngested := 0 + totalSpotChecks := 0 + + for _, r := range results { + totalIngested += r.ingested + totalSpotChecks += r.getSpotChecks + + for r.worstPutQueries.Len() > 0 { + tookMs := r.worstPutQueries.Pop().Dist * 1000 + if tookMs > float32(putThreshold.Milliseconds()) { + putOutsideThreshold = append(putOutsideThreshold, tookMs) + } + } + + for r.worstGetQueries.Len() > 0 { + tookMs := r.worstGetQueries.Pop().Dist * 1000 + if tookMs > float32(getThreshold.Milliseconds()) { + getOutsideThreshold = append(getOutsideThreshold, tookMs) + } + } + } + + if len(putOutsideThreshold) > 0 { + t.Errorf("%d put queries outside threshold (%s): %v", len(putOutsideThreshold), putThreshold, putOutsideThreshold) + } else { + logger.Infof("all put queries were within threshold (%s)", putThreshold) + } + + if len(getOutsideThreshold) > 0 { + t.Errorf("%d get queries outside threshold (%s) : %v", len(getOutsideThreshold), getThreshold, getOutsideThreshold) + } else { + logger.Infof("all get queries were within threshold (%s)", getThreshold) + } + + // This a sanity check to make sure the test actually ran. The expected total + // is a lot more, but if the test were to just block for 60s and do nothing, + // this sanity check should catch it. 
+ if totalIngested < 500_000 { + t.Errorf("expected at least 500k entries but got %d", totalIngested) + } else { + logger.Infof("ingested %d entries", totalIngested) + } + if totalSpotChecks < 250_000 { + t.Errorf("expected at least 250k spot checks but got %d", totalSpotChecks) + } else { + logger.Infof("performed %d spot checks", totalSpotChecks) + } +} + +type result struct { + workerID int + worstPutQueries *priorityqueue.Queue[float32] + worstGetQueries *priorityqueue.Queue[float32] + ingested int + getSpotChecks int +} + +type mode struct { + mu sync.Mutex + write bool + writeDuration time.Duration + readDuration time.Duration + logger logrus.FieldLogger +} + +func newMode(logger logrus.FieldLogger, writeDuration, readDuration time.Duration) *mode { + return &mode{ + write: true, + writeDuration: writeDuration, + readDuration: readDuration, + logger: logger, + } +} + +func (m *mode) setWrite() { + m.mu.Lock() + m.write = true + m.mu.Unlock() + m.logger.Info("switched to WRITE mode") +} + +func (m *mode) setRead() { + m.mu.Lock() + m.write = false + m.mu.Unlock() + m.logger.Info("switched to READ mode") +} + +func (m *mode) isWrite() bool { + m.mu.Lock() + defer m.mu.Unlock() + return m.write +} + +func (m *mode) alternate(ctx context.Context) { + for { + if ctx.Err() != nil { + return + } + + if m.isWrite() { + time.Sleep(m.writeDuration) + m.setRead() + } else { + time.Sleep(m.readDuration) + m.setWrite() + } + } +} + +func worker(ctx context.Context, t *testing.T, mode *mode, wg *sync.WaitGroup, workerID int, bucket *lsmkv.Bucket, logger logrus.FieldLogger, + putThreshold time.Duration, getThreshold time.Duration, trackWorstQueries int, results []result, +) { + defer wg.Done() + + logger = logger.WithField("worker_id", workerID) + worstPutQueries := priorityqueue.NewMin[float32](trackWorstQueries) + worstGetQueries := priorityqueue.NewMin[float32](trackWorstQueries) + + i := 0 + totalAsserted := 0 + for { + if ctx.Err() != nil { + break + } + + if 
mode.isWrite() { + before := time.Now() + bucket.Put([]byte(fmt.Sprintf("worker-%d-key-%d", workerID, i)), []byte(fmt.Sprintf("value-%d", i))) + took := time.Since(before) + trackWorstQuery(worstPutQueries, i, took, trackWorstQueries) + if took > putThreshold { + logger.Warnf("put took too long: %s", time.Since(before)) + } + + if i%100_000 == 0 { + logger.WithField("current_id", i).Infof("worker %d inserted %d entries", workerID, i) + } + + i++ + continue + } + + // read mode + j := 0 + for j < i { + before := time.Now() + val, err := bucket.Get([]byte(fmt.Sprintf("worker-%d-key-%d", workerID, j))) + if err != nil { + t.Errorf("failed to get key-%d: %s", j, err) + return + } + took := time.Since(before) + if took > getThreshold { + logger.Warnf("get took too long: %s", time.Since(before)) + } + + if string(val) != fmt.Sprintf("value-%d", j) { + t.Errorf("expected value-%d but got %s", j, val) + } + + trackWorstQuery(worstGetQueries, j, took, trackWorstQueries) + + totalAsserted++ + j += rand.Intn(100) + } + + } + + results[workerID] = result{ + workerID: workerID, + worstPutQueries: worstPutQueries, + worstGetQueries: worstGetQueries, + ingested: i, + getSpotChecks: totalAsserted, + } + + logger.WithField("imported", i).WithField("get_spot_checks", totalAsserted).Infof("completed worker") +} + +func trackWorstQuery(heap *priorityqueue.Queue[float32], i int, took time.Duration, trackWorstQueries int) { + if heap.Len() < trackWorstQueries { + heap.Insert(uint64(i), float32(took.Seconds())) + } else if heap.Top().Dist < float32(took.Seconds()) { + heap.Pop() + heap.Insert(uint64(i), float32(took.Seconds())) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/autoschema_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/autoschema_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5301555070d993dd547475395f11d365783a0f66 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/autoschema_test.go @@ -0,0 +1,301 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package acceptance_with_go_client + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + client "github.com/weaviate/weaviate-go-client/v5/weaviate" + "github.com/weaviate/weaviate-go-client/v5/weaviate/graphql" + "github.com/weaviate/weaviate/entities/models" +) + +func TestAutoschemaCasingClass(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + upperClassName := "RandomBlueTree" + lowerClassName := "randomBlueTree" + + cases := []struct { + className1 string + className2 string + }{ + {className1: upperClassName, className2: upperClassName}, + {className1: lowerClassName, className2: lowerClassName}, + {className1: upperClassName, className2: lowerClassName}, + {className1: lowerClassName, className2: upperClassName}, + } + for _, tt := range cases { + t.Run(tt.className1+" "+tt.className2, func(t *testing.T) { + c.Schema().ClassDeleter().WithClassName(tt.className1).Do(ctx) + c.Schema().ClassDeleter().WithClassName(tt.className2).Do(ctx) + creator := c.Data().Creator() + _, err := creator.WithClassName(tt.className1).Do(ctx) + require.Nil(t, err) + + _, err = creator.WithClassName(tt.className2).Do(ctx) + require.Nil(t, err) + + // Regardless of whether a class exists or not, the delete operation will always return a success + require.Nil(t, c.Schema().ClassDeleter().WithClassName(upperClassName).Do(ctx)) + require.Nil(t, c.Schema().ClassDeleter().WithClassName(lowerClassName).Do(ctx)) + }) + } +} + +func 
TestAutoschemaCasingProps(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + className := "RandomGreenBike" + + upperPropName := "SomeProp" + lowerPropName := "someProp" + cases := []struct { + prop1 string + prop2 string + }{ + {prop1: upperPropName, prop2: upperPropName}, + {prop1: lowerPropName, prop2: lowerPropName}, + {prop1: upperPropName, prop2: lowerPropName}, + {prop1: lowerPropName, prop2: upperPropName}, + } + for _, tt := range cases { + t.Run(tt.prop1+" "+tt.prop2, func(t *testing.T) { + c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + creator := c.Data().Creator() + _, err := creator.WithClassName(className).Do(ctx) + require.Nil(t, err) + + creator1 := c.Data().Creator() + _, err = creator1.WithClassName(className).WithProperties(map[string]string{tt.prop1: "something"}).Do(ctx) + require.Nil(t, err) + + creator2 := c.Data().Creator() + _, err = creator2.WithClassName(className).WithProperties(map[string]string{tt.prop2: "other value"}).Do(ctx) + require.Nil(t, err) + + // three objects should have been added + result, err := c.GraphQL().Aggregate().WithClassName(className).WithFields(graphql.Field{ + Name: "meta", Fields: []graphql.Field{ + {Name: "count"}, + }, + }).Do(ctx) + require.Nil(t, err) + require.Equal(t, result.Data["Aggregate"].(map[string]interface{})[className].([]interface{})[0].(map[string]interface{})["meta"].(map[string]interface{})["count"], 3.) 
+ + require.Nil(t, c.Schema().ClassDeleter().WithClassName(className).Do(ctx)) + }) + } +} + +func TestAutoschemaCasingUpdateProps(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + objId := "67b79643-cf8b-4b22-b206-6e63dbb4e57a" + upperPropName := "SomeProp" + lowerPropName := "someProp" + cases := []struct { + prop1 string + prop2 string + }{ + {prop1: upperPropName, prop2: upperPropName}, + {prop1: lowerPropName, prop2: lowerPropName}, + {prop1: upperPropName, prop2: lowerPropName}, + {prop1: lowerPropName, prop2: upperPropName}, + } + for _, tt := range cases { + t.Run(tt.prop1+" "+tt.prop2, func(t *testing.T) { + className := "RandomOliveTree" + c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + creator := c.Data().Creator() + _, err := creator.WithClassName(className).Do(ctx) + require.Nil(t, err) + + creator1 := c.Data().Creator() + _, err = creator1.WithClassName(className).WithID(objId).WithProperties(map[string]string{tt.prop1: "something"}).Do(ctx) + require.Nil(t, err) + + updater := c.Data().Updater() + err = updater.WithClassName(className).WithID(objId).WithProperties(map[string]string{tt.prop2: "other"}).Do(ctx) + require.Nil(t, err) + + // two objects should have been added (with one update) + result, err := c.GraphQL().Aggregate().WithClassName(className).WithFields(graphql.Field{ + Name: "meta", Fields: []graphql.Field{ + {Name: "count"}, + }, + }).Do(ctx) + require.Nil(t, err) + require.Equal(t, result.Data["Aggregate"].(map[string]interface{})[className].([]interface{})[0].(map[string]interface{})["meta"].(map[string]interface{})["count"], 2.) 
+ + require.Nil(t, c.Schema().ClassDeleter().WithClassName(className).Do(ctx)) + }) + } +} + +func TestAutoschemaPanicOnUnregonizedDataType(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + tests := []struct { + name string + properties map[string]interface{} + containsErrMessage string + }{ + { + name: "unrecognized array property type", + properties: map[string]interface{}{ + "panicProperty": []interface{}{ + []interface{}{ + []interface{}{ + "panic", + }, + }, + }, + }, + containsErrMessage: "property 'panicProperty' on class 'BeautifulWeather': element [0]: unrecognized data type of value", + }, + { + name: "unrecognized nil array property type", + properties: map[string]interface{}{ + "panicProperty": []interface{}{ + []interface{}{ + []interface{}{ + nil, + }, + }, + }, + }, + containsErrMessage: "property 'panicProperty' on class 'BeautifulWeather': element [0]: unrecognized data type of value", + }, + { + name: "array property with nil", + properties: map[string]interface{}{ + "nilPropertyArray": []interface{}{nil}, + }, + containsErrMessage: "property 'nilPropertyArray' on class 'BeautifulWeather': element [0]: unrecognized data type of value ''", + }, + { + name: "empty string array property", + properties: map[string]interface{}{ + "emptyPropertyArray": []string{}, + }, + }, + { + name: "empty interface array property", + properties: map[string]interface{}{ + "emptyPropertyArray": []interface{}{}, + }, + }, + { + name: "empty int array property", + properties: map[string]interface{}{ + "emptyPropertyArray": []int{}, + }, + }, + { + name: "array property with empty string", + properties: map[string]interface{}{ + "emptyPropertyArray": []string{""}, + }, + }, + { + name: "nil property", + properties: map[string]interface{}{ + "nilProperty": nil, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resp, err := c.Data(). 
+ Creator(). + WithClassName("BeautifulWeather"). + WithProperties(tt.properties). + Do(ctx) + + if tt.containsErrMessage != "" { + assert.Nil(t, resp) + assert.NotNil(t, err) + assert.ErrorContains(t, err, tt.containsErrMessage) + } else { + assert.NotNil(t, resp) + assert.Nil(t, err) + } + + err = c.Schema().ClassDeleter().WithClassName("BeautifulWeather").Do(ctx) + require.Nil(t, err) + }) + } +} + +func TestAutoschemaPanicOnUnregonizedDataTypeWithBatch(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + className := "Passage" + t.Run("should not panic with properties defined as empty array, but just return error", func(t *testing.T) { + obj := &models.Object{ + Class: className, + Properties: []interface{}{}, + } + + resp, err := c.Batch().ObjectsBatcher().WithObjects(obj).Do(ctx) + require.Nil(t, err) + require.Len(t, resp, 1) + require.NotNil(t, resp[0].Result) + require.NotNil(t, resp[0].Result.Errors) + require.Len(t, resp[0].Result.Errors.Error, 1) + assert.Equal(t, "could not recognize object's properties: []", resp[0].Result.Errors.Error[0].Message) + + objs, err := c.Data().ObjectsGetter().WithClassName(className).Do(ctx) + require.Nil(t, err) + require.Len(t, objs, 0) + + err = c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + require.Nil(t, err) + }) + + t.Run("should create object in batch without problems", func(t *testing.T) { + obj := &models.Object{ + Class: className, + Properties: map[string]interface{}{ + "stringProperty": "value", + }, + } + resp, err := c.Batch().ObjectsBatcher().WithObjects(obj).Do(ctx) + require.Nil(t, err) + require.Len(t, resp, 1) + require.NotNil(t, resp[0].Result) + require.Nil(t, resp[0].Result.Errors) + require.NotNil(t, resp[0].Object) + assert.True(t, len(resp[0].Object.Vector) > 0) + + objs, err := c.Data().ObjectsGetter().WithClassName(className).Do(ctx) + require.Nil(t, err) + require.Len(t, objs, 
1) + + err = c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + require.Nil(t, err) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/batch_ref_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/batch_ref_test.go new file mode 100644 index 0000000000000000000000000000000000000000..2b0ca539e025da82c61ba79089023fbd6c6cf0e9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/batch_ref_test.go @@ -0,0 +1,134 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package acceptance_with_go_client + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + wvt "github.com/weaviate/weaviate-go-client/v5/weaviate" + "github.com/weaviate/weaviate/entities/models" +) + +const ( + UUID1 = "10523cdd-15a2-42f4-81fa-267fe92f7cd6" + UUID2 = "5b6a08ba-1d46-43aa-89cc-8b070790c6f2" +) + +func TestBatchReferenceCreateNoObjects(t *testing.T) { + client, err := wvt.NewClient(wvt.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + classNameFrom := "GreenTeddyFlowerFrom" + classNameTo := "GreenTeddyFlowerTo" + + // delete class if exists and cleanup after test + client.Schema().ClassDeleter().WithClassName(classNameFrom).Do(ctx) + defer client.Schema().ClassDeleter().WithClassName(classNameFrom).Do(ctx) + client.Schema().ClassDeleter().WithClassName(classNameTo).Do(ctx) + defer client.Schema().ClassDeleter().WithClassName(classNameTo).Do(ctx) + + classTo := &models.Class{Class: classNameTo, Vectorizer: "none"} + require.Nil(t, client.Schema().ClassCreator().WithClass(classTo).Do(ctx)) + + classFrom := &models.Class{ + Class: classNameFrom, + Properties: 
[]*models.Property{ + {Name: "ref", DataType: []string{classNameTo}}, + }, + Vectorizer: "none", + } + require.Nil(t, client.Schema().ClassCreator().WithClass(classFrom).Do(ctx)) + + // no objects exist, ref must fail - note that we tolerate if the target does not exist, however the source must exist + rpb := client.Batch().ReferencePayloadBuilder(). + WithFromClassName(classNameFrom). + WithFromRefProp("ref"). + WithFromID(UUID1). // uuids dont matter as we havent added any objects + WithToClassName(classNameTo). + WithToID(UUID2) + references := []*models.BatchReference{rpb.Payload()} + + resp, err := client.Batch().ReferencesBatcher(). + WithReferences(references...). + Do(context.Background()) + require.Nil(t, err) + require.NotNil(t, resp) + assert.Len(t, resp, len(references)) + for i := range resp { + require.NotNil(t, resp[i].Result) + require.NotNil(t, resp[i].Result.Status) + assert.Equal(t, "FAILED", *resp[i].Result.Status) + assert.NotNil(t, resp[i].Result.Errors) + } +} + +func TestBatchReferenceTargetIsMT(t *testing.T) { + client, err := wvt.NewClient(wvt.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + classNameFrom := "RedTeddyFlowerFrom" + classNameTo := "RedTeddyFlowerTo" + + // delete class if exists and cleanup after test + client.Schema().ClassDeleter().WithClassName(classNameFrom).Do(ctx) + defer client.Schema().ClassDeleter().WithClassName(classNameFrom).Do(ctx) + client.Schema().ClassDeleter().WithClassName(classNameTo).Do(ctx) + defer client.Schema().ClassDeleter().WithClassName(classNameTo).Do(ctx) + + classTo := &models.Class{Class: classNameTo, Vectorizer: "none", MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }} + require.Nil(t, client.Schema().ClassCreator().WithClass(classTo).Do(ctx)) + require.Nil(t, client.Schema().TenantsCreator(). + WithClassName(classNameTo). + WithTenants(models.Tenant{Name: "Tenant"}). 
+ Do(context.Background())) + + require.Nil(t, err) + classFrom := &models.Class{ + Class: classNameFrom, + Properties: []*models.Property{ + {Name: "ref", DataType: []string{classNameTo}}, + }, + Vectorizer: "none", + } + require.Nil(t, client.Schema().ClassCreator().WithClass(classFrom).Do(ctx)) + + // add object to target and source class + _, err = client.Data().Creator().WithClassName(classNameTo).WithID(UUID1).WithTenant("Tenant").WithProperties(map[string]interface{}{}).Do(ctx) + require.Nil(t, err) + _, err = client.Data().Creator().WithClassName(classNameFrom).WithID(UUID2).WithProperties(map[string]interface{}{}).Do(ctx) + require.Nil(t, err) + + rpb := client.Batch().ReferencePayloadBuilder(). + WithFromClassName(classNameFrom). + WithFromRefProp("ref"). + WithFromID(UUID2). + WithToID(UUID1) // no to class supplied, will be auto-detected + references := []*models.BatchReference{rpb.Payload()} + + resp, err := client.Batch().ReferencesBatcher(). + WithReferences(references...). + Do(context.Background()) + require.Nil(t, err) + require.NotNil(t, resp) + assert.Len(t, resp, len(references)) + for i := range resp { + require.NotNil(t, resp[i].Result) + require.NotNil(t, resp[i].Result.Status) + assert.Equal(t, "FAILED", *resp[i].Result.Status) + assert.NotNil(t, resp[i].Result.Errors) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/endpoint_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/endpoint_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b3cb0338af869af08828a233606939584874a44a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/endpoint_test.go @@ -0,0 +1,172 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package acceptance_with_go_client + +import ( + "context" + "testing" + + "github.com/weaviate/weaviate-go-client/v5/weaviate/filters" + "github.com/weaviate/weaviate-go-client/v5/weaviate/graphql" + + "github.com/stretchr/testify/require" + client "github.com/weaviate/weaviate-go-client/v5/weaviate" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +func TestUpdatingPropertiesWithNil(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + className := "RandomPinkFlower" + upperPropName := "SomeProp" + lowerPropName := "someProp" + cases := []struct { + prop1 string + prop2 string + }{ + {prop1: upperPropName, prop2: upperPropName}, + {prop1: lowerPropName, prop2: lowerPropName}, + {prop1: upperPropName, prop2: lowerPropName}, + {prop1: lowerPropName, prop2: upperPropName}, + } + for _, tt := range cases { + t.Run(tt.prop1+" "+tt.prop2, func(t *testing.T) { + c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + classCreator := c.Schema().ClassCreator() + class := models.Class{ + Class: className, + Properties: []*models.Property{{ + Name: tt.prop1, + // TODO change to method call + DataType: []string{string(schema.DataTypeText)}, + // TODO change to constant + Tokenization: "whitespace", + }}, + InvertedIndexConfig: &models.InvertedIndexConfig{IndexNullState: true, UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND}, + } + require.Nil(t, classCreator.WithClass(&class).Do(ctx)) + + _, err := c.Data().Creator().WithClassName(className).WithProperties( + map[string]interface{}{tt.prop1: "SomeText"}, + ).WithID(UUID1).Do(ctx) + require.Nil(t, err) + + require.Nil(t, c.Data().Updater().WithClassName(className).WithProperties(map[string]interface{}{tt.prop2: 
nil}).WithID(UUID1).WithMerge().Do(ctx)) + + // update should have cleared the object + getter := c.Data().ObjectsGetter() + objAfterUpdate, err := getter.WithID(UUID1).WithClassName(className).Do(ctx) + require.Nil(t, err) + require.Len(t, objAfterUpdate[0].Properties, 0) + + // test that II has been updated: + // a) no results for when filtering for old value + // b) one result when filtering for null values + filter := filters.Where() + filter.WithValueString("SomeText") + filter.WithOperator(filters.Equal) + filter.WithPath([]string{lowerPropName}) + resultFilter, err := c.GraphQL().Get().WithClassName(className).WithWhere(filter).WithFields(graphql.Field{Name: "_additional", Fields: []graphql.Field{{Name: "id"}}}).Do(ctx) + require.Nil(t, err) + require.Len(t, resultFilter.Data["Get"].(map[string]interface{})[className], 0) + + filter = filters.Where() + filter.WithValueBoolean(true) + filter.WithOperator("IsNull") // replace with real operator after updating go client + filter.WithPath([]string{lowerPropName}) + resultFilter, err = c.GraphQL().Get().WithClassName(className).WithWhere(filter).WithFields(graphql.Field{Name: "_additional", Fields: []graphql.Field{{Name: "id"}}}).Do(ctx) + require.Nil(t, err) + require.Len(t, resultFilter.Data["Get"].(map[string]interface{})[className], 1) + + // Property is still part of the class + schemaClass, err := c.Schema().ClassGetter().WithClassName(className).Do(ctx) + require.Nil(t, err) + require.Len(t, schemaClass.Properties, 1) + }) + } +} + +func TestUpdateWithVectorVectorizer(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + className := "TestUpdateWithVectorWithVec" + c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + + classCreator := c.Schema().ClassCreator() + class := models.Class{ + Class: className, + Properties: 
[]*models.Property{{ + Name: "prop", DataType: []string{string(schema.DataTypeText)}, + }}, + Vectorizer: "text2vec-contextionary", + } + require.Nil(t, classCreator.WithClass(&class).Do(ctx)) + + _, err = c.Data().Creator().WithClassName(className).WithProperties( + map[string]interface{}{"prop": "SomeText"}, + ).WithID(UUID1).Do(ctx) + require.Nil(t, err) + + // get object to have vector + objBeforeUpdate, err := c.Data().ObjectsGetter().WithID(UUID1).WithVector().WithClassName(className).Do(ctx) + require.Nil(t, c.Data().Updater().WithClassName(className).WithVector(objBeforeUpdate[0].Vector).WithProperties(map[string]interface{}{"prop": "Other text"}).WithID(UUID1).WithMerge().Do(ctx)) + + // update should not have changed the vector + objAfterUpdate, err := c.Data().ObjectsGetter().WithID(UUID1).WithVector().WithClassName(className).Do(ctx) + require.Equal(t, objBeforeUpdate[0].Vector, objAfterUpdate[0].Vector) +} + +func TestUpdateWithVectorVectorizerNone(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + className := "TestUpdateWithVectorNoVec" + c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + + classCreator := c.Schema().ClassCreator() + class := models.Class{ + Class: className, + Properties: []*models.Property{{ + Name: "prop", DataType: []string{string(schema.DataTypeText)}, + }}, + Vectorizer: "none", + } + require.Nil(t, classCreator.WithClass(&class).Do(ctx)) + + vecBefore := []float32{0.1, 0.2, 0.3} + + _, err = c.Data().Creator().WithClassName(className).WithProperties( + map[string]interface{}{"prop": "SomeText"}, + ).WithID(UUID1).WithVector(vecBefore).Do(ctx) + require.Nil(t, err) + + // update without vector should not change the vector + require.Nil(t, c.Data().Updater().WithClassName(className).WithProperties(map[string]interface{}{"prop": "Other 
text"}).WithID(UUID1).WithMerge().Do(ctx)) + objAfterUpdateNoVector, err := c.Data().ObjectsGetter().WithID(UUID1).WithVector().WithClassName(className).Do(ctx) + require.ElementsMatch(t, vecBefore, objAfterUpdateNoVector[0].Vector) + + // update with vector should change the vector + vecAfter := []float32{0.1, 0.2, 0.4} + require.Nil(t, c.Data().Updater().WithClassName(className).WithProperties(map[string]interface{}{"prop": "Other text"}).WithID(UUID1).WithMerge().WithVector(vecAfter).Do(ctx)) + objAfterUpdateWithVector, err := c.Data().ObjectsGetter().WithID(UUID1).WithVector().WithClassName(className).Do(ctx) + require.ElementsMatch(t, vecAfter, objAfterUpdateWithVector[0].Vector) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/generative_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/generative_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b49bde5e5970ee41bd5953051dc78189394dde3c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/generative_test.go @@ -0,0 +1,171 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package acceptance_with_go_client + +import ( + "context" + "strings" + "testing" + + "github.com/google/uuid" + + "github.com/stretchr/testify/require" + client "github.com/weaviate/weaviate-go-client/v5/weaviate" + "github.com/weaviate/weaviate-go-client/v5/weaviate/graphql" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +func TestGenerative(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + className := "BigScaryMonsterDog" + c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + classCreator := c.Schema().ClassCreator() + class := models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "first", + DataType: []string{string(schema.DataTypeText)}, + }, + { + Name: "second", + DataType: []string{string(schema.DataTypeText)}, + }, + }, + ModuleConfig: map[string]interface{}{ + "generative-dummy": map[string]interface{}{}, + }, + } + require.Nil(t, classCreator.WithClass(&class).Do(ctx)) + uids := []string{uuid.New().String(), uuid.New().String()} + _, err = c.Data().Creator().WithClassName(className).WithProperties( + map[string]interface{}{"first": "one", "second": "two"}, + ).WithID(uids[0]).WithVector([]float32{1, 0}).Do(ctx) + require.Nil(t, err) + + _, err = c.Data().Creator().WithClassName(className).WithProperties( + map[string]interface{}{"first": "three", "second": "four"}, + ).WithID(uids[1]).WithVector([]float32{1, 0}).Do(ctx) + require.Nil(t, err) + nv := graphql.NearVectorArgumentBuilder{} + + t.Run("single result", func(t *testing.T) { + gs := graphql.NewGenerativeSearch().SingleResult("Input: {first} and {second}") + + fields := graphql.Field{ + Name: "_additional{id}", + } + + result, err := 
c.GraphQL().Get().WithClassName(className).WithNearVector(nv.WithVector([]float32{1, 0})).WithGenerativeSearch(gs).WithFields(fields).Do(ctx) + require.Nil(t, err) + + expected := map[string]string{uids[0]: "Input: one and two", uids[1]: "Input: three and four"} + for i := 0; i < 2; i++ { + uidReturn := result.Data["Get"].(map[string]interface{})[className].([]interface{})[i].(map[string]interface{})["_additional"].(map[string]interface{})["id"].(string) + returnString := result.Data["Get"].(map[string]interface{})[className].([]interface{})[i].(map[string]interface{})["_additional"].(map[string]interface{})["generate"].(map[string]interface{})["singleResult"].(string) + require.NotNil(t, returnString) + require.True(t, strings.Contains(returnString, expected[uidReturn]), "expected %s to contain %s", returnString, expected[uidReturn]) + } + }) + + t.Run("grouped result with all properties", func(t *testing.T) { + gs := graphql.NewGenerativeSearch().GroupedResult("summarize") + + result, err := c.GraphQL().Get().WithClassName(className).WithNearVector(nv.WithVector([]float32{1, 0})).WithGenerativeSearch(gs).Do(ctx) + require.Nil(t, err) + + returnString := result.Data["Get"].(map[string]interface{})[className].([]interface{})[0].(map[string]interface{})["_additional"].(map[string]interface{})["generate"].(map[string]interface{})["groupedResult"].(string) + require.NotNil(t, returnString) + expected := "summarize" + require.True(t, strings.Contains(returnString, expected), "expected %s to contain %s", returnString, expected) + + // order is not guaranteed + require.True(t, strings.Contains(returnString, "{\"first\":\"one\",\"second\":\"two\"}"), "got &v", returnString) + require.True(t, strings.Contains(returnString, "{\"first\":\"three\",\"second\":\"four\"}"), "got &v", returnString) + }) + + t.Run("grouped result with selected properties", func(t *testing.T) { + gs := graphql.NewGenerativeSearch().GroupedResult("summarize", "first") + + result, err := 
c.GraphQL().Get().WithClassName(className).WithNearVector(nv.WithVector([]float32{1, 0})).WithGenerativeSearch(gs).Do(ctx) + require.Nil(t, err) + + returnString := result.Data["Get"].(map[string]interface{})[className].([]interface{})[0].(map[string]interface{})["_additional"].(map[string]interface{})["generate"].(map[string]interface{})["groupedResult"].(string) + require.NotNil(t, returnString) + expected := "summarize:" + require.True(t, strings.Contains(returnString, expected), "expected %s to contain %s", returnString, expected) + + // order is not guaranteed + require.True(t, strings.Contains(returnString, "{\"first\":\"one\"}"), "got &v", returnString) + require.True(t, strings.Contains(returnString, "{\"first\":\"three\"}"), "got &v", returnString) + // only "first" was requested, so "second" should not be in the result + require.False(t, strings.Contains(returnString, "second"), "got &v", returnString) + }) +} + +func TestGenerativeUpdate(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + className := "LionsAreKittyCats" + c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + classCreator := c.Schema().ClassCreator() + class := models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "first", + DataType: []string{string(schema.DataTypeText)}, + }, + }, + ModuleConfig: map[string]interface{}{ + "generative-dummy": map[string]interface{}{"first": "second"}, + }, + } + + require.Nil(t, classCreator.WithClass(&class).Do(ctx)) + uids := []string{uuid.New().String(), uuid.New().String()} + _, err = c.Data().Creator().WithClassName(className).WithProperties( + map[string]interface{}{"first": "one"}, + ).WithID(uids[0]).Do(ctx) + require.Nil(t, err) + + gs := graphql.NewGenerativeSearch().SingleResult("Input: {first}") + + fields := graphql.Field{Name: 
"_additional{id}"} + res, err := c.GraphQL().Get().WithClassName(className).WithGenerativeSearch(gs).WithFields(fields).Do(ctx) + require.NoError(t, err) + singelResults := res.Data["Get"].(map[string]interface{})[className].([]interface{})[0].(map[string]interface{})["_additional"].(map[string]interface{})["generate"].(map[string]interface{})["singleResult"] + require.NotNil(t, singelResults) + require.Contains(t, singelResults, "first") + require.Contains(t, singelResults, "second") + + class.ModuleConfig = map[string]interface{}{"generative-dummy": map[string]interface{}{"third": "fourth"}} + require.NoError(t, c.Schema().ClassUpdater().WithClass(&class).Do(ctx)) + + res, err = c.GraphQL().Get().WithClassName(className).WithGenerativeSearch(gs).WithFields(fields).Do(ctx) + require.NoError(t, err) + require.Nil(t, res.Errors) + singelResults2 := res.Data["Get"].(map[string]interface{})[className].([]interface{})[0].(map[string]interface{})["_additional"].(map[string]interface{})["generate"].(map[string]interface{})["singleResult"] + require.NotNil(t, singelResults2) + require.Contains(t, singelResults2, "third") + require.Contains(t, singelResults2, "fourth") + require.NotContains(t, singelResults2, "first") + require.NotContains(t, singelResults2, "second") +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/go.mod b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/go.mod new file mode 100644 index 0000000000000000000000000000000000000000..67e23b2c528d902a81731ca098b204f9000e6240 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/go.mod @@ -0,0 +1,214 @@ +module acceptance_tests_with_client + +go 1.24 + +replace github.com/weaviate/weaviate => ../.. 
+ +require ( + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc + github.com/go-openapi/strfmt v0.23.0 + github.com/google/uuid v1.6.0 + github.com/pkg/errors v0.9.1 + github.com/stretchr/testify v1.10.0 + github.com/tailor-inc/graphql v0.5.7 + github.com/weaviate/weaviate v1.32.4-0.20250821224425-5469448cc83a + github.com/weaviate/weaviate-go-client/v5 v5.4.2-0.20250822111337-d751da280fa2 +) + +require ( + cel.dev/expr v0.24.0 // indirect + cloud.google.com/go v0.121.0 // indirect + cloud.google.com/go/auth v0.16.3 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/compute/metadata v0.7.0 // indirect + cloud.google.com/go/iam v1.5.2 // indirect + cloud.google.com/go/monitoring v1.24.2 // indirect + cloud.google.com/go/storage v1.54.0 // indirect + dario.cat/mergo v1.0.1 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect + github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.1 // indirect + github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/VividCortex/ewma v1.2.0 // indirect + github.com/armon/go-metrics v0.4.1 // indirect + github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect + github.com/aws/aws-sdk-go v1.44.298 // indirect + github.com/aws/aws-sdk-go-v2 v1.36.5 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11 // indirect + github.com/aws/aws-sdk-go-v2/config v1.29.14 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.17.67 // indirect + 
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.36 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.36 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect + github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.30.0 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.4 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.17 // indirect + github.com/aws/aws-sdk-go-v2/service/sagemakerruntime v1.33.4 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.25.3 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.33.19 // indirect + github.com/aws/smithy-go v1.22.4 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/buger/jsonparser v1.1.1 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/cheggaaa/pb/v3 v3.1.4 // indirect + github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect + github.com/containerd/errdefs v1.0.0 // indirect + github.com/containerd/errdefs/pkg v0.3.0 // indirect + github.com/containerd/log v0.1.0 // indirect + github.com/containerd/platforms v0.2.1 // indirect + github.com/cpuguy83/dockercfg v0.3.2 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect + github.com/danaugrs/go-tsne v0.0.0-20200708172100-6b7d1d577fd3 // indirect + github.com/distribution/reference v0.6.0 // indirect + github.com/dlclark/regexp2 v1.10.0 // indirect + github.com/docker/docker v28.3.3+incompatible // indirect + github.com/docker/go-connections v0.5.0 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/ebitengine/purego v0.8.4 // indirect + github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect + 
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect + github.com/fatih/camelcase v1.0.0 // indirect + github.com/fatih/color v1.17.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/getsentry/sentry-go v0.30.0 // indirect + github.com/go-ini/ini v1.67.0 // indirect + github.com/go-jose/go-jose/v4 v4.0.5 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.3.0 // indirect + github.com/go-openapi/analysis v0.23.0 // indirect + github.com/go-openapi/errors v0.22.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/jsonreference v0.21.0 // indirect + github.com/go-openapi/loads v0.22.0 // indirect + github.com/go-openapi/runtime v0.24.2 // indirect + github.com/go-openapi/spec v0.21.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect + github.com/go-openapi/validate v0.24.0 // indirect + github.com/goccy/go-json v0.10.5 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/google/btree v1.1.3 // indirect + github.com/google/s2a-go v0.1.9 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect + github.com/googleapis/gax-go/v2 v2.15.0 // indirect + github.com/gregjones/httpcache v0.0.0-20171119193500-2bcd89a1743f // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-hclog v1.6.3 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-metrics v0.5.4 // indirect + github.com/hashicorp/go-msgpack/v2 v2.1.2 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/hashicorp/go-sockaddr v1.0.0 // indirect + github.com/hashicorp/golang-lru v1.0.2 // indirect + github.com/hashicorp/memberlist v0.5.2 // indirect + github.com/hashicorp/raft v1.7.2 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + 
github.com/josharian/intern v1.0.0 // indirect + github.com/karrick/godirwalk v1.15.3 // indirect + github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/klauspost/cpuid/v2 v2.2.10 // indirect + github.com/lanrat/extsort v1.0.2 // indirect + github.com/launchdarkly/ccache v1.1.0 // indirect + github.com/launchdarkly/eventsource v1.6.2 // indirect + github.com/launchdarkly/go-jsonstream/v3 v3.1.0 // indirect + github.com/launchdarkly/go-sdk-common/v3 v3.2.0 // indirect + github.com/launchdarkly/go-sdk-events/v3 v3.4.0 // indirect + github.com/launchdarkly/go-semver v1.0.3 // indirect + github.com/launchdarkly/go-server-sdk-evaluation/v3 v3.0.1 // indirect + github.com/launchdarkly/go-server-sdk/v7 v7.8.0 // indirect + github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683 // indirect + github.com/magiconair/properties v1.8.10 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.15 // indirect + github.com/miekg/dns v1.1.43 // indirect + github.com/minio/crc64nvme v1.0.1 // indirect + github.com/minio/md5-simd v1.1.2 // indirect + github.com/minio/minio-go/v7 v7.0.91 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/go-archive v0.1.0 // indirect + github.com/moby/patternmatcher v0.6.0 // indirect + github.com/moby/sys/sequential v0.6.0 // indirect + github.com/moby/sys/user v0.4.0 // indirect + github.com/moby/sys/userns v0.1.0 // indirect + github.com/moby/term v0.5.0 // indirect + github.com/morikuni/aec v1.0.0 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/nyaruka/phonenumbers v1.0.54 // indirect + github.com/oklog/ulid v1.3.1 // indirect + github.com/opencontainers/go-digest 
v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.1 // indirect + github.com/opentracing/opentracing-go v1.2.0 // indirect + github.com/patrickmn/go-cache v2.1.0+incompatible // indirect + github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect + github.com/prometheus/client_golang v1.20.5 // indirect + github.com/prometheus/client_model v0.6.2 // indirect + github.com/prometheus/common v0.65.0 // indirect + github.com/prometheus/procfs v0.15.1 // indirect + github.com/rivo/uniseg v0.2.0 // indirect + github.com/rs/xid v1.6.0 // indirect + github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 // indirect + github.com/shirou/gopsutil/v4 v4.25.5 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/spaolacci/murmur3 v1.1.0 // indirect + github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/termie/go-shutil v0.0.0-20140729215957-bcacb06fecae // indirect + github.com/testcontainers/testcontainers-go v0.38.0 // indirect + github.com/tklauser/go-sysconf v0.3.14 // indirect + github.com/tklauser/numcpus v0.9.0 // indirect + github.com/urfave/cli/v2 v2.27.5 // indirect + github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect + github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + github.com/weaviate/s5cmd/v2 v2.0.1 // indirect + github.com/weaviate/sroar v0.0.11 // indirect + github.com/weaviate/tiktoken-go v0.0.3 // indirect + github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect + github.com/yusufpapurcu/wmi v1.2.4 // indirect + github.com/zeebo/errs v1.4.0 // indirect + go.mongodb.org/mongo-driver v1.14.0 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/detectors/gcp v1.36.0 // 
indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect + go.opentelemetry.io/otel v1.36.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 // indirect + go.opentelemetry.io/otel/metric v1.36.0 // indirect + go.opentelemetry.io/otel/sdk v1.36.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.36.0 // indirect + go.opentelemetry.io/otel/trace v1.36.0 // indirect + go.opentelemetry.io/proto/otlp v1.4.0 // indirect + go.uber.org/mock v0.4.0 // indirect + golang.org/x/crypto v0.40.0 // indirect + golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa // indirect + golang.org/x/net v0.42.0 // indirect + golang.org/x/oauth2 v0.30.0 // indirect + golang.org/x/sync v0.16.0 // indirect + golang.org/x/sys v0.34.0 // indirect + golang.org/x/text v0.27.0 // indirect + golang.org/x/time v0.12.0 // indirect + gonum.org/v1/gonum v0.15.1 // indirect + google.golang.org/api v0.245.0 // indirect + google.golang.org/genproto v0.0.0-20250603155806-513f23925822 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 // indirect + google.golang.org/grpc v1.74.2 // indirect + google.golang.org/protobuf v1.36.6 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/go.sum b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/go.sum new file mode 100644 index 0000000000000000000000000000000000000000..c292d14bc53f3007798b65162fe7989c73c90b9c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/go.sum @@ -0,0 +1,798 @@ +cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY= +cel.dev/expr v0.24.0/go.mod 
h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.121.0 h1:pgfwva8nGw7vivjZiRfrmglGWiCJBP+0OmDpenG/Fwg= +cloud.google.com/go v0.121.0/go.mod h1:rS7Kytwheu/y9buoDmu5EIpMMCI4Mb8ND4aeN4Vwj7Q= +cloud.google.com/go/auth v0.16.3 h1:kabzoQ9/bobUmnseYnBO6qQG7q4a/CffFRlJSxv2wCc= +cloud.google.com/go/auth v0.16.3/go.mod h1:NucRGjaXfzP1ltpcQ7On/VTZ0H4kWB5Jy+Y9Dnm76fA= +cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= +cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= +cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= +cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= +cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8= +cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE= +cloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc= +cloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA= +cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE= +cloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY= +cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM= +cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U= +cloud.google.com/go/storage v1.54.0 h1:Du3XEyliAiftfyW0bwfdppm2MMLdpVAfiIg4T2nAI+0= +cloud.google.com/go/storage v1.54.0/go.mod h1:hIi9Boe8cHxTyaeqh7KMMwKg088VblFK46C2x/BWaZE= +cloud.google.com/go/trace v1.11.6 h1:2O2zjPzqPYAHrn3OKl029qlqG6W8ZdYaOWRyr8NgMT4= +cloud.google.com/go/trace v1.11.6/go.mod h1:GA855OeDEBiBMzcckLPE2kDunIpC72N+Pq8WFieFjnI= +dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= 
+dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 h1:He8afgbRMd7mFxO99hRNu+6tazq8nFF9lIwo9JFroBk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.9.0 h1:OVoM452qUFBrX+URdH3VpR299ma4kfom0yB0URYky9g= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.9.0/go.mod h1:kUjrAo8bgEwLeZ/CmHqNl3Z/kPm7y6FKfxxK0izYUg4= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.8.0 h1:LR0kAX9ykz8G4YgLCaRDVJ3+n43R8MneB5dTy2konZo= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.8.0/go.mod h1:DWAciXemNf++PQJLeXUB4HHH5OpsAh12HZnu2wXE1jA= +github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.1 h1:lhZdRq7TIx0GJQvSyX2Si406vrYsov2FXGp/RnSEtcs= +github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.1/go.mod h1:8cl44BDmi+effbARHMQjgOKA2AYvcohNm7KEt42mSV8= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs= +github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/BurntSushi/toml v0.3.1/go.mod 
h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 h1:ErKg/3iS1AKcTkf3yixlZ54f9U1rljCkQyEXWUnIUxc= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0/go.mod h1:yAZHSGnqScoU556rBOVkwLze6WP5N+U11RHuWaGVxwY= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0 h1:fYE9p3esPxA/C0rQ0AHhP0drtPXDRhaWiwg1DPqO7IU= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0/go.mod h1:BnBReJLvVYx2CS/UHOgVz2BXKXD9wsQPxZug20nZhd0= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.51.0 h1:OqVGm6Ei3x5+yZmSJG1Mh2NwHvpVmZ08CB5qJhT9Nuk= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.51.0/go.mod h1:SZiPHWGOOk3bl8tkevxkoiwPgsIl6CwrWcbwjfHZpdM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0 h1:6/0iUd0xrnX7qt+mLNRwg5c0PGv8wpE8K90ryANQwMI= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0/go.mod h1:otE2jQekW/PqXk1Awf5lmfokJx4uwuqcj1ab5SpGeW0= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/RoaringBitmap/roaring v0.6.1 h1:O36Tdaj1Fi/zyr25shTHwlQPGdq53+u4WkM08AOEjiE= +github.com/RoaringBitmap/roaring v0.6.1/go.mod h1:WZ83fjBF/7uBHi6QoFyfGL4+xuV4Qn+xFkm4+vSzrhE= +github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1ow= +github.com/VividCortex/ewma v1.2.0/go.mod 
h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA= +github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4= +github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/aws/aws-sdk-go v1.44.298 h1:5qTxdubgV7PptZJmp/2qDwD2JL187ePL7VOxsSh1i3g= +github.com/aws/aws-sdk-go v1.44.298/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/aws/aws-sdk-go-v2 v1.36.5 h1:0OF9RiEMEdDdZEMqF9MRjevyxAQcf6gY+E7vwBILFj0= +github.com/aws/aws-sdk-go-v2 v1.36.5/go.mod h1:EYrzvCCN9CMUTa5+6lf6MM4tq3Zjp8UhSGR/cBsjai0= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11 h1:12SpdwU8Djs+YGklkinSSlcrPyj3H4VifVsKf78KbwA= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11/go.mod h1:dd+Lkp6YmMryke+qxW/VnKyhMBDTYP41Q2Bb+6gNZgY= +github.com/aws/aws-sdk-go-v2/config v1.29.14 h1:f+eEi/2cKCg9pqKBoAIwRGzVb70MRKqWX4dg1BDcSJM= 
+github.com/aws/aws-sdk-go-v2/config v1.29.14/go.mod h1:wVPHWcIFv3WO89w0rE10gzf17ZYy+UVS1Geq8Iei34g= +github.com/aws/aws-sdk-go-v2/credentials v1.17.67 h1:9KxtdcIA/5xPNQyZRgUSpYOE6j9Bc4+D7nZua0KGYOM= +github.com/aws/aws-sdk-go-v2/credentials v1.17.67/go.mod h1:p3C44m+cfnbv763s52gCqrjaqyPikj9Sg47kUVaNZQQ= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 h1:x793wxmUWVDhshP8WW2mlnXuFrO4cOd3HLBroh1paFw= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30/go.mod h1:Jpne2tDnYiFascUEs2AWHJL9Yp7A5ZVy3TNyxaAjD6M= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.36 h1:SsytQyTMHMDPspp+spo7XwXTP44aJZZAC7fBV2C5+5s= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.36/go.mod h1:Q1lnJArKRXkenyog6+Y+zr7WDpk4e6XlR6gs20bbeNo= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.36 h1:i2vNHQiXUvKhs3quBR6aqlgJaiaexz/aNvdCktW/kAM= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.36/go.mod h1:UdyGa7Q91id/sdyHPwth+043HhmP6yP9MBHgbZM0xo8= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo= +github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.30.0 h1:eMOwQ8ZZK+76+08RfxeaGUtRFN6wxmD1rvqovc2kq2w= +github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.30.0/go.mod h1:0b5Rq7rUvSQFYHI1UO0zFTV/S6j6DUyuykXA80C+YOI= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.4 h1:CXV68E2dNqhuynZJPB80bhPQwAKqBWVer887figW6Jc= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.4/go.mod h1:/xFi9KtvBXP97ppCz1TAEvU1Uf66qvid89rbem3wCzQ= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.17 h1:t0E6FzREdtCsiLIoLCWsYliNsRBgyGD/MCK571qk4MI= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.17/go.mod h1:ygpklyoaypuyDvOM5ujWGrYWpAK3h7ugnmKCU/76Ys4= +github.com/aws/aws-sdk-go-v2/service/sagemakerruntime v1.33.4 h1:KTLBj2ZyAsXyd4GeYYfTxXuYZAGXeYf+bTb7LQ4PbHc= 
+github.com/aws/aws-sdk-go-v2/service/sagemakerruntime v1.33.4/go.mod h1:+iASEUUKmfo4pyZrc3acVh8wUGAciCESoSt/Q3cFzvM= +github.com/aws/aws-sdk-go-v2/service/sso v1.25.3 h1:1Gw+9ajCV1jogloEv1RRnvfRFia2cL6c9cuKV2Ps+G8= +github.com/aws/aws-sdk-go-v2/service/sso v1.25.3/go.mod h1:qs4a9T5EMLl/Cajiw2TcbNt2UNo/Hqlyp+GiuG4CFDI= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1 h1:hXmVKytPfTy5axZ+fYbR5d0cFmC3JvwLm5kM83luako= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1/go.mod h1:MlYRNmYu/fGPoxBQVvBYr9nyr948aY/WLUvwBMBJubs= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.19 h1:1XuUZ8mYJw9B6lzAkXhqHlJd/XvaX32evhproijJEZY= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.19/go.mod h1:cQnB8CUnxbMU82JvlqjKR2HBOm3fe9pWorWBza6MBJ4= +github.com/aws/smithy-go v1.22.4 h1:uqXzVZNuNexwc/xrh6Tb56u89WDlJY6HS+KC0S4QSjw= +github.com/aws/smithy-go v1.22.4/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cheggaaa/pb/v3 v3.1.4 h1:DN8j4TVVdKu3WxVwcRKu0sG00IIU6FewoABZzXbRQeo= 
+github.com/cheggaaa/pb/v3 v3.1.4/go.mod h1:6wVjILNBaXMs8c21qRiaUM8BR82erfgau1DQ4iUXmSA= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= +github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls= +github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= +github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= +github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= +github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= +github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc= +github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= +github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= +github.com/danaugrs/go-tsne v0.0.0-20200708172100-6b7d1d577fd3 h1:4V3w6LD+GOVbkF0jtjAzMRczS18+Gx0/nSZ3Pub3h00= 
+github.com/danaugrs/go-tsne v0.0.0-20200708172100-6b7d1d577fd3/go.mod h1:tcVxJUGCaPp/YynlqJTfJtGc/LF9vn4WUZSSmaGu3dA= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0= +github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/docker/docker v28.3.3+incompatible h1:Dypm25kh4rmk49v1eiVbsAtpAsYURjYkaKubwuBdxEI= +github.com/docker/docker v28.3.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw= +github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= +github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M= 
+github.com/envoyproxy/go-control-plane v0.13.4/go.mod h1:kDfuBlDVsSj2MjrLEtRWtHlsWIFcGyB2RMO44Dc5GZA= +github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= +github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0 h1:/G9QYbddjL25KvtKTv3an9lx6VBE2cnb8wp1vEGNYGI= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4= +github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8= +github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= +github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8= +github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= +github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/getsentry/sentry-go v0.30.0 h1:lWUwDnY7sKHaVIoZ9wYqRHJ5iEmoc0pqcRqFkosKzBo= +github.com/getsentry/sentry-go v0.30.0/go.mod h1:WU9B9/1/sHDqeV8T+3VwwbjeR5MSXs/6aqG3mqZrezA= +github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= +github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A= +github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= +github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE= +github.com/go-jose/go-jose/v4 v4.0.5/go.mod 
h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= +github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= +github.com/go-openapi/analysis v0.21.2/go.mod h1:HZwRk4RRisyG8vx2Oe6aqeSQcoxRp47Xkp3+K6q+LdY= +github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU= +github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo= +github.com/go-openapi/errors v0.19.8/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= +github.com/go-openapi/errors v0.19.9/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= +github.com/go-openapi/errors v0.20.2/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= +github.com/go-openapi/errors v0.22.0 h1:c4xY/OLxUBSTiepAg3j/MHuAv5mJhnf53LLMWFB+u/w= +github.com/go-openapi/errors v0.22.0/go.mod h1:J3DmZScxCDufmIMsdOuDHxJbdOGC0xtUynjIx092vXE= 
+github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns= +github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= +github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= +github.com/go-openapi/loads v0.21.1/go.mod h1:/DtAMXXneXFjbQMGEtbamCZb+4x7eGwkvZCvBmwUG+g= +github.com/go-openapi/loads v0.22.0 h1:ECPGd4jX1U6NApCGG1We+uEozOAvXvJSF4nnwHZ8Aco= +github.com/go-openapi/loads v0.22.0/go.mod h1:yLsaTCS92mnSAZX5WWoxszLj0u+Ojl+Zs5Stn1oF+rs= +github.com/go-openapi/runtime v0.24.2 h1:yX9HMGQbz32M87ECaAhGpJjBmErO3QLcgdZj9BzGx7c= +github.com/go-openapi/runtime v0.24.2/go.mod h1:AKurw9fNre+h3ELZfk6ILsfvPN+bvvlaU/M9q/r9hpk= +github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I= +github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY= +github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= +github.com/go-openapi/strfmt v0.21.0/go.mod h1:ZRQ409bWMj+SOgXofQAGTIo2Ebu72Gs+WaRADcS5iNg= +github.com/go-openapi/strfmt v0.21.1/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k= +github.com/go-openapi/strfmt v0.21.2/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k= +github.com/go-openapi/strfmt v0.23.0 h1:nlUS6BCqcnAk0pyhi9Y+kdDVZdZMHfEKQiS4HaMgO/c= +github.com/go-openapi/strfmt v0.23.0/go.mod h1:NrtIpfKtWIygRkKVsxh7XQMDQW5HKQl6S5ik2elW+K4= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.15/go.mod 
h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-openapi/swag v0.21.1/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= +github.com/go-openapi/validate v0.21.0/go.mod h1:rjnrwK57VJ7A8xqfpAOEKRH8yQSGUriMu5/zuPSQ1hg= +github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58= +github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-stack/stack v1.8.1/go.mod h1:dcoOX6HbPZSZptuspn9bctJ+N/CnF5gGygcUP3XYfe4= +github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= +github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= +github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= +github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs= +github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk= +github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28= +github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo= +github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk= +github.com/gobuffalo/gitgen 
v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw= +github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360= +github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg= +github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE= +github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8= +github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= +github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= +github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= +github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= +github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= +github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= 
+github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= +github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod 
h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= +github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= +github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= +github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= +github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= +github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo= +github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc= +github.com/gregjones/httpcache v0.0.0-20171119193500-2bcd89a1743f h1:kOkUP6rcVVqC+KlKKENKtgfFfJyDySYhqL9srXooghY= +github.com/gregjones/httpcache v0.0.0-20171119193500-2bcd89a1743f/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0 h1:TmHmbvxPmaegwhDubVz0lICL0J5Ka2vwTzhoePEXsGE= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0/go.mod h1:qztMSjm835F2bXf+5HKAPIS5qsmQDqZna/PgVt4rWtI= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-hclog v1.6.3 
h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-metrics v0.5.4 h1:8mmPiIJkTPPEbAiV97IxdAGNdRdaWwVap1BU6elejKY= +github.com/hashicorp/go-metrics v0.5.4/go.mod h1:CG5yz4NZ/AI/aQt9Ucm/vdBnbh7fvmv4lxZ350i+QQI= +github.com/hashicorp/go-msgpack/v2 v2.1.2 h1:4Ee8FTp834e+ewB71RDrQ0VKpyFdrKOjvYtnQ/ltVj0= +github.com/hashicorp/go-msgpack/v2 v2.1.2/go.mod h1:upybraOAblm4S7rx0+jeNy+CWWhzywQsSRV5033mMu4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-sockaddr v1.0.0 h1:GeH6tui99pF4NJgfnhp+L6+FfobzVW3Ah46sLo0ICXs= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= +github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/memberlist v0.5.2 h1:rJoNPWZ0juJBgqn48gjy59K5H4rNgvUoM1kUD7bXiuI= +github.com/hashicorp/memberlist v0.5.2/go.mod 
h1:Ri9p/tRShbjYnpNf4FFPXG7wxEGY4Nrcn6E7jrVa//4= +github.com/hashicorp/raft v1.7.2 h1:pyvxhfJ4R8VIAlHKvLoKQWElZspsCVT6YWuxVxsPAgc= +github.com/hashicorp/raft v1.7.2/go.mod h1:DfvCGFxpAUPE0L4Uc8JLlTPtc3GzSbdH0MTJCLgnmJQ= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003 h1:vJ0Snvo+SLMY72r5J4sEfkuE7AFbixEP2qRbEcum/wA= +github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003/go.mod h1:zNBxMY8P21owkeogJELCLeHIt+voOSduHYTFUbwRAV8= +github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= +github.com/karrick/godirwalk 
v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= +github.com/karrick/godirwalk v1.15.3 h1:0a2pXOgtB16CqIqXTiT7+K9L73f74n/aNQUnH6Ortew= +github.com/karrick/godirwalk v1.15.3/go.mod h1:j4mkqPuvaLI8mp1DroR3P6ad7cyYd4c1qeJ3RV7ULlk= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE= +github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= 
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lanrat/extsort v1.0.2 h1:p3MLVpQEPwEGPzeLBb+1eSErzRl6Bgjgr+qnIs2RxrU= +github.com/lanrat/extsort v1.0.2/go.mod h1:ivzsdLm8Tv+88qbdpMElV6Z15StlzPUtZSKsGb51hnQ= +github.com/launchdarkly/ccache v1.1.0 h1:voD1M+ZJXR3MREOKtBwgTF9hYHl1jg+vFKS/+VAkR2k= +github.com/launchdarkly/ccache v1.1.0/go.mod h1:TlxzrlnzvYeXiLHmesMuvoZetu4Z97cV1SsdqqBJi1Q= +github.com/launchdarkly/eventsource v1.6.2 h1:5SbcIqzUomn+/zmJDrkb4LYw7ryoKFzH/0TbR0/3Bdg= +github.com/launchdarkly/eventsource v1.6.2/go.mod h1:LHxSeb4OnqznNZxCSXbFghxS/CjIQfzHovNoAqbO/Wk= +github.com/launchdarkly/go-jsonstream/v3 v3.1.0 h1:U/7/LplZO72XefBQ+FzHf6o4FwLHVqBE+4V58Ornu/E= +github.com/launchdarkly/go-jsonstream/v3 v3.1.0/go.mod h1:2Pt4BR5AwWgsuVTCcIpB6Os04JFIKWfoA+7faKkZB5E= +github.com/launchdarkly/go-sdk-common/v3 v3.2.0 h1:LzwlrXRBPC7NjdbnDxio8YGHMvDrNb4i6lbjpLgwsyk= +github.com/launchdarkly/go-sdk-common/v3 v3.2.0/go.mod h1:mXFmDGEh4ydK3QilRhrAyKuf9v44VZQWnINyhqbbOd0= +github.com/launchdarkly/go-sdk-events/v3 v3.4.0 h1:22sVSEDEXpdOEK3UBtmThwsUHqc+cbbe/pJfsliBAA4= +github.com/launchdarkly/go-sdk-events/v3 v3.4.0/go.mod h1:oepYWQ2RvvjfL2WxkE1uJJIuRsIMOP4WIVgUpXRPcNI= +github.com/launchdarkly/go-semver v1.0.3 h1:agIy/RN3SqeQDIfKkl+oFslEdeIs7pgsJBs3CdCcGQM= +github.com/launchdarkly/go-semver v1.0.3/go.mod h1:xFmMwXba5Mb+3h72Z+VeSs9ahCvKo2QFUTHRNHVqR28= +github.com/launchdarkly/go-server-sdk-evaluation/v3 v3.0.1 h1:rTgcYAFraGFj7sBMB2b7JCYCm0b9kph4FaMX02t4osQ= +github.com/launchdarkly/go-server-sdk-evaluation/v3 v3.0.1/go.mod h1:fPS5d+zOsgFnMunj+Ki6jjlZtFvo4h9iNbtNXxzYn58= 
+github.com/launchdarkly/go-server-sdk/v7 v7.8.0 h1:QRJmx30DqOVH81FiwFiHAi81FCiwAS/0fRBLJIGTK2U= +github.com/launchdarkly/go-server-sdk/v7 v7.8.0/go.mod h1:rf/K2E4s5OjkB8Nn3ATDOR6W6S3U7D8FJ3WAKLxSTIQ= +github.com/launchdarkly/go-test-helpers/v2 v2.2.0 h1:L3kGILP/6ewikhzhdNkHy1b5y4zs50LueWenVF0sBbs= +github.com/launchdarkly/go-test-helpers/v2 v2.2.0/go.mod h1:L7+th5govYp5oKU9iN7To5PgznBuIjBPn+ejqKR0avw= +github.com/launchdarkly/go-test-helpers/v3 v3.0.2 h1:rh0085g1rVJM5qIukdaQ8z1XTWZztbJ49vRZuveqiuU= +github.com/launchdarkly/go-test-helpers/v3 v3.0.2/go.mod h1:u2ZvJlc/DDJTFrshWW50tWMZHLVYXofuSHUfTU/eIwM= +github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683 h1:7UMa6KCCMjZEMDtTVdcGu0B1GmmC7QJKiCCjyTAWQy0= +github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k= +github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= +github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= +github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.13 
h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= +github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/miekg/dns v1.1.43 h1:JKfpVSCB84vrAmHzyrsxB5NAr5kLoMXZArPSw7Qlgyg= +github.com/miekg/dns v1.1.43/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4= +github.com/minio/crc64nvme v1.0.1 h1:DHQPrYPdqK7jQG/Ls5CTBZWeex/2FMS3G5XGkycuFrY= +github.com/minio/crc64nvme v1.0.1/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg= +github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= +github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM= +github.com/minio/minio-go/v7 v7.0.91 h1:tWLZnEfo3OZl5PoXQwcwTAPNNrjyWwOh6cbZitW5JQc= +github.com/minio/minio-go/v7 v7.0.91/go.mod h1:uvMUcGrpgeSAAI6+sD3818508nUyMULw94j2Nxku/Go= +github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod 
h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ= +github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= +github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= +github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= +github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod 
h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae h1:VeRdUYdCw49yizlSbMEn2SZ+gT+3IUKx8BqxyQdz+BY= +github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/nyaruka/phonenumbers v1.0.54 h1:vU9IUfiHrpu+lZcCkjEzDsCIdurQV8lxjrAdqW2osAU= +github.com/nyaruka/phonenumbers v1.0.54/go.mod h1:sDaTZ/KPX5f8qyV9qN+hIm+4ZBARJrupC6LuhshJq1U= +github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= +github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= +github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/pascaldekloe/goe v0.1.0 
h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc= +github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ= +github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= +github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= +github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt9k/+g42oCprj/FisM4qX9L3sZB3upGN2ZU= +github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= 
+github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y= +github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE= +github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= 
+github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= +github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= +github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU= +github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/shirou/gopsutil/v4 v4.25.5 h1:rtd9piuSMGeU8g1RMXjZs9y9luK5BwtnG7dZaQUJAsc= +github.com/shirou/gopsutil/v4 v4.25.5/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.0/go.mod 
h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= +github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE= +github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod 
h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tailor-inc/graphql v0.5.7 h1:M33mFZmAvJ8GjqIl4jGhZVJFsGl9Bo5uUflRv6mFuJU= +github.com/tailor-inc/graphql v0.5.7/go.mod h1:kBiPFdeNPJOFCnffxI0lT6+1/853hIK8P+mIVOJ/d0M= +github.com/termie/go-shutil v0.0.0-20140729215957-bcacb06fecae h1:vgGSvdW5Lqg+I1aZOlG32uyE6xHpLdKhZzcTEktz5wM= +github.com/termie/go-shutil v0.0.0-20140729215957-bcacb06fecae/go.mod h1:quDq6Se6jlGwiIKia/itDZxqC5rj6/8OdFyMMAwTxCs= +github.com/testcontainers/testcontainers-go v0.38.0 h1:d7uEapLcv2P8AvH8ahLqDMMxda2W9gQN1nRbHS28HBw= +github.com/testcontainers/testcontainers-go v0.38.0/go.mod h1:C52c9MoHpWO+C4aqmgSU+hxlR5jlEayWtgYrb8Pzz1w= +github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tklauser/go-sysconf v0.3.14 h1:g5vzr9iPFFz24v2KZXs/pvpvh8/V9Fw6vQK5ZZb78yU= +github.com/tklauser/go-sysconf v0.3.14/go.mod h1:1ym4lWMLUOhuBOPGtRcJm7tEGX4SCYNEEEtghGG/8uY= +github.com/tklauser/numcpus v0.9.0 h1:lmyCHtANi8aRUgkckBgoDk1nHCux3n2cgkJLXdQGPDo= +github.com/tklauser/numcpus v0.9.0/go.mod h1:SN6Nq1O3VychhC1npsWostA+oW+VOQTxZrS604NSRyI= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= +github.com/urfave/cli/v2 v2.27.5 h1:WoHEJLdsXr6dDWoJgMq/CboDmyY/8HMMH1fTECbih+w= +github.com/urfave/cli/v2 v2.27.5/go.mod h1:3Sevf16NykTbInEnD0yKkjDAeZDS0A6bzhBH5hrMvTQ= +github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8= +github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= +github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= +github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod 
h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +github.com/weaviate/s5cmd/v2 v2.0.1 h1:NLsBemDEeUa1pcqVAGl5Y2quKH8EFpkz/Z8n4s620hg= +github.com/weaviate/s5cmd/v2 v2.0.1/go.mod h1:JEoBF8SXVwK8qoaRKi0fPA4qi4OFmr+iVgc4XO3l/Zs= +github.com/weaviate/sroar v0.0.11 h1:yO25NXecuUcrcgdwTvahT91RDS6FDmaNSjgA7nCKVcw= +github.com/weaviate/sroar v0.0.11/go.mod h1:VgBRWPKPHRV/k9ABnD5w7QgdH9xe4RACzDzkrrK977g= +github.com/weaviate/tiktoken-go v0.0.3 h1:05QrZ0un7zoGyLYBWVK4HuxVHfpdbbdRfQokvxBNtXg= +github.com/weaviate/tiktoken-go v0.0.3/go.mod h1:u47qSckEGSi4sOcVJmUnd3xoHpDV9/5FDDi3KUCFUq4= +github.com/weaviate/weaviate-go-client/v5 v5.4.2-0.20250822111337-d751da280fa2 h1:rXF53fTzV3BHtkYrNz+GSMqSdi93v6MdHYnzBTflsCg= +github.com/weaviate/weaviate-go-client/v5 v5.4.2-0.20250822111337-d751da280fa2/go.mod h1:6Koi63u1qBlVydIdDwtY1pA2A5HjiMnrwRoFNA7zaIk= +github.com/willf/bitset v1.1.11 h1:N7Z7E9UvjW+sGsEl7k/SJrvY2reP1A07MrGuCjIOjRE= +github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI= +github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0 h1:3UeQBvD0TFrlVjOeLOBz+CPAI8dnbqNSVwUwRrkp7vQ= +github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0/go.mod h1:IXCdmsXIht47RaVFLEdVnh1t+pgYtTAhQGj73kz+2DM= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= +github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= +github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4= +github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod 
h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= +github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM= +github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= +go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= +go.mongodb.org/mongo-driver v1.7.5/go.mod h1:VXEWRZ6URJIkUq2SCAyapmhH0ZLRBP+FT4xhp5Zvxng= +go.mongodb.org/mongo-driver v1.8.3/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= +go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= +go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/detectors/gcp v1.36.0 h1:F7q2tNlCaHY9nMKHR6XH9/qkp8FktLnIcy6jJNyOCQw= +go.opentelemetry.io/contrib/detectors/gcp v1.36.0/go.mod h1:IbBN8uAIIx734PTonTPxAxnjc2pQTxWNkwfstZ+6H2k= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q= +go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= +go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E= 
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 h1:Vh5HayB/0HHfOQA7Ctx69E/Y/DcQSMPpKANYVMQ7fBA= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0/go.mod h1:cpgtDBaqD/6ok/UG0jT15/uKjAY8mRA53diogHBg3UI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 h1:wpMfgF8E1rkrT1Z6meFh1NDtownE9Ii3n3X2GJYjsaU= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0/go.mod h1:wAy0T/dUbs468uOlkT31xjvqQgEVXv58BRFWEgn5v/0= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.35.0 h1:PB3Zrjs1sG1GBX51SXyTSoOTqcDglmsk7nT6tkKPb/k= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.35.0/go.mod h1:U2R3XyVPzn0WX7wOIypPuptulsMcPDPs/oiSVOMVnHY= +go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= +go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= +go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs= +go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY= +go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis= +go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4= +go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= +go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= +go.opentelemetry.io/proto/otlp v1.4.0 h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg= +go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY= +go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU= +go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= 
+golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= +golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= +golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa h1:ELnwvuAXPNtPk1TJRuGkI9fDTwym6AYBu0qzT8AcHdI= +golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net 
v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= +golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= +golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= +golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= +golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.33.0 h1:NuFncQrRcaRvVmgRkvM3j/F00gWIAlcmlB8ACEKmGIg= +golang.org/x/term v0.33.0/go.mod h1:s18+ql9tYWp1IfpV9DmCtQDDSRBUjKaw9M1eAv5UeF0= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= +golang.org/x/text v0.27.0/go.mod 
h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gonum.org/v1/gonum v0.15.1 h1:FNy7N6OUZVUaWG9pTiD+jlhdQ3lMP+/LcTpJ6+a8sQ0= +gonum.org/v1/gonum v0.15.1/go.mod h1:eZTZuRFrzu5pcyjN5wJhcIhnUdNijYxX1T2IcrOGY0o= +google.golang.org/api v0.245.0 h1:YliGvz1rjXB+sTLNIST6Ffeji9WlRdLQ+LPl9ruSa5Y= +google.golang.org/api v0.245.0/go.mod h1:dMVhVcylamkirHdzEBAIQWUCgqY885ivNeZYd7VAVr8= +google.golang.org/appengine v1.4.0/go.mod 
h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuOnu87KpaYtjK5zBMLcULh7gxkCXu4= +google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s= +google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY= +google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 h1:MAKi5q709QWfnkkpNQ0M12hYJ1+e8qYVDyowc4U1XZM= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4= +google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= 
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.2 
h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q= +gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA= diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/helper.go b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/helper.go new file mode 100644 index 0000000000000000000000000000000000000000..f4ed4242dce0c9957f20aa9f6cddf51a065bc9bd --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/helper.go @@ -0,0 +1,129 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package acceptance_with_go_client + +import ( + "fmt" + "testing" + + "github.com/davecgh/go-spew/spew" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" +) + +var ( + vFalse = false + vTrue = true +) + +func GetIds(t *testing.T, resp *models.GraphQLResponse, className string) []string { + return ExtractGraphQLField[string](t, resp, "Get", className, "_additional", "id") +} + +func ExtractGraphQLField[T any](t *testing.T, resp *models.GraphQLResponse, path ...string) []T { + require.NotNil(t, resp) + for _, err := range resp.Errors { + t.Logf("ExtractGraphQLField error: %v", err) + } + require.Empty(t, resp.Errors) + + require.NotNil(t, resp.Data) + classMap, ok := resp.Data[path[0]].(map[string]interface{}) + require.True(t, ok) + + objects, ok := classMap[path[1]].([]interface{}) + require.True(t, ok) + + results := make([]T, len(objects)) + for i := range objects { + resultMap, ok := objects[i].(map[string]interface{}) + for j := 2; j < len(path)-1; j++ { + resultMap, ok = resultMap[path[j]].(map[string]interface{}) + require.True(t, ok) + } + + results[i], ok = resultMap[path[len(path)-1]].(T) + require.True(t, ok, 
fmt.Sprintf("failed to extract %s from response: %s", path[len(path)-1], spew.Sdump(resp))) + } + return results +} + +func GetVectors(t *testing.T, + resp *models.GraphQLResponse, + className string, + withCertainty bool, + targetVectors ...string, +) map[string]models.Vector { + require.NotNil(t, resp) + require.NotNil(t, resp.Data) + require.Empty(t, resp.Errors) + + classMap, ok := resp.Data["Get"].(map[string]interface{}) + require.True(t, ok) + + class, ok := classMap[className].([]interface{}) + require.True(t, ok) + + targetVectorsMap := make(map[string]models.Vector) + for i := range class { + resultMap, ok := class[i].(map[string]interface{}) + require.True(t, ok) + + additional, ok := resultMap["_additional"].(map[string]interface{}) + require.True(t, ok) + + if withCertainty { + certainty, ok := additional["certainty"].(float64) + require.True(t, ok) + require.True(t, certainty >= 0) + } + + vectors, ok := additional["vectors"].(map[string]interface{}) + require.True(t, ok) + + for _, targetVector := range targetVectors { + if targetVector == "" { + targetVectorsMap[""] = parseVector(t, additional["vector"]) + continue + } + + targetVectorsMap[targetVector] = parseVector(t, vectors[targetVector]) + } + } + + return targetVectorsMap +} + +func parseVector(t *testing.T, data interface{}) models.Vector { + vector, ok := data.([]interface{}) + require.Truef(t, ok, "unexpected vector types in GraphQL response: %T", data) + + var multiVector [][]float32 + var vec []float32 + for i := range vector { + switch v := vector[i].(type) { + case float64: + vec = append(vec, float32(v)) + case []interface{}: + multiVectorVector := make([]float32, len(v)) + for j := range v { + multiVectorVector[j] = float32(v[j].(float64)) + } + multiVector = append(multiVector, multiVectorVector) + } + } + + if len(multiVector) > 0 { + return multiVector + } + return vec +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/mock_oidc_test.go 
b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/mock_oidc_test.go new file mode 100644 index 0000000000000000000000000000000000000000..cbd6f5f855ba6d25c4496bfb10d36b434994583d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/mock_oidc_test.go @@ -0,0 +1,91 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package acceptance_with_go_client + +import ( + "context" + "fmt" + "io" + "net/http" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + client "github.com/weaviate/weaviate-go-client/v5/weaviate" + "github.com/weaviate/weaviate/test/docker" +) + +func TestMockOIDC(t *testing.T) { + ctx := context.Background() + + checkOpenIDConfigurationGetter := func(endpoint string, withCertificate bool) { + c, err := client.NewClient(client.Config{Scheme: "http", Host: endpoint}) + require.NoError(t, err) + + openid, err := c.Misc().OpenIDConfigurationGetter().Do(ctx) + require.NoError(t, err) + require.NotNil(t, openid) + assert.Equal(t, "mock-oidc-test", openid.ClientID) + require.NotEmpty(t, openid.Href) + if withCertificate { + assert.Equal(t, openid.Href, "https://mock-oidc:48001/oidc/.well-known/openid-configuration") + } else { + assert.Equal(t, openid.Href, "http://mock-oidc:48001/oidc/.well-known/openid-configuration") + } + } + + t.Run("with certificate", func(t *testing.T) { + compose, err := docker.New(). + WithWeaviate(). + WithMockOIDCWithCertificate(). 
+ Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + t.Run("weaviate", func(t *testing.T) { + checkOpenIDConfigurationGetter(compose.GetWeaviate().URI(), true) + }) + }) + + t.Run("without certificate", func(t *testing.T) { + compose, err := docker.New(). + WithWeaviate(). + WithMockOIDC(). + Start(ctx) + require.NoError(t, err) + defer func() { + require.NoError(t, compose.Terminate(ctx)) + }() + + t.Run("weaviate", func(t *testing.T) { + checkOpenIDConfigurationGetter(compose.GetWeaviate().URI(), false) + }) + + t.Run("mock OIDC", func(t *testing.T) { + endpoint := compose.GetMockOIDC().URI() + req, err := http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("http://%s/oidc/.well-known/openid-configuration", endpoint), nil) + require.NoError(t, err) + httpClient := &http.Client{Timeout: time.Minute} + res, err := httpClient.Do(req) + require.NoError(t, err) + defer res.Body.Close() + bodyBytes, err := io.ReadAll(res.Body) + require.NoError(t, err) + response := string(bodyBytes) + require.NotEmpty(t, response) + assert.Contains(t, response, "code_challenge_methods_supported") + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/reranker_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/reranker_test.go new file mode 100644 index 0000000000000000000000000000000000000000..a284a4762634831f5fb3b7b4548013f37f85061a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/reranker_test.go @@ -0,0 +1,96 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package acceptance_with_go_client + +import ( + "context" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + client "github.com/weaviate/weaviate-go-client/v5/weaviate" + "github.com/weaviate/weaviate-go-client/v5/weaviate/graphql" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +func TestReRanker(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + className := "BigFurryMonsterDog" + c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + classCreator := c.Schema().ClassCreator() + class := models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "first", + DataType: []string{string(schema.DataTypeText)}, + }, + { + Name: "second", + DataType: []string{string(schema.DataTypeText)}, + }, + }, + ModuleConfig: map[string]interface{}{ + "reranker-dummy": map[string]interface{}{}, + }, + } + require.Nil(t, classCreator.WithClass(&class).Do(ctx)) + uids := []string{uuid.New().String(), uuid.New().String()} + _, err = c.Data().Creator().WithClassName(className).WithProperties( + map[string]interface{}{"first": "apple", "second": "longlong"}, + ).WithID(uids[0]).WithVector([]float32{1, 0}).Do(ctx) + require.Nil(t, err) + + _, err = c.Data().Creator().WithClassName(className).WithProperties( + map[string]interface{}{"first": "apple", "second": "longlonglong"}, + ).WithID(uids[1]).WithVector([]float32{1, 0}).Do(ctx) + require.Nil(t, err) + nv := graphql.NearVectorArgumentBuilder{} + + // vector search and non-vector search take different codepaths to the storage object. We need to make sure that + // for both paths all the necessary properties are unmarshalled from binary, even if they are not requested by the + // user. 
+ t.Run("Rerank with vector search", func(t *testing.T) { + fields := []graphql.Field{ + {Name: "_additional{id}"}, + {Name: "_additional{rerank(property: \"second\",query: \"apple\" ){score}}"}, + } + result, err := c.GraphQL().Get().WithClassName(className).WithNearVector(nv.WithVector([]float32{1, 0})).WithFields(fields...).Do(ctx) + require.Nil(t, err) + + expected := []float64{12, 8} + for i := 0; i < 2; i++ { + rerankScore := result.Data["Get"].(map[string]interface{})[className].([]interface{})[i].(map[string]interface{})["_additional"].(map[string]interface{})["rerank"].([]interface{})[0].(map[string]interface{})["score"].(float64) + require.Equal(t, rerankScore, expected[i]) + } + }) + + t.Run("Rerank without vector search", func(t *testing.T) { + fields := []graphql.Field{ + {Name: "_additional{id}"}, + {Name: "_additional{rerank(property: \"second\",query: \"apple\" ){score}}"}, + } + result, err := c.GraphQL().Get().WithClassName(className).WithFields(fields...).Do(ctx) + require.Nil(t, err) + + expected := []float64{12, 8} + for i := 0; i < 2; i++ { + rerankScore := result.Data["Get"].(map[string]interface{})[className].([]interface{})[i].(map[string]interface{})["_additional"].(map[string]interface{})["rerank"].([]interface{})[0].(map[string]interface{})["score"].(float64) + require.Equal(t, rerankScore, expected[i]) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/schema_class_names_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/schema_class_names_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b76382b92d32ed2e099b74cd2cbf914bb231faad --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/schema_class_names_test.go @@ -0,0 +1,156 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// 
+// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package acceptance_with_go_client + +import ( + "context" + "fmt" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/tailor-inc/graphql" + client "github.com/weaviate/weaviate-go-client/v5/weaviate" + "github.com/weaviate/weaviate-go-client/v5/weaviate/grpc" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +func TestSchemaClassNames(t *testing.T) { + ctx := context.Background() + config := client.Config{ + Scheme: "http", Host: "localhost:8080", + GrpcConfig: &grpc.Config{Host: "localhost:50051", Secured: false}, + } + client, err := client.NewClient(config) + require.Nil(t, err) + + // clean DB + err = client.Schema().AllDeleter().Do(ctx) + require.NoError(t, err) + + tests := []struct { + className string + }{ + { + className: graphql.String.Name(), + }, + { + className: graphql.Float.Name(), + }, + { + className: graphql.Boolean.Name(), + }, + { + className: graphql.Int.Name(), + }, + { + className: graphql.DateTime.Name(), + }, + { + className: graphql.ID.Name(), + }, + { + className: graphql.FieldSet.Name(), + }, + } + for _, tt := range tests { + t.Run(tt.className, func(t *testing.T) { + id1 := "00000000-0000-0000-0000-000000000001" + className := tt.className + class := &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "description", + DataType: []string{schema.DataTypeText.String()}, + }, + { + Name: "number", + DataType: []string{schema.DataTypeInt.String()}, + }, + }, + VectorConfig: map[string]models.VectorConfig{ + "description": { + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "properties": []interface{}{"description"}, + "vectorizeClassName": false, + }, + }, + VectorIndexType: "hnsw", + }, + }, + } + t.Run("create class", func(t 
*testing.T) { + err := client.Schema().ClassCreator().WithClass(class).Do(ctx) + require.NoError(t, err) + }) + t.Run("batch import object", func(t *testing.T) { + objects := []*models.Object{ + { + ID: strfmt.UUID(id1), + Class: className, + Properties: map[string]interface{}{ + "description": "some text property", + "number": 1, + }, + }, + } + resp, err := client.Batch().ObjectsBatcher().WithObjects(objects...).Do(ctx) + require.NoError(t, err) + require.NotNil(t, resp) + require.Len(t, resp, 1) + require.NotNil(t, resp[0].Result) + assert.Nil(t, resp[0].Result.Errors) + }) + t.Run("check existence", func(t *testing.T) { + objs, err := client.Data().ObjectsGetter().WithClassName(className).WithID(id1).Do(ctx) + require.NoError(t, err) + require.NotNil(t, objs) + require.Len(t, objs, 1) + assert.Equal(t, className, objs[0].Class) + props, ok := objs[0].Properties.(map[string]interface{}) + require.True(t, ok) + require.Equal(t, 2, len(props)) + }) + t.Run("graphql check", func(t *testing.T) { + query := fmt.Sprintf("{ Get { %s { description number _additional { id } } } }", className) + resp, err := client.GraphQL().Raw().WithQuery(query).Do(ctx) + require.NoError(t, err) + require.NotNil(t, resp) + + classMap, ok := resp.Data["Get"].(map[string]interface{}) + require.True(t, ok) + + classResult, ok := classMap[className].([]interface{}) + require.True(t, ok) + for i := range classResult { + resultMap, ok := classResult[i].(map[string]interface{}) + require.True(t, ok) + description, ok := resultMap["description"].(string) + require.True(t, ok) + require.NotEmpty(t, description) + number, ok := resultMap["number"].(float64) + require.True(t, ok) + require.Equal(t, float64(1), number) + additional, ok := resultMap["_additional"].(map[string]interface{}) + require.True(t, ok) + id, ok := additional["id"].(string) + require.True(t, ok) + require.NotEmpty(t, id) + } + }) + }) + } +} diff --git 
a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/schema_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/schema_test.go new file mode 100644 index 0000000000000000000000000000000000000000..7c8c99014d77015e94bdf9fe3e5d6fe564af049a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/schema_test.go @@ -0,0 +1,115 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package acceptance_with_go_client + +import ( + "context" + "encoding/json" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + client "github.com/weaviate/weaviate-go-client/v5/weaviate" + "github.com/weaviate/weaviate-go-client/v5/weaviate/fault" + "github.com/weaviate/weaviate-go-client/v5/weaviate/graphql" + "github.com/weaviate/weaviate/entities/models" +) + +type testCase struct { + className1 string + className2 string +} + +func TestSchemaCasingClass(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + className1 := "RandomGreenCar" + className2 := "RANDOMGreenCar" + + cases := []testCase{ + {className1: className1, className2: className1}, + {className1: className2, className2: className2}, + {className1: className1, className2: className2}, + {className1: className2, className2: className1}, + } + for _, tt := range cases { + t.Run(tt.className1+" "+tt.className2, func(t *testing.T) { + c.Schema().ClassDeleter().WithClassName(tt.className1).Do(ctx) + c.Schema().ClassDeleter().WithClassName(tt.className2).Do(ctx) + class := &models.Class{Class: tt.className1, Vectorizer: "none"} + require.Nil(t, c.Schema().ClassCreator().WithClass(class).Do(ctx)) + + // try to create 
class again with permuted-casing duplicate. + // this should fail as it already exists + class2 := &models.Class{Class: tt.className2, Vectorizer: "none"} + err := c.Schema().ClassCreator().WithClass(class2).Do(ctx) + checkDuplicateClassErrors(t, err, tt) + + // create object with both casing as class name. + _, err = c.Data().Creator().WithClassName(tt.className1).Do(ctx) + require.Nil(t, err) + // this should fail if the 2nd class is a non-equal permutation of the first + _, err = c.Data().Creator().WithClassName(tt.className2).Do(ctx) + if tt.className1 != tt.className2 { + require.NotNil(t, err) + } else { + require.Nil(t, err) + } + + result, err := c.GraphQL().Aggregate().WithClassName(tt.className1).WithFields(graphql.Field{ + Name: "meta", Fields: []graphql.Field{ + {Name: "count"}, + }, + }).Do(ctx) + require.Nil(t, err) + require.Empty(t, result.Errors) + data := result.Data["Aggregate"].(map[string]interface{})[tt.className1].([]interface{})[0].(map[string]interface{})["meta"].(map[string]interface{})["count"] + if tt.className1 == tt.className2 { + // two objects should have been added if the test case contains exact class name matches + require.Equal(t, data, 2.) + } else { + // otherwise, only one object should have been created, since the permuted class name does not exist + require.Equal(t, data, 1.) 
+ } + + // Regardless of whether a class exists or not, the delete operation will always return a success + require.Nil(t, c.Schema().ClassDeleter().WithClassName(tt.className1).Do(ctx)) + require.Nil(t, c.Schema().ClassDeleter().WithClassName(tt.className2).Do(ctx)) + }) + } +} + +func checkDuplicateClassErrors(t *testing.T, err error, tt testCase) { + require.NotNil(t, err) + rawError, ok := err.(*fault.WeaviateClientError) + if ok { + var clientErr clientError + require.Nil(t, json.Unmarshal([]byte(rawError.Msg), &clientErr)) + require.Len(t, clientErr.Error, 1) + if tt.className1 == tt.className2 { + require.Contains(t, clientErr.Error[0].Message, fmt.Sprintf("class name %s already exists", tt.className1)) + } else { + require.Contains(t, clientErr.Error[0].Message, "class already exists") + require.Contains(t, clientErr.Error[0].Message, fmt.Sprintf("found similar class %q", tt.className1)) + } + } else { + t.Fatalf("unexpected error: %v", err) + } +} + +type clientError struct { + Error []struct { + Message string `json:"message"` + } `json:"error"` +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/search_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/search_test.go new file mode 100644 index 0000000000000000000000000000000000000000..482b468e73d2eb5d1ae66229ccb20a6f339da53e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/search_test.go @@ -0,0 +1,482 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package acceptance_with_go_client + +import ( + "context" + "fmt" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + client "github.com/weaviate/weaviate-go-client/v5/weaviate" + "github.com/weaviate/weaviate-go-client/v5/weaviate/graphql" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" +) + +var paragraphs = []string{ + "Some random text", + "Other text", + "completely unrelated", + "this has nothing to do with the rest", +} + +var ( + TRUE = true + ctx = context.Background() +) + +func AddClassAndObjects(t *testing.T, className string, datatype string, c *client.Client, vectorizer string) { + class := &models.Class{ + Class: className, + Properties: []*models.Property{ + {Name: "contents", DataType: []string{datatype}, Tokenization: "word", IndexFilterable: &TRUE, IndexSearchable: &TRUE}, + {Name: "num", DataType: []string{"int"}}, + }, + InvertedIndexConfig: &models.InvertedIndexConfig{Bm25: &models.BM25Config{K1: 1.2, B: 0.75}, UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND}, + Vectorizer: vectorizer, + } + require.Nil(t, c.Schema().ClassCreator().WithClass(class).Do(ctx)) + + creator := c.Data().Creator() + _, err := creator.WithClassName(className).WithProperties( + map[string]interface{}{"contents": []string{"nice", "what a rain day"}, "num": 0}).Do(ctx) + require.Nil(t, err) + _, err = creator.WithClassName(className).WithProperties( + map[string]interface{}{"contents": []string{"rain", "snow and sun at once? nice"}, "num": 1}).Do(ctx) + require.Nil(t, err) + _, err = creator.WithClassName(className).WithProperties( + map[string]interface{}{"contents": []string{ + "super long text to get the score down", + "snow and sun at the same time? 
How nice", + "long text without any meaning", + "just ignore this", + "this too, it doesn't matter", + }, "num": 2}).Do(ctx) + _, err = creator.WithClassName(className).WithProperties( + map[string]interface{}{"contents": []string{ + "super long text to get the score down", + "rain is necessary", + "long text without any meaning", + "just ignore this", + "this too, it doesn't matter", + }, "num": 3}).Do(ctx) +} + +func TestSearchOnArrays(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + c.Schema().AllDeleter().Do(ctx) + + cases := []struct { + datatype schema.DataType + useHybrid bool // bm25 if not + }{ + {datatype: schema.DataTypeTextArray, useHybrid: true}, + {datatype: schema.DataTypeTextArray, useHybrid: false}, + // deprecated string + {datatype: schema.DataTypeStringArray, useHybrid: false}, + } + for _, tt := range cases { + t.Run("arrays "+tt.datatype.String(), func(t *testing.T) { + className := "Paragraph15845" + class := &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "contents", + DataType: tt.datatype.PropString(), + Tokenization: models.PropertyTokenizationWord, + IndexFilterable: &vFalse, + IndexSearchable: &vTrue, + }, + { + Name: "num", + DataType: schema.DataTypeInt.PropString(), + IndexFilterable: &vTrue, + }, + }, + InvertedIndexConfig: &models.InvertedIndexConfig{Bm25: &models.BM25Config{K1: 1.2, B: 0.75}, UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND}, + Vectorizer: "none", + } + require.Nil(t, c.Schema().ClassCreator().WithClass(class).Do(ctx)) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + + creator := c.Data().Creator() + _, err := creator.WithClassName(className).WithProperties( + map[string]interface{}{"contents": []string{"what a nice day", "what a rainy day"}, "num": 0}).Do(ctx) + require.Nil(t, err) + _, err = creator.WithClassName(className).WithProperties( + 
map[string]interface{}{"contents": []string{"rain all day", "snow and sun at the same time? How nice"}, "num": 1}).Do(ctx) + require.Nil(t, err) + + var results *models.GraphQLResponse + if tt.useHybrid { + builder := c.GraphQL().HybridArgumentBuilder().WithQuery("nice").WithAlpha(0) + results, err = c.GraphQL().Get().WithClassName(className).WithHybrid(builder).WithFields(graphql.Field{Name: "num"}).Do(ctx) + require.Nil(t, err) + } else { + builder := c.GraphQL().Bm25ArgBuilder().WithQuery("nice").WithProperties("contents") + results, err = c.GraphQL().Get().WithClassName(className).WithBM25(builder).WithFields(graphql.Field{Name: "num"}).Do(ctx) + require.Nil(t, err) + } + result_int := results.Data["Get"].(map[string]interface{})[className] + result := result_int.([]interface{}) + require.Len(t, result, 2) + require.Equal(t, 0., result[0].(map[string]interface{})["num"]) + require.Equal(t, 1., result[1].(map[string]interface{})["num"]) + }) + } +} + +func TestSearchOnSomeProperties(t *testing.T) { + ctx := context.Background() + c, err := client.NewClient(client.Config{Scheme: "http", Host: "localhost:8080"}) + require.Nil(t, err) + + c.Schema().AllDeleter().Do(ctx) + + // only one property contains the search term + cases := []struct { + queryType string // hybrid or bm25 + property string + results int + }{ + {queryType: "bm25", property: "one", results: 1}, + {queryType: "hybrid", property: "one", results: 1}, + {queryType: "bm25", property: "two", results: 0}, + {queryType: "hybrid", property: "two", results: 0}, + } + for _, tt := range cases { + t.Run("search on some properties "+tt.queryType, func(t *testing.T) { + className := "Paragraph15845" + class := &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "one", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWord, + IndexFilterable: &vFalse, + IndexSearchable: &vTrue, + }, + { + Name: "two", + DataType: 
schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWord, + IndexFilterable: &vFalse, + IndexSearchable: &vTrue, + }, + }, + InvertedIndexConfig: &models.InvertedIndexConfig{Bm25: &models.BM25Config{K1: 1.2, B: 0.75}, UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND}, + Vectorizer: "none", + } + require.Nil(t, c.Schema().ClassCreator().WithClass(class).Do(ctx)) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + + creator := c.Data().Creator() + _, err := creator.WithClassName(className).WithProperties( + map[string]interface{}{"one": "hello", "two": "world"}).Do(ctx) + require.Nil(t, err) + + alpha := "" + if tt.queryType == "hybrid" { + alpha = "alpha:0" // exclude vector search, it doesn't matter for this testcase + } + + results, err := c.GraphQL().Raw().WithQuery(fmt.Sprintf("{Get{%s(%s:{query:\"hello\", properties: [\"%s\"] %s} ){_additional{id score}}}}", className, tt.queryType, tt.property, alpha)).Do(ctx) + result := results.Data["Get"].(map[string]interface{})[className].([]interface{}) + require.Len(t, result, tt.results) + + if len(result) > 0 && result[0].(map[string]interface{})["score"] != nil { + val, err := result[0].(map[string]interface{})["score"].(float64) + require.Nil(t, err) + require.Greater(t, val, 0.0) + } + }) + } +} + +func TestAutocut(t *testing.T) { + ctx := context.Background() + c := client.New(client.Config{Scheme: "http", Host: "localhost:8080"}) + c.Schema().AllDeleter().Do(ctx) + className := "Paragraph453745" + + AddClassAndObjects(t, className, string(schema.DataTypeTextArray), c, "none") + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + + searchQuery := []string{"hybrid:{query:\"rain nice\", alpha: 0.0, fusionType: relativeScoreFusion", "bm25:{query:\"rain nice\""} + cases := []struct { + autocut int + numResults int + }{ + {autocut: 1, numResults: 2}, {autocut: 2, numResults: 4}, {autocut: -1, numResults: 4 /*disabled*/}, + } + for _, tt := range cases { + for 
_, search := range searchQuery { + t.Run("autocut "+fmt.Sprint(tt.autocut, " ", search), func(t *testing.T) { + results, err := c.GraphQL().Raw().WithQuery(fmt.Sprintf("{Get{%s(%s, properties: [\"contents\"]}, autocut: %d){num}}}", className, search, tt.autocut)).Do(ctx) + require.Nil(t, err) + result := results.Data["Get"].(map[string]interface{})[className].([]interface{}) + require.Len(t, result, tt.numResults) + require.Equal(t, 0., result[0].(map[string]interface{})["num"]) + require.Equal(t, 1., result[1].(map[string]interface{})["num"]) + }) + } + } +} + +func TestHybridWithPureVectorSearch(t *testing.T) { + ctx := context.Background() + c := client.New(client.Config{Scheme: "http", Host: "localhost:8080"}) + c.Schema().AllDeleter().Do(ctx) + className := "ParagraphWithManyWords" + + AddClassAndObjects(t, className, string(schema.DataTypeTextArray), c, "text2vec-contextionary") + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + + results, err := c.GraphQL().Raw().WithQuery(fmt.Sprintf("{Get{%s(hybrid: {query: \"rain nice\" properties: [\"contents\"], alpha:1}, autocut: -1){num}}}", className)).Do(ctx) + require.Nil(t, err) + result := results.Data["Get"].(map[string]interface{})[className].([]interface{}) + require.Len(t, result, 4) +} + +func TestHybridWithNearTextSubsearch(t *testing.T) { + ctx := context.Background() + c := client.New(client.Config{Scheme: "http", Host: "localhost:8080"}) + c.Schema().AllDeleter().Do(ctx) + className := "ParagraphWithManyWords" + + AddClassAndObjects(t, className, string(schema.DataTypeTextArray), c, "text2vec-contextionary") + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + + results, err := c.GraphQL().Raw().WithQuery(fmt.Sprintf("{Get{%s(hybrid: { searches: { nearText: {concepts: [\"rain\", \"nice\"]}}, properties: [\"contents\"], alpha:1}, autocut: -1){num}}}", className)).Do(ctx) + require.Nil(t, err) + result := 
results.Data["Get"].(map[string]interface{})[className].([]interface{}) + require.Len(t, result, 4) +} + +func TestHybridWithOnlyVectorSearch(t *testing.T) { + ctx := context.Background() + c := client.New(client.Config{Scheme: "http", Host: "localhost:8080"}) + c.Schema().AllDeleter().Do(ctx) + + className := "HybridVectorOnlySearch" + class := &models.Class{ + Class: className, + Properties: []*models.Property{ + {Name: "text", DataType: []string{"text"}}, + }, + Vectorizer: "text2vec-contextionary", + } + require.Nil(t, c.Schema().ClassCreator().WithClass(class).Do(ctx)) + + creator := c.Data().Creator() + model, err := creator.WithClassName(className).WithProperties( + map[string]interface{}{"text": "how much wood can a woodchuck chuck?"}).Do(ctx) + require.Nil(t, err) + + results, err := c.GraphQL().Raw().WithQuery(fmt.Sprintf("{Get{%s(hybrid:{searches: { nearVector: {vector:%v}}}){text}}}", className, model.Object.Vector)).Do(ctx) + require.Nil(t, err) + result := results.Data["Get"].(map[string]interface{})[className].([]interface{}) + require.Len(t, result, 1) +} + +func TestHybridWithVectorSubsearch(t *testing.T) { + ctx := context.Background() + c := client.New(client.Config{Scheme: "http", Host: "localhost:8080"}) + c.Schema().AllDeleter().Do(ctx) + + className := "HybridVectorOnlySearch" + class := &models.Class{ + Class: className, + Properties: []*models.Property{ + {Name: "text", DataType: []string{"text"}}, + }, + Vectorizer: "text2vec-contextionary", + } + require.Nil(t, c.Schema().ClassCreator().WithClass(class).Do(ctx)) + + creator := c.Data().Creator() + model, err := creator.WithClassName(className).WithProperties( + map[string]interface{}{"text": "how much wood can a woodchuck chuck?"}).Do(ctx) + require.Nil(t, err) + + results, err := c.GraphQL().Raw().WithQuery(fmt.Sprintf("{Get{%s(hybrid:{searches: { nearVector: { vector:%v}}}){text}}}", className, model.Object.Vector)).Do(ctx) + require.Nil(t, err) + result := 
results.Data["Get"].(map[string]interface{})[className].([]interface{}) + require.Len(t, result, 1) +} + +func TestNearVectorAndObjectAutocut(t *testing.T) { + ctx := context.Background() + c := client.New(client.Config{Scheme: "http", Host: "localhost:8080"}) + c.Schema().AllDeleter().Do(ctx) + className := "YellowAndBlueTrain" + + class := &models.Class{ + Class: className, + Vectorizer: "none", + } + require.Nil(t, c.Schema().ClassCreator().WithClass(class).Do(ctx)) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + + var uuids []string + creator := c.Data().Creator() + vectorNumbers := []float32{1, 1.1, 1.2, 2.0, 2.1, 2.2, 3.1, 3.2, 3.2} + for _, vectorNumber := range vectorNumbers { + uuids = append(uuids, uuid.New().String()) + _, err := creator.WithClassName(className).WithVector([]float32{1, 1, 1, 1, 1, vectorNumber}).WithID(uuids[len(uuids)-1]).Do(ctx) + require.Nil(t, err) + } + + t.Run("near vector", func(t *testing.T) { + cases := []struct { + autocut int + numResults int + }{ + {autocut: 1, numResults: 3}, {autocut: 2, numResults: 6}, {autocut: -1, numResults: 9 /*disabled*/}, + } + for _, tt := range cases { + t.Run("autocut "+fmt.Sprint(tt.autocut), func(t *testing.T) { + results, err := c.GraphQL().Raw().WithQuery(fmt.Sprintf("{Get{%s(nearVector:{vector:[1, 1, 1, 1, 1, 1]}, autocut: %d){_additional{vector}}}}", className, tt.autocut)).Do(ctx) + require.Nil(t, err) + result := results.Data["Get"].(map[string]interface{})[className].([]interface{}) + require.Len(t, result, tt.numResults) + }) + } + }) + + t.Run("near object", func(t *testing.T) { + cases := []struct { + autocut int + numResults int + }{ + {autocut: 1, numResults: 3}, {autocut: 2, numResults: 6}, {autocut: -1, numResults: 9 /*disabled*/}, + } + for _, tt := range cases { + t.Run("autocut "+fmt.Sprint(tt.autocut), func(t *testing.T) { + results, err := c.GraphQL().Raw().WithQuery(fmt.Sprintf("{Get{%s(nearObject:{id:%q}, autocut: %d){_additional{vector}}}}", className, 
uuids[0], tt.autocut)).Do(ctx) + require.Nil(t, err) + result := results.Data["Get"].(map[string]interface{})[className].([]interface{}) + require.Len(t, result, tt.numResults) + }) + } + }) +} + +func TestHybridExplainScore(t *testing.T) { + ctx := context.Background() + c := client.New(client.Config{Scheme: "http", Host: "localhost:8080"}) + c.Schema().AllDeleter().Do(ctx) + className := "ParagraphWithManyWords" + + AddClassAndObjects(t, className, string(schema.DataTypeTextArray), c, "text2vec-contextionary") + creator := c.Data().Creator() + creator.WithClassName(className).WithProperties( + map[string]interface{}{"contents": []string{ + "specific", + "hybrid", + "search", + "object", + }, "num": 4}).Do(ctx) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + + t.Run("hybrid explainscore 1", func(t *testing.T) { + results, err := c.GraphQL().Raw().WithQuery(fmt.Sprintf("{Get{%s(hybrid:{query:\"rain nice\", fusionType: rankedFusion, alpha: 0.5, properties: [\"contents\"]}){num _additional { score explainScore id }}}}", className)).Do(ctx) + + require.Nil(t, err) + result := results.Data["Get"].(map[string]interface{})[className].([]interface{}) + require.Len(t, result, 5) + for _, r := range result { + score := r.(map[string]interface{})["_additional"].(map[string]interface{})["score"] + require.NotNil(t, score) + } + explainScore := result[0].(map[string]interface{})["_additional"].(map[string]interface{})["explainScore"].(string) + require.Contains(t, explainScore, "contributed 0.008333334 to the score") + require.Contains(t, explainScore, "contributed 0.008196721 to the score") + }) + t.Run("hybrid explainscore 2", func(t *testing.T) { + results, err := c.GraphQL().Raw().WithQuery(fmt.Sprintf("{Get{%s(hybrid:{query:\"rain snow sun score\",fusionType: rankedFusion, properties: [\"contents\"]}){num _additional { score explainScore }}}}", className)).Do(ctx) + require.Nil(t, err) + result := 
results.Data["Get"].(map[string]interface{})[className].([]interface{}) + require.Len(t, result, 5) + for _, r := range result { + score := r.(map[string]interface{})["_additional"].(map[string]interface{})["score"] + require.NotNil(t, score) + } + explainScore := result[0].(map[string]interface{})["_additional"].(map[string]interface{})["explainScore"].(string) + require.Contains(t, explainScore, "contributed 0.004166667 to the score") + require.Contains(t, explainScore, "contributed 0.0125 to the score") + }) + t.Run("hybrid explainscore relative score fusion", func(t *testing.T) { + results, err := c.GraphQL().Raw().WithQuery(fmt.Sprintf("{Get{%s(hybrid:{query:\"rain snow sun score\", fusionType: relativeScoreFusion, properties: [\"contents\"]}){num _additional { score explainScore }}}}", className)).Do(ctx) + require.Nil(t, err) + result := results.Data["Get"].(map[string]interface{})[className].([]interface{}) + require.Len(t, result, 5) + for _, r := range result { + score := r.(map[string]interface{})["_additional"].(map[string]interface{})["score"] + require.NotNil(t, score) + } + explainScore := result[0].(map[string]interface{})["_additional"].(map[string]interface{})["explainScore"].(string) + require.Contains(t, explainScore, "normalized score: 0.75") + require.Contains(t, explainScore, "normalized score: 0.25") + }) +} + +func TestNearTextAutocut(t *testing.T) { + ctx := context.Background() + c := client.New(client.Config{Scheme: "http", Host: "localhost:8080"}) + c.Schema().AllDeleter().Do(ctx) + className := "YellowAndBlueSub" + + class := &models.Class{ + Class: className, + Properties: []*models.Property{ + { + Name: "text", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWord, + }, + }, + Vectorizer: "text2vec-contextionary", + } + require.Nil(t, c.Schema().ClassCreator().WithClass(class).Do(ctx)) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + + creator := c.Data().Creator() + + texts 
:= []string{"word", "another word", "another word and", "completely unrelated"} + for _, text := range texts { + _, err := creator.WithClassName(className).WithProperties(map[string]interface{}{"text": text}).Do(ctx) + require.Nil(t, err) + } + cases := []struct { + autocut int + numResults int + }{ + {autocut: 1, numResults: 3}, {autocut: -1, numResults: 4 /*disabled*/}, + } + for _, tt := range cases { + t.Run("autocut "+fmt.Sprint(tt.autocut), func(t *testing.T) { + results, err := c.GraphQL().Raw().WithQuery(fmt.Sprintf("{Get{%s(nearText:{concepts: \"word\"}, autocut: %d){_additional{vector}}}}", className, tt.autocut)).Do(ctx) + require.Nil(t, err) + result := results.Data["Get"].(map[string]interface{})[className].([]interface{}) + require.Len(t, result, tt.numResults) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/vector_dimensions_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/vector_dimensions_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b743d8f8762d8483bb2400147b7e764ee4882045 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/vector_dimensions_test.go @@ -0,0 +1,123 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package acceptance_with_go_client + +import ( + "context" + "fmt" + "math/rand" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + client "github.com/weaviate/weaviate-go-client/v5/weaviate" + "github.com/weaviate/weaviate-go-client/v5/weaviate/graphql" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +func TestEdgeVectorDimensions(t *testing.T) { + r := rand.New(rand.NewSource(time.Now().UnixNano())) + c := client.New(client.Config{Scheme: "http", Host: "localhost:8080"}) + ctx := context.Background() + + objID1 := "00000000-0000-0000-0000-000000000001" + objID2 := "00000000-0000-0000-0000-000000000002" + className := "VectorDimensions65k" + propName := "title" + + // 65535 is the max value for uint16 + maxUint16 := uint16(65535) + + for _, vectorLength := range []uint16{1, 50000, maxUint16} { + t.Run(fmt.Sprintf("%v vector dimensions", vectorLength), func(t *testing.T) { + class := &models.Class{ + Class: className, + Properties: []*models.Property{ + {Name: propName, DataType: []string{string(schema.DataTypeText)}, IndexInverted: &vTrue}, + }, + Vectorizer: "none", + } + + // delete class if exists and cleanup after test + c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + require.Nil(t, c.Schema().ClassCreator().WithClass(class).Do(ctx)) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + + t.Run("insert vectors", func(t *testing.T) { + generateVector := func(dims uint16) []float32 { + vector := make([]float32, dims) + for i := range vector { + vector[i] = r.Float32() + } + return vector + } + for i, objID := range []string{objID1, objID2} { + _, err := c.Data().Creator(). + WithClassName(className).WithID(objID). + WithProperties(map[string]interface{}{ + propName: fmt.Sprintf("title %v", i), + }). + WithVector(generateVector(vectorLength)). 
+ Do(ctx) + require.Nil(t, err) + } + }) + + t.Run("check Aggregate", func(t *testing.T) { + getCount := func(t *testing.T, result *models.GraphQLResponse) int { + aggregate, ok := result.Data["Aggregate"].(map[string]interface{}) + require.True(t, ok) + require.NotNil(t, aggregate) + class, ok := aggregate[className].([]interface{}) + require.True(t, ok) + require.Len(t, class, 1) + title, ok := class[0].(map[string]interface{}) + require.True(t, ok) + require.NotNil(t, title) + count, ok := title["title"].(map[string]interface{}) + require.True(t, ok) + require.NotNil(t, count) + titleCount, ok := count["count"].(float64) + require.True(t, ok) + return int(titleCount) + } + result, err := c.GraphQL().Aggregate().WithClassName(className).WithFields(graphql.Field{ + Name: propName, Fields: []graphql.Field{ + {Name: "count"}, + }, + }).Do(ctx) + require.Nil(t, err) + require.Empty(t, result.Errors) + assert.Equal(t, 2, getCount(t, result)) + }) + + t.Run("check nearObject", func(t *testing.T) { + nearObject := c.GraphQL().NearObjectArgBuilder().WithID(objID1) + result, err := c.GraphQL().Get(). + WithClassName(className). + WithNearObject(nearObject). + WithFields(graphql.Field{Name: propName}). 
+ Do(ctx) + require.Nil(t, err) + require.Empty(t, result.Errors) + get, ok := result.Data["Get"].(map[string]interface{}) + require.True(t, ok) + require.NotNil(t, get) + class, ok := get[className].([]interface{}) + require.True(t, ok) + require.Len(t, class, 2) + }) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/where_filters_test.go b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/where_filters_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e99342138fe03059d49610a341c1964b242e3e93 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_go_client/where_filters_test.go @@ -0,0 +1,56 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package acceptance_with_go_client + +import ( + "context" + "testing" + + "github.com/weaviate/weaviate-go-client/v5/weaviate/graphql" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate-go-client/v5/weaviate/filters" + + client "github.com/weaviate/weaviate-go-client/v5/weaviate" +) + +func TestCorrectErrorForIsNullFilter(t *testing.T) { + c := client.New(client.Config{Scheme: "http", Host: "localhost:8080"}) + ctx := context.Background() + + className := "RandomClass45357" + propName := "title" + class := &models.Class{ + Class: className, + Properties: []*models.Property{ + {Name: propName, DataType: []string{string(schema.DataTypeText)}, IndexInverted: &vTrue}, + }, + InvertedIndexConfig: &models.InvertedIndexConfig{IndexNullState: true, UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND}, + Vectorizer: "none", 
+ } + + // delete class if exists and cleanup after test + c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + require.Nil(t, c.Schema().ClassCreator().WithClass(class).Do(ctx)) + defer c.Schema().ClassDeleter().WithClassName(className).Do(ctx) + + filter := filters.Where() + filter.WithOperator(filters.IsNull) + filter.WithValueString("asd") // wrong type + filter.WithPath([]string{propName}) + result, err := c.GraphQL().Get().WithClassName(className).WithWhere(filter).WithFields(graphql.Field{Name: propName}).Do(ctx) + require.Nil(t, err) + require.Contains(t, result.Errors[0].Message, "booleanValue") +} diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/__init__.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/conftest.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/conftest.py new file mode 100644 index 0000000000000000000000000000000000000000..0c1989f7873d4d4f186f86d01dce0c99b6ed2314 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/conftest.py @@ -0,0 +1,174 @@ +from typing import Any, Optional, List, Generator, Protocol, Type, Dict, Tuple, Union, Callable + +import pytest +from _pytest.fixtures import SubRequest + +import weaviate +from weaviate.collections import Collection +from weaviate.collections.classes.config import ( + Property, + _VectorizerConfigCreate, + _InvertedIndexConfigCreate, + _ReferencePropertyBase, + _GenerativeProvider, + _ReplicationConfigCreate, + _MultiTenancyConfigCreate, + _VectorIndexConfigCreate, + _RerankerProvider, +) +from weaviate.collections.classes.types import Properties +from weaviate.config import AdditionalConfig + +from weaviate.collections.classes.config_named_vectors import _NamedVectorConfigCreate +import 
weaviate.classes as wvc + + +class CollectionFactory(Protocol): + """Typing for fixture.""" + + def __call__( + self, + name: str = "", + properties: Optional[List[Property]] = None, + references: Optional[List[_ReferencePropertyBase]] = None, + vectorizer_config: Optional[ + Union[_VectorizerConfigCreate, List[_NamedVectorConfigCreate]] + ] = None, + inverted_index_config: Optional[_InvertedIndexConfigCreate] = None, + multi_tenancy_config: Optional[_MultiTenancyConfigCreate] = None, + generative_config: Optional[_GenerativeProvider] = None, + headers: Optional[Dict[str, str]] = None, + ports: Tuple[int, int] = (8080, 50051), + data_model_properties: Optional[Type[Properties]] = None, + data_model_refs: Optional[Type[Properties]] = None, + replication_config: Optional[_ReplicationConfigCreate] = None, + vector_index_config: Optional[_VectorIndexConfigCreate] = None, + description: Optional[str] = None, + reranker_config: Optional[_RerankerProvider] = None, + ) -> Collection[Any, Any]: + """Typing for fixture.""" + ... 
+ + +@pytest.fixture +def weaviate_client() -> Callable[[int, int], weaviate.WeaviateClient]: + def connect(http_port: int = 8080, grpc_port: int = 50051) -> weaviate.WeaviateClient: + return weaviate.connect_to_local( + port=http_port, + grpc_port=grpc_port, + additional_config=AdditionalConfig(timeout=(60, 120)), # for image tests + ) + + return connect + + +@pytest.fixture +def collection_factory(request: SubRequest) -> Generator[CollectionFactory, None, None]: + name_fixture: Optional[str] = None + client_fixture: Optional[weaviate.WeaviateClient] = None + + def _factory( + name: str = "", + properties: Optional[List[Property]] = None, + references: Optional[List[_ReferencePropertyBase]] = None, + vectorizer_config: Optional[ + Union[_VectorizerConfigCreate, List[_NamedVectorConfigCreate]] + ] = None, + inverted_index_config: Optional[_InvertedIndexConfigCreate] = None, + multi_tenancy_config: Optional[_MultiTenancyConfigCreate] = None, + generative_config: Optional[_GenerativeProvider] = None, + headers: Optional[Dict[str, str]] = None, + ports: Tuple[int, int] = (8080, 50051), + data_model_properties: Optional[Type[Properties]] = None, + data_model_refs: Optional[Type[Properties]] = None, + replication_config: Optional[_ReplicationConfigCreate] = None, + vector_index_config: Optional[_VectorIndexConfigCreate] = None, + description: Optional[str] = None, + reranker_config: Optional[_RerankerProvider] = None, + ) -> Collection[Any, Any]: + nonlocal client_fixture, name_fixture + name_fixture = _sanitize_collection_name(request.node.name) + name + client_fixture = weaviate.connect_to_local( + headers=headers, + grpc_port=ports[1], + port=ports[0], + additional_config=AdditionalConfig(timeout=(60, 120)), # for image tests + ) + client_fixture.collections.delete(name_fixture) + + collection: Collection[Any, Any] = client_fixture.collections.create( + name=name_fixture, + description=description, + vectorizer_config=vectorizer_config, + properties=properties, + 
references=references, + inverted_index_config=inverted_index_config, + multi_tenancy_config=multi_tenancy_config, + generative_config=generative_config, + data_model_properties=data_model_properties, + data_model_references=data_model_refs, + replication_config=replication_config, + vector_index_config=vector_index_config, + reranker_config=reranker_config, + ) + return collection + + try: + yield _factory + finally: + if client_fixture is not None and name_fixture is not None: + client_fixture.collections.delete(name_fixture) + client_fixture.close() + + +class NamedCollection(Protocol): + """Typing for fixture.""" + + def __call__(self, name: str = "", props: Optional[List[str]] = None) -> Collection: + """Typing for fixture.""" + ... + + +@pytest.fixture +def named_collection( + collection_factory: CollectionFactory, +) -> Generator[NamedCollection, None, None]: + def _factory(name: str = "", props: Optional[List[str]] = None) -> Collection: + if props is None: + props = ["title1", "title2", "title3"] + + properties = [ + Property(name=prop, data_type=wvc.config.DataType.TEXT, vectorize_property_name=False) + for prop in props + ] + named_vectors = [ + wvc.config.Configure.NamedVectors.text2vec_contextionary( + name=prop.name, + source_properties=[prop.name], + vectorize_collection_name=False, + ) + for prop in properties + ] + + collection = collection_factory( + name, + properties=properties, + vectorizer_config=named_vectors, + ) + + return collection + + yield _factory + + +def _sanitize_collection_name(name: str) -> str: + name = ( + name.replace("[", "") + .replace("]", "") + .replace("-", "") + .replace(" ", "") + .replace(".", "") + .replace("{", "") + .replace("}", "") + ) + return name[0].upper() + name[1:] diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/requirements.txt b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/requirements.txt new file mode 100644 index 
0000000000000000000000000000000000000000..10e72ae07d759e99e368dbc4415862428896864c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/requirements.txt @@ -0,0 +1,10 @@ +git+https://github.com/weaviate/weaviate-python-client.git@2a0d43514ba757c515eb2fd7130b9de525d17aed + +pytest>=8.0.1,<9.0.0 +pytest-xdist==3.6.1 +pytest-benchmark==4.0.0 + +black>=24.2.0,<25.0.0 +loguru>=0.7.2 + +numpy==2.1.2 diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/run.sh b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/run.sh new file mode 100644 index 0000000000000000000000000000000000000000..f96480f5c0be60a144eca9cb0cb6a9a46b64657b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/run.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +set -euo pipefail + +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" + +# Check if Python is installed +if ! command -v python3 &>/dev/null; then + echo "Python is not installed. Please install Python and try again." + exit 1 +fi + +# Check if a virtual environment (venv) exists +if [ ! -d "venv" ]; then + echo "Creating a new virtual environment (venv)..." + python3 -m venv .venv +fi + +# Activate the virtual environment +source .venv/bin/activate + +cd "$SCRIPT_DIR" || return + +pip install --upgrade pip --quiet +pip install -r requirements.txt --quiet + +# run python tests in parallel +pytest -n auto --dist loadgroup . 
diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_auto_schema_ec.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_auto_schema_ec.py new file mode 100644 index 0000000000000000000000000000000000000000..ee7a65a91d7cd2922d402d54338c716e374d8525 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_auto_schema_ec.py @@ -0,0 +1,50 @@ +import random +import math +from loguru import logger +import weaviate +import weaviate.classes as wvc +import pytest + + +# The idea is that we append this number to collection or prop names. It is +# meant to return the same number a lot of tiems, but not always. This way, +# when it returns a number it has returned before, we're likely to hit an +# existing col or prop, but occasionally we'll get something new. +def random_number_with_frequent_collisions(): + return math.floor(math.log(random.randint(1, 1000000), 2)) + + +@pytest.mark.skip( + reason="currently python integration test run on a single node cluster, this test needs a dedicated 3 node cluster" +) +def test_auto_schema_explicit_property_update_ec(weaviate_client) -> None: + clients: [weaviate.WeaviateClient] = [weaviate_client(8080 + 1, 50051 + 1) for i in range(3)] + client = random.choice(clients) + client.collections.delete_all() + logger.info("cleanup completed") + + for col_iter in range(25): + client = random.choice(clients) + col_name = f"ExplicitCollection{col_iter}" + logger.info(f"start collection {col_name}") + client.collections.create( + col_name, + vectorizer_config=wvc.config.Configure.Vectorizer.none(), + sharding_config=wvc.config.Configure.sharding(desired_count=random.randint(1, 3)), + replication_config=wvc.config.Configure.replication(factor=random.randint(1, 3)), + ) + for i in range(50): + if i % 10 == 0: + logger.info(f"start iteration {i}") + + client: weaviate.WeaviateClient = random.choice(clients) + with client.batch.fixed_size(10) as b: + for j in 
range(10): + text_prop = f"text_{random_number_with_frequent_collisions()}" + number_prop = f"number_{random_number_with_frequent_collisions()}" + obj = { + text_prop: text_prop, + number_prop: random.randint(0, 10000), + } + b.add_object(col_name, obj) + assert len(client.batch.failed_objects) == 0 diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_batch.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_batch.py new file mode 100644 index 0000000000000000000000000000000000000000..ba3061e4673b1df16e1eebccb845e7058e96a922 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_batch.py @@ -0,0 +1,30 @@ +import weaviate +import weaviate.classes as wvc + +from .conftest import CollectionFactory + + +def test_batch_update_empty_list(collection_factory: CollectionFactory) -> None: + collection = collection_factory( + properties=[ + wvc.config.Property(name="array", data_type=wvc.config.DataType.TEXT_ARRAY), + ], + vectorizer_config=wvc.config.Configure.Vectorizer.text2vec_contextionary( + vectorize_collection_name=False + ), + ) + + uuid1 = collection.data.insert({"array": []}) + collection.data.insert_many( + [wvc.data.DataObject(properties={"array": ["one", "two"]}, uuid=uuid1)] + ) + + +def test_batch_without_properties(collection_factory: CollectionFactory) -> None: + collection = collection_factory(vectorizer_config=wvc.config.Configure.Vectorizer.none()) + + ret = collection.data.insert_many([wvc.data.DataObject(vector=[0.0, 1.0])]) + assert not ret.has_errors + + res = collection.query.fetch_objects() + assert len(res.objects) == 1 diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_collection.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_collection.py new file mode 100644 index 0000000000000000000000000000000000000000..3d5398c0f75c8c4dbfa1045e56743961e4206746 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_collection.py @@ -0,0 +1,35 @@ +import weaviate +import weaviate.classes as wvc + +def test_collection_casing() -> None: + with weaviate.connect_to_local() as client: + # Goal: Collection create/get/delete should automatically transform class name to GQL (first letter caps). + + # create collection with all "lower" case -> GQL ("Testcollectioncase") + assert client.collections.create_from_dict({ + "class": "testcollectioncase", + "vectorizer": "none", + } + ) is not None + + # GET should also tranform to GQL + assert client.collections.get("testcollectioncase") is not None + + # DELETE should also transform to GQL + client.collections.delete("testcollectioncase") + assert client.collections.exists("testcollectioncase") is False + + # same with mult-word with "_" + assert client.collections.create_from_dict({ + "class": "test_collection_case", + "vectorizer": "none", + } + ) is not None + + + # GET should also tranform to GQL + assert client.collections.get("test_collection_case") is not None + + # DELETE should also transform to GQL + client.collections.delete("test_collection_case") + assert client.collections.exists("testcollectioncase") is False diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_errors.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_errors.py new file mode 100644 index 0000000000000000000000000000000000000000..0701791d3cfaf9831951ef0f6abf227832b5e42a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_errors.py @@ -0,0 +1,18 @@ +import pytest +from weaviate.exceptions import WeaviateQueryError + +from .conftest import CollectionFactory +import weaviate.classes as wvc + + +def test_error_no_module_for_vectorizer(collection_factory: CollectionFactory) -> None: + collection = collection_factory( + properties=[wvc.config.Property(name="title", data_type=wvc.config.DataType.TEXT)], + 
vectorizer_config=[wvc.config.Configure.NamedVectors.none(name="custom")], + ) + + collection.data.insert({"title": "Hello"}) + + with pytest.raises(WeaviateQueryError) as exc: + collection.query.near_text("hello") + assert "could not vectorize input" in str(exc.value) diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_filter.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..179db097087756e7e7d0647ffb33bfa517e672b2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_filter.py @@ -0,0 +1,51 @@ +from typing import List + +import pytest +from weaviate.collections.classes.config import Configure, Property, DataType +from weaviate.collections.classes.filters import Filter, _FilterValue + +from .conftest import CollectionFactory + + +# bug in client + not supported in weaviate for filter by empty list +@pytest.mark.parametrize( + "weaviate_filter,results,skip", + [ + ( + Filter.by_property("textArray").equal([]), + [1], + True, + ), + (Filter.by_property("textArray", length=True).equal(0), [1], False), + ( + Filter.by_property("textArray").not_equal([]), + [0], + True, + ), + (Filter.by_property("textArray", length=True).not_equal(0), [0], False), + ], +) +def test_empty_list_filter( + collection_factory: CollectionFactory, + weaviate_filter: _FilterValue, + results: List[int], + skip: bool, +) -> None: + if skip: + pytest.skip("Not supported in this version") + collection = collection_factory( + vectorizer_config=Configure.Vectorizer.none(), + properties=[Property(name="textArray", data_type=DataType.TEXT_ARRAY)], + inverted_index_config=Configure.inverted_index(index_property_length=True), + ) + + uuids = [ + collection.data.insert({"textArray": ["one", "two"]}), + collection.data.insert({"textArray": []}), + ] + + objects = collection.query.fetch_objects(filters=weaviate_filter).objects + assert 
len(objects) == len(results) + + uuids = [uuids[result] for result in results] + assert all(obj.uuid in uuids for obj in objects) diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_generative.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_generative.py new file mode 100644 index 0000000000000000000000000000000000000000..cc25840151e0e9bfe541d69a20da5b40db3263df --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_generative.py @@ -0,0 +1,90 @@ +from typing import List + +import pytest +from _pytest.fixtures import SubRequest +from weaviate.collections.classes.config import Configure, Property, DataType + +from .conftest import CollectionFactory + + +# the dummy generative module is not supported in the python client => create collection from dict +@pytest.mark.parametrize( + "single,grouped,grouped_properties", + [ + ("show me {prop}", None, None), + (None, "combine these", ["prop"]), + (None, "combine these", None), + ("show me {prop}", "combine these", ["prop"]), + ], +) +def test_generative( + collection_factory: CollectionFactory, + single: str, + grouped: str, + grouped_properties: List[str], +) -> None: + collection = collection_factory( + vectorizer_config=Configure.Vectorizer.none(), + generative_config=Configure.Generative.custom("generative-dummy"), + properties=[ + Property(name="prop", data_type=DataType.TEXT), + Property(name="prop2", data_type=DataType.TEXT), + ], + ) + + collection.data.insert({"prop": "hello", "prop2": "banana"}, vector=[1, 0]) + collection.data.insert({"prop": "world", "prop2": "banana"}, vector=[1, 0]) + + ret = collection.generate.near_vector( + [1, 0], + single_prompt=single, + grouped_task=grouped, + grouped_properties=grouped_properties, + return_properties=[], + ) + assert len(ret.objects) == 2 + + if single is not None: + for obj in ret.objects: + assert "show me" in obj.generated + assert "hello" in obj.generated or "world" in 
obj.generated + assert ( + "You want me to generate something based on the following prompt" in obj.generated + ) + if grouped is not None: + assert "hello" in ret.generated + assert "world" in ret.generated + if grouped_properties is None and grouped is not None: + assert "banana" in ret.generated + + +def test_generative_array(collection_factory: CollectionFactory) -> None: + collection = collection_factory( + vectorizer_config=Configure.Vectorizer.none(), + generative_config=Configure.Generative.custom("generative-dummy"), + properties=[Property(name="array", data_type=DataType.TEXT_ARRAY)], + ) + + collection.data.insert({"array": ["hello", "apple"]}, vector=[1, 0]) + collection.data.insert({"array": ["world", "wide"]}, vector=[1, 0]) + + ret = collection.generate.near_vector( + [1, 0], + single_prompt="show me {array}", + grouped_task="combine these", + grouped_properties=["array"], + return_properties=[], + ) + assert len(ret.objects) == 2 + + for obj in ret.objects: + assert "show me" in obj.generated + assert ("hello" in obj.generated and "apple" in obj.generated) or ( + "world" in obj.generated and "wide" in obj.generated + ) + assert "You want me to generate something based on the following prompt" in obj.generated + + assert "hello" in ret.generated + assert "world" in ret.generated + assert "apple" in ret.generated + assert "wide" in ret.generated diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_groupby.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_groupby.py new file mode 100644 index 0000000000000000000000000000000000000000..9eb2c6209fd2d86e303d7df4073cb952eb3c5ffa --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_groupby.py @@ -0,0 +1,79 @@ +from typing import Optional + +import pytest +import weaviate +import weaviate.classes as wvc +from .conftest import CollectionFactory + + +@pytest.mark.parametrize("return_refs", [None, 
[wvc.query.QueryReference(link_on="ref")]]) +def test_groupby_with_refs( + collection_factory: CollectionFactory, return_refs: Optional[wvc.query.QueryReference] +) -> None: + col = collection_factory( + properties=[wvc.config.Property(name="text", data_type=wvc.config.DataType.TEXT)], + vectorizer_config=wvc.config.Configure.Vectorizer.none(), + ) + col.config.add_reference(wvc.config.ReferenceProperty(name="ref", target_collection=col.name)) + + uuids = [ + str(uid) + for uid in col.data.insert_many( + [ + wvc.data.DataObject(properties={"text": "a1"}, vector=[1, 0, 0]), + wvc.data.DataObject(properties={"text": "a2"}, vector=[0, 1, 0]), + wvc.data.DataObject(properties={"text": "a2"}, vector=[0, 0, 1]), + ] + ).uuids.values() + ] + + for uid in uuids: + col.data.reference_add_many( + [wvc.data.DataReference(from_property="ref", from_uuid=uid, to_uuid=uid)] + ) + + return_props = None + if return_refs is None: + return_props = ["text"] + + res = col.query.near_object( + uuids[0], + group_by=wvc.query.GroupBy(prop="ref", objects_per_group=2, number_of_groups=3), + return_properties=return_props, + return_references=return_refs, + ) + assert len(res.groups) == 3 + for _, grp in res.groups.items(): + for obj in grp.objects: + if return_refs is not None: + assert obj.references is not None + else: + assert len(obj.properties.get("text")) == 2 + + # repeat with GQL - slightly different code path in + client = weaviate.connect_to_local() + ref = "_additional{id distance}" + if return_refs is None: + ref = f"text {ref}" + if return_refs is not None: + ref = "ref{... 
on " + col.name + "{_additional{id}}} _additional{id distance}" + hits = "hits{" + ref + "}" + group = f"group{{ id groupedBy {{ value path }} count maxDistance minDistance {hits} }}" + _additional = f"_additional{{ {group} }}" + res = client.graphql_raw_query( + f"""{{ + Get {{ + {col.name}(nearObject: {{id: "{uuids[0]}"}} groupBy:{{path: [\"ref\"] groups: 3 objectsPerGroup: 10}}) {{ + {_additional} + }} + }} + }}""" + ) + + assert res.errors is None + for group in res.get[col.name]: + assert len(group["_additional"]["group"]["hits"]) == 1 + if return_refs is not None: + assert group["_additional"]["group"]["hits"][0]["ref"] is not None + else: + assert len(group["_additional"]["group"]["hits"][0]["text"]) == 2 diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_grpc_search.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_grpc_search.py new file mode 100644 index 0000000000000000000000000000000000000000..2ffcd7fc54328f531e47b4c2cd40a53184e2d778 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_grpc_search.py @@ -0,0 +1,166 @@ +import uuid +from typing import List, Optional, Union + +import pytest +from weaviate.classes.config import Configure, DataType, Property +from weaviate.classes.data import DataObject +from weaviate.classes.query import HybridVector, MetadataQuery, Move +from weaviate.collections.classes.grpc import PROPERTIES +from weaviate.types import UUID + +from .conftest import CollectionFactory + +UUID1 = "00000000-0000-0000-0000-000000000001" + +def test_fetch_objects_search(collection_factory: CollectionFactory) -> None: + collection = collection_factory( + properties=[Property(name="Name", data_type=DataType.TEXT)], + vectorizer_config=Configure.Vectorizer.none(), + ) + for i in range(5): + collection.data.insert({"Name": str(i)}) + + assert len(collection.query.fetch_objects().objects) == 5 + + +def test_near_object_search(collection_factory: 
CollectionFactory) -> None: + collection = collection_factory( + properties=[Property(name="Name", data_type=DataType.TEXT)], + vectorizer_config=Configure.Vectorizer.text2vec_contextionary( + vectorize_collection_name=False + ), + ) + uuid_banana = collection.data.insert({"Name": "Banana"}) + collection.data.insert({"Name": "Fruit"}) + collection.data.insert({"Name": "car"}) + collection.data.insert({"Name": "Mountain"}) + + full_objects = collection.query.near_object( + uuid_banana, return_metadata=MetadataQuery(distance=True, certainty=True) + ).objects + assert len(full_objects) == 4 + + objects_distance = collection.query.near_object( + uuid_banana, distance=full_objects[2].metadata.distance + ).objects + assert len(objects_distance) == 3 + + objects_certainty = collection.query.near_object( + uuid_banana, certainty=full_objects[2].metadata.certainty + ).objects + assert len(objects_certainty) == 3 + + +def test_near_vector_search(collection_factory: CollectionFactory) -> None: + collection = collection_factory( + properties=[Property(name="Name", data_type=DataType.TEXT)], + vectorizer_config=Configure.Vectorizer.text2vec_contextionary( + vectorize_collection_name=False + ), + ) + uuid_banana = collection.data.insert({"Name": "Banana"}) + collection.data.insert({"Name": "Fruit"}) + collection.data.insert({"Name": "car"}) + collection.data.insert({"Name": "Mountain"}) + + banana = collection.query.fetch_object_by_id(uuid_banana, include_vector=True) + + full_objects = collection.query.near_vector( + banana.vector["default"], return_metadata=MetadataQuery(distance=True, certainty=True) + ).objects + assert len(full_objects) == 4 + + objects_distance = collection.query.near_vector( + banana.vector["default"], distance=full_objects[2].metadata.distance + ).objects + assert len(objects_distance) == 3 + + objects_distance = collection.query.near_vector( + banana.vector["default"], certainty=full_objects[2].metadata.certainty + ).objects + assert 
len(objects_distance) == 3 + + +@pytest.mark.parametrize("query", ["cake", ["cake"]]) +@pytest.mark.parametrize("objects", [UUID1, str(UUID1), [UUID1], [str(UUID1)]]) +@pytest.mark.parametrize("concepts", ["hiking", ["hiking"]]) +@pytest.mark.parametrize( + "return_properties", [["value"], None] +) # Passing none here causes a server-side bug with <=1.22.2 +def test_near_text_search( + collection_factory: CollectionFactory, + query: Union[str, List[str]], + objects: Union[UUID, List[UUID]], + concepts: Union[str, List[str]], + return_properties: Optional[PROPERTIES], +) -> None: + collection = collection_factory( + vectorizer_config=Configure.Vectorizer.text2vec_contextionary( + vectorize_collection_name=False + ), + properties=[Property(name="value", data_type=DataType.TEXT)], + ) + + batch_return = collection.data.insert_many( + [ + DataObject(properties={"value": "Apple"}, uuid=UUID1), + DataObject(properties={"value": "Mountain climbing"}), + DataObject(properties={"value": "apple cake"}), + DataObject(properties={"value": "cake"}), + ] + ) + + objs = collection.query.near_text( + query=query, + move_to=Move(force=1.0, objects=objects), + move_away=Move(force=0.5, concepts=concepts), + include_vector=True, + return_properties=return_properties, + ).objects + + assert len(objs) == 4 + + assert objs[0].uuid == batch_return.uuids[2] + assert "default" in objs[0].vector + if return_properties is not None: + assert objs[0].properties["value"] == "apple cake" + +def test_hybrid_near_vector_search(collection_factory: CollectionFactory) -> None: + collection = collection_factory( + properties=[ + Property(name="text", data_type=DataType.TEXT), + ], + vectorizer_config=Configure.Vectorizer.text2vec_contextionary( + vectorize_collection_name=False + ), + ) + uuid_banana = collection.data.insert({"text": "banana"}) + obj = collection.query.fetch_object_by_id(uuid_banana, include_vector=True) + + collection.data.insert({"text": "dog"}) + collection.data.insert({"text": 
"different concept"}) + + hybrid_objs = collection.query.hybrid( + query=None, + vector=HybridVector.near_vector(vector=obj.vector["default"]), + ).objects + + assert hybrid_objs[0].uuid == uuid_banana + assert len(hybrid_objs) == 3 + + # make a near vector search to get the distance + near_vec = collection.query.near_vector( + near_vector=obj.vector["default"], return_metadata=["distance"] + ).objects + assert near_vec[0].metadata.distance is not None + + hybrid_objs2 = collection.query.hybrid( + query=None, + vector=HybridVector.near_vector( + vector=obj.vector["default"], distance=near_vec[0].metadata.distance + 0.001 + ), + return_metadata=MetadataQuery.full(), + ).objects + + assert hybrid_objs2[0].uuid == uuid_banana + assert len(hybrid_objs2) == 1 \ No newline at end of file diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_hybrid.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_hybrid.py new file mode 100644 index 0000000000000000000000000000000000000000..1e2151905a3aa8f969860e0afa15413aaa81613a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_hybrid.py @@ -0,0 +1,293 @@ +import uuid +from typing import List, Optional + +import pytest + +from weaviate.classes.config import Configure, Property, DataType +from weaviate.classes.query import TargetVectors +import weaviate.classes as wvc +from .conftest import CollectionFactory + +from weaviate.collections.classes.grpc import HybridVectorType + + +UUID1 = uuid.uuid4() +UUID2 = uuid.uuid4() +UUID3 = uuid.uuid4() +UUID4 = uuid.uuid4() + + +@pytest.mark.parametrize( + "query", ["banana", "car"] +) # does not matter if a result is found for bm25 +@pytest.mark.parametrize( + "vector,expected,distance", + [ + ([1, 0, 0], [UUID1, UUID2, UUID4], 1.5), + ({"first": [1, 0, 0], "second": [0, 1, 0]}, [UUID1], 0.5), + ([1, 0, 0], [UUID1, UUID2, UUID3, UUID4], 2.5), + ([0.5, 0.5, 0.5], [], 0.0001), + ], +) +def 
test_multi_target_near_vector( + collection_factory: CollectionFactory, + vector: List[int], + expected: List[uuid.UUID], + distance: float, + query: str, +) -> None: + collection = collection_factory( + properties=[Property(name="name", data_type=DataType.TEXT)], + vectorizer_config=[ + Configure.NamedVectors.none("first"), + Configure.NamedVectors.none("second"), + ], + ) + + collection.data.insert( + {"name": "banana one"}, vector={"first": [1, 0, 0], "second": [0, 1, 0]}, uuid=UUID1 + ) + collection.data.insert( + {"name": "banana two"}, vector={"first": [0, 1, 0], "second": [1, 0, 0]}, uuid=UUID2 + ) + collection.data.insert( + {"name": "banana three"}, vector={"first": [0, 1, 0], "second": [0, 0, 1]}, uuid=UUID3 + ) + collection.data.insert( + {"name": "banana four"}, vector={"first": [1, 0, 0], "second": [0, 0, 1]}, uuid=UUID4 + ) + + objs = collection.query.hybrid( + "banana", + vector=vector, + target_vector=TargetVectors.sum(["first", "second"]), + max_vector_distance=distance, + ).objects + assert sorted([obj.uuid for obj in objs]) == sorted(expected) # order is not guaranteed + + +@pytest.mark.parametrize( + "query", ["banana", "car"] +) # does not matter if a result is found for bm25 +@pytest.mark.parametrize( + "vector,expected,distance", + [ + ([1, 0, 0, 0], [UUID1], 0.5), + ([1, 0, 0, 0], [UUID1, UUID2, UUID3, UUID4], 2.5), + ([0.5, 0.5, 0.5, 0.5], [], 0.0001), + ([0.5, 0.5, 0.5, 0.5], [UUID1, UUID2, UUID3, UUID4], None), # everything is found + ], +) +def test_aggregate_max_vector_distance( + collection_factory: CollectionFactory, + vector: List[int], + expected: List[uuid.UUID], + distance: float, + query: str, +) -> None: + collection = collection_factory( + properties=[Property(name="name", data_type=DataType.TEXT)], + vectorizer_config=Configure.Vectorizer.none(), + ) + + collection.data.insert({"name": "banana one"}, vector=[1, 0, 0, 0], uuid=UUID1) + collection.data.insert({"name": "banana two"}, vector=[0, 1, 0, 0], uuid=UUID2) + 
collection.data.insert({"name": "banana three"}, vector=[0, 0, 1, 0], uuid=UUID3) + collection.data.insert({"name": "banana four"}, vector=[0, 0, 0, 1], uuid=UUID4) + + # get abd aggregate should match the same objects + objs = collection.query.hybrid("banana", vector=vector, max_vector_distance=distance).objects + assert sorted([obj.uuid for obj in objs]) == sorted(expected) # order is not guaranteed + + res = collection.aggregate.hybrid( + "banana", + vector=vector, + max_vector_distance=distance, + return_metrics=[wvc.aggregate.Metrics("name").text(count=True)], + ) + assert res.total_count == len(expected) + + +@pytest.mark.parametrize("query", ["apple", "banana", "beach", "mountain", "summer dress"]) +@pytest.mark.parametrize( + "distance", + [ + wvc.config.VectorDistances.DOT, + wvc.config.VectorDistances.COSINE, + wvc.config.VectorDistances.L2_SQUARED, + ], +) +@pytest.mark.parametrize("offset", [0, 2]) +def test_hybrid_search_vector_distance_more_objects( + collection_factory: CollectionFactory, + distance: wvc.config.VectorDistances, + query: str, + offset: Optional[int], +) -> None: + collection = collection_factory( + properties=[Property(name="name", data_type=DataType.TEXT)], + vectorizer_config=Configure.Vectorizer.text2vec_contextionary( + vectorize_collection_name=False + ), + vector_index_config=Configure.VectorIndex.hnsw(distance_metric=distance), + ) + + ret = collection.data.insert_many( + [ + {"name": entry} + for entry in [ + "mountain hike", + "banana apple", + "road trip", + "coconut smoothie", + "beach vacation", + "apple pie", + "banana split", + "mountain biking", + "apple cider", + "beach volleyball", + "sailing", + ] + ] + ) + assert ret.has_errors is False + + objs = collection.query.near_text( + query, return_metadata=wvc.query.MetadataQuery.full(), limit=100 + ).objects + middle_distance = objs[len(objs) // 2].metadata.distance + + # with the cutoff distance, the results should be the same for hybrid and near + objs_nt_cutoff = 
collection.query.near_text( + query, + distance=middle_distance, + return_metadata=wvc.query.MetadataQuery.full(), + limit=100, + offset=offset, + ).objects + objs_hy_cutoff = collection.query.hybrid( + query, + max_vector_distance=middle_distance, + alpha=1, + return_metadata=wvc.query.MetadataQuery.full(), + limit=100, + offset=offset, + ).objects + + assert len(objs_nt_cutoff) == len(objs_hy_cutoff) + assert all( + objs_nt_cutoff[i].uuid == objs_hy_cutoff[i].uuid for i, _ in enumerate(objs_nt_cutoff) + ) + + res = collection.aggregate.hybrid( + query, + max_vector_distance=middle_distance, + return_metrics=[wvc.aggregate.Metrics("name").text(count=True)], + ) + assert res.total_count == len(objs_nt_cutoff) + offset + + +def test_hybrid_search_with_bm25_only_objects( + collection_factory: CollectionFactory, +) -> None: + collection = collection_factory( + properties=[Property(name="name", data_type=DataType.TEXT)], + vectorizer_config=Configure.Vectorizer.none(), + ) + + collection.data.insert({"name": "banana"}, vector=[1, 0, 0, 0], uuid=UUID1) + collection.data.insert({"name": "apple"}, uuid=UUID2) # not in vector search results + + # both objects are found without limit as second object is found via BM25 search + objs = collection.query.hybrid("apple", vector=[1, 0, 0, 0]).objects + assert len(objs) == 2 + res = collection.aggregate.hybrid( + "apple", + vector=[1, 0, 0, 0], + object_limit=50, + return_metrics=[wvc.aggregate.Metrics("name").text(count=True)], + ) + assert res.total_count == 2 + + # only first object with vector is found with a max vector distance + objs = collection.query.hybrid("apple", vector=[1, 0, 0, 0], max_vector_distance=0.5).objects + assert len(objs) == 1 + assert objs[0].uuid == UUID1 + + res = collection.aggregate.hybrid( + "apple", + vector=[1, 0, 0, 0], + object_limit=50, + max_vector_distance=0.5, + return_metrics=[wvc.aggregate.Metrics("name").text(count=True)], + ) + assert res.total_count == 1 + + # no results found + objs = 
collection.query.hybrid("apple", vector=[0, 1, 0, 0], max_vector_distance=0.5).objects + assert len(objs) == 0 + + res = collection.aggregate.hybrid( + "apple", + vector=[0, 1, 0, 0], + object_limit=50, + max_vector_distance=0.5, + return_metrics=[wvc.aggregate.Metrics("name").text(count=True)], + ) + assert res.total_count == 0 + + +@pytest.mark.parametrize("vector", [None, wvc.query.HybridVector.near_text("summer dress")]) +def test_hybrid_with_offset( + collection_factory: CollectionFactory, vector: Optional[HybridVectorType] +) -> None: + collection = collection_factory( + properties=[Property(name="name", data_type=DataType.TEXT)], + vectorizer_config=Configure.Vectorizer.text2vec_contextionary( + vectorize_collection_name=False + ), + ) + + ret = collection.data.insert_many( + [ + {"name": entry} + for entry in [ + "mountain hike", + "banana apple", + "road trip", + "coconut smoothie", + "beach vacation", + "apple pie", + "banana split", + "mountain biking", + "apple cider", + "beach volleyball", + "sailing", + ] + ] + ) + assert ret.has_errors is False + + hy = collection.query.hybrid("summer dress") + assert len(hy.objects) > 0 + + hy_offset = collection.query.hybrid("summer dress", offset=2, vector=vector) + assert len(hy_offset.objects) + 2 == len(hy.objects) + + +def test_flipping(collection_factory: CollectionFactory): + collection = collection_factory( + properties=[Property(name="name", data_type=DataType.TEXT)], + vectorizer_config=Configure.Vectorizer.none(), + ) + + collection.data.insert({"name": "banana fruit"}, vector=[1, 0, 0], uuid=UUID1) + collection.data.insert({"name": "apple fruit first"}, vector=[1, 0, 0], uuid=UUID2) + collection.data.insert({"name": "apple fruit second"}, vector=[1, 0, 0], uuid=UUID3) + + hy = collection.query.hybrid("fruit", vector=[1, 0, 0]).objects + + # repeat search to make sure order is always the same + for i in range(10): + hy2 = collection.query.hybrid("fruit", vector=[1, 0, 0]).objects + assert all(hy[i].uuid 
== hy2[i].uuid for i in range(len(hy))) diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_limits.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_limits.py new file mode 100644 index 0000000000000000000000000000000000000000..1306644d73712e24c741698dc4f927b10eddfa7a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_limits.py @@ -0,0 +1,16 @@ +import weaviate.classes as wvc + +from .conftest import CollectionFactory + + +def test_requesting_more_than_the_default_limit(collection_factory: CollectionFactory) -> None: + collection = collection_factory( + vectorizer_config=wvc.config.Configure.Vectorizer.none(), + ) + + # This bug is somewhat hard to trigger and needs two things: + # 1. A higher than default QUERY_MAXIMUM_RESULTS in the weaviate config + # 2. Adding more objects than the default limit to the collection + # 3. Querying, so the limit is lower than the limit, but that limit+offset are higher than the default limit + collection.data.insert_many([{} for _ in range(10001)]) + collection.query.fetch_objects(limit=9999, offset=2) diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_multi_target_search.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_multi_target_search.py new file mode 100644 index 0000000000000000000000000000000000000000..b15e6bd4bab675651eb1e41e124f6b7825c3c860 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_multi_target_search.py @@ -0,0 +1,126 @@ +import math +from typing import List + +import pytest + +from weaviate.classes.config import Configure +from weaviate.classes.query import TargetVectors +from weaviate.collections.classes.grpc import TargetVectorJoinType +import weaviate.classes as wvc + +from .conftest import CollectionFactory + +UUID1 = "00000000-0000-0000-0000-000000000001" + + +@pytest.mark.parametrize( + "target_vector", + [ + 
["first", "second"], + TargetVectors.sum(["first", "second"]), + TargetVectors.minimum(["first", "second"]), + TargetVectors.average(["first", "second"]), + TargetVectors.manual_weights({"first": 1.2, "second": 0.7}), + TargetVectors.relative_score({"first": 1.2, "second": 0.7}), + ], +) +def test_multi_target_near_vector( + collection_factory: CollectionFactory, target_vector: TargetVectorJoinType +) -> None: + collection = collection_factory( + properties=[], + vectorizer_config=[ + Configure.NamedVectors.none("first"), + Configure.NamedVectors.none("second"), + ], + ) + + uuid1 = collection.data.insert({}, vector={"first": [1, 0, 0], "second": [0, 1, 0]}) + uuid2 = collection.data.insert({}, vector={"first": [0, 1, 0], "second": [1, 0, 0]}) + + objs = collection.query.near_vector([1.0, 0.0, 0.0], target_vector=target_vector).objects + assert sorted([obj.uuid for obj in objs]) == sorted([uuid1, uuid2]) # order is not guaranteed + + +@pytest.mark.parametrize( + "target_vector", + [ + ["first", "second"], + TargetVectors.sum(["first", "second"]), + TargetVectors.minimum(["first", "second"]), + TargetVectors.average(["first", "second"]), + TargetVectors.manual_weights({"first": 1.2, "second": 0.7}), + TargetVectors.relative_score({"first": 1.2, "second": 0.7}), + ], +) +def test_multi_target_near_object( + collection_factory: CollectionFactory, target_vector: TargetVectorJoinType +) -> None: + collection = collection_factory( + properties=[], + vectorizer_config=[ + Configure.NamedVectors.none("first"), + Configure.NamedVectors.none("second"), + ], + ) + + uuid1 = collection.data.insert({}, vector={"first": [1, 0], "second": [0, 1, 0]}) + uuid2 = collection.data.insert({}, vector={"first": [0, 1], "second": [1, 0, 0]}) + + objs = collection.query.near_object(uuid1, target_vector=target_vector).objects + assert sorted([obj.uuid for obj in objs]) == sorted([uuid1, uuid2]) # order is not guaranteed + + +@pytest.mark.parametrize( + "target_vector,distances", + [ + 
(["first", "first", "second", "second", "third"], (0, 0)), + (TargetVectors.sum(["first", "second", "third"]), [3, 4]), + (TargetVectors.minimum(["first", "second", "third"]), [0, 0]), + (TargetVectors.average(["first", "second", "third"]), [0.6, 0.8]), + ( + TargetVectors.manual_weights({"first": [3, 2], "second": [1.5, 1], "third": 0.5}), + [3, 6.5], + ), + ( + # same as above because the scores are already between 0 and 1 =>normalization does not change anything + TargetVectors.relative_score({"first": [3, 2], "second": [1.5, 1], "third": 0.5}), + [3, 6.5], + ), + ], +) +def test_multi_target_near_vector_multiple_inputs( + collection_factory: CollectionFactory, + target_vector: TargetVectorJoinType, + distances: List[float], +) -> None: + collection = collection_factory( + properties=[], + vectorizer_config=[ + Configure.NamedVectors.none("first"), + Configure.NamedVectors.none("second"), + Configure.NamedVectors.none("third"), + ], + ) + + uuid1 = collection.data.insert( + {}, vector={"first": [1, 0], "second": [0, 1, 0], "third": [0, 0, 0, 1]} + ) + uuid2 = collection.data.insert( + {}, vector={"first": [0, 1], "second": [1, 0, 0], "third": [1, 0, 0, 0]} + ) + + objs = collection.query.near_vector( + {"first": [[1, 0], [1, 0]], "second": [[1, 0, 0], [0, 0, 1]], "third": [0, 1, 0, 0]}, + target_vector=target_vector, + return_metadata=wvc.query.MetadataQuery.full(), + ).objects + if distances[0] == distances[1]: + assert sorted(obj.uuid for obj in objs) == sorted([uuid1, uuid2]) + else: + assert [obj.uuid for obj in objs] == [uuid1, uuid2] + + obj1 = [obj for obj in objs if obj.uuid == uuid1][0] + assert math.isclose(obj1.metadata.distance, distances[0], rel_tol=1e-5) + obj2 = [obj for obj in objs if obj.uuid == uuid2][0] + assert math.isclose(obj2.metadata.distance, distances[1], rel_tol=1e-5) diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_multi_target_search_gql.py 
b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_multi_target_search_gql.py new file mode 100644 index 0000000000000000000000000000000000000000..843263d8e3b586927e8b9301cb9932226b581b26 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_multi_target_search_gql.py @@ -0,0 +1,226 @@ +import weaviate +import weaviate.classes as wvc +import math + + +from .conftest import CollectionFactory, NamedCollection + +GQL_RETURNS = "{_additional {distance id score}" +GQL_TARGETS = 'targets: {targetVectors: ["title1", "title2", "title3"], combinationMethod: sum}' +CAR_DISTANCE = 0.7892138957977295 +APPLE_DISTANCE = 0.5168729424476624 +KALE_DISTANCE = 0.5732871294021606 + + +def test_gql_near_text(named_collection: NamedCollection): + collection = named_collection() + collection.data.insert(properties={"title1": "apple", "title2": "car", "title3": "kale"}) + + # use collection for auto cleanup etc, but we need the client to use gql directly + client = weaviate.connect_to_local() + gql = client.graphql_raw_query( + """{ + Get { + """ + + collection.name + + """( + nearText: { + concepts: ["fruit"] + """ + + GQL_TARGETS + + """ + } + ) """ + + GQL_RETURNS + + """ + } + } + }""" + ) + + assert math.isclose( + gql.get[collection.name][0]["_additional"]["distance"], + CAR_DISTANCE + APPLE_DISTANCE + KALE_DISTANCE, + rel_tol=1e-5, + ) + + +def test_gql_near_vector(named_collection: NamedCollection): + collection = named_collection() + collection.data.insert( + properties={"title1": "first"}, + vector={ + "title1": [1, 0, 0], + "title2": [0, 0, 1], + "title3": [1, 0, 0], + }, + ) + + # use collection for auto cleanup etc, but we need the client to use gql directly + client = weaviate.connect_to_local() + gql = client.graphql_raw_query( + """{ + Get { + """ + + collection.name + + """( + nearVector: { + vector: [0, 0, 1] + """ + + GQL_TARGETS + + """ + } + ) """ + + GQL_RETURNS + + """ + } + } + }""" + ) + + assert 
gql.get[collection.name][0]["_additional"]["distance"] == 2 + + +def test_gql_near_object(named_collection: NamedCollection): + collection = named_collection() + uuid1 = collection.data.insert( + properties={"title1": "first"}, + vector={ + "title1": [1, 0, 0], + "title2": [0, 0, 1], + "title3": [1, 0, 0], + }, + ) + uuid2 = collection.data.insert( + properties={"title1": "second"}, + vector={ + "title1": [1, 0, 0], + "title2": [0, 0, 1], + "title3": [0, 1, 0], + }, + ) + + uuid_str = '"' + str(uuid1) + '"' + # use collection for auto cleanup etc, but we need the client to use gql directly + with weaviate.connect_to_local() as client: + gql = client.graphql_raw_query( + """{ + Get { + """ + + collection.name + + """( + nearObject: { + id: """ + + uuid_str + + """ + """ + + GQL_TARGETS + + """ + } + ) """ + + GQL_RETURNS + + """ + } + } + }""" + ) + + assert gql.get[collection.name][0]["_additional"]["distance"] == 0 + assert gql.get[collection.name][1]["_additional"]["distance"] == 1 + + +def test_test_multi_target_near_vector_gql(collection_factory: CollectionFactory): + collection = collection_factory( + vectorizer_config=[ + wvc.config.Configure.NamedVectors.none( + name=entry, + ) + for entry in ["title1", "title2", "title3"] + ] + ) + + collection.data.insert( + properties={}, vector={"title1": [1, 0], "title2": [0, 0, 1], "title3": [0, 0, 0, 1]} + ) + uuid2 = collection.data.insert( + properties={}, vector={"title1": [0, 1], "title2": [0, 1, 0], "title3": [0, 0, 1, 0]} + ) + + client = weaviate.connect_to_local() + gql = client.graphql_raw_query( + """{ + Get { + """ + + collection.name + + """( + nearVector: { + vectorPerTarget: {title1: [0, 1], title2: [0, 1, 0], title3: [0, 0, 1, 0]} + distance: 0.1 + """ + + GQL_TARGETS + + """ + } + ) """ + + GQL_RETURNS + + """ + } + } + }""" + ) + assert gql.get[collection.name][0]["_additional"]["distance"] == 0 + assert gql.get[collection.name][0]["_additional"]["id"] == str(uuid2) + + +def 
test_test_multi_target_hybrid_gql(collection_factory: CollectionFactory): + collection = collection_factory( + vectorizer_config=[ + wvc.config.Configure.NamedVectors.none( + name=entry, + ) + for entry in ["title1", "title2", "title3"] + ] + ) + + uuid0 = collection.data.insert( + properties={}, vector={"title1": [1, 0], "title2": [0, 0, 1], "title3": [0, 0, 0, 1]} + ) + uuid1 = collection.data.insert( + properties={}, vector={"title1": [0, 1], "title2": [0, 1, 0], "title3": [0, 0, 1, 0]} + ) + uuid2 = collection.data.insert( + properties={}, vector={"title1": [1, 0], "title2": [1, 0, 0], "title3": [0, 1, 0, 0]} + ) + + client = weaviate.connect_to_local() + + gql_query = ( + """{ + Get { + """ + + collection.name + + """( + hybrid: { + alpha:1 + searches: { nearVector:{ + vectorPerTarget: {title1: [1, 0], title2: [0, 0, 1], title3: [0, 0, 0, 1]} + distance: 3.1 + }} + """ + + GQL_TARGETS + + """ + } + ) """ + + GQL_RETURNS + + """ + } + } + }""" + ) + + gql = client.graphql_raw_query(gql_query) + assert gql.get[collection.name][0]["_additional"]["score"] == "1" + assert gql.get[collection.name][0]["_additional"]["id"] == str(uuid0) + assert gql.get[collection.name][1]["_additional"]["score"] == "0.33333334" + assert gql.get[collection.name][1]["_additional"]["id"] == str(uuid2) + assert gql.get[collection.name][2]["_additional"]["score"] == "0" + assert gql.get[collection.name][2]["_additional"]["id"] == str(uuid1) diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_named_vectors.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_named_vectors.py new file mode 100644 index 0000000000000000000000000000000000000000..6b9330555b589fc3cf1c34f23521ec76afa8e604 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_named_vectors.py @@ -0,0 +1,611 @@ +import pytest +import weaviate.classes as wvc +import math +from weaviate.collections.classes.grpc import ( + _MultiTargetVectorJoin, 
+ TargetVectors, + _MultiTargetVectorJoinEnum, + HybridVectorType, +) +from weaviate.exceptions import UnexpectedStatusCodeError, WeaviateInsertManyAllFailedError + +from .conftest import CollectionFactory, NamedCollection + + +def test_create_named_vectors_with_and_without_vectorizer( + collection_factory: CollectionFactory, +) -> None: + collection = collection_factory( + properties=[ + wvc.config.Property(name="title", data_type=wvc.config.DataType.TEXT), + wvc.config.Property(name="content", data_type=wvc.config.DataType.TEXT), + ], + vectorizer_config=[ + wvc.config.Configure.NamedVectors.text2vec_contextionary( + name="AllExplicit", + source_properties=["title", "content"], + vectorize_collection_name=False, + ), + wvc.config.Configure.NamedVectors.none(name="bringYourOwn"), + ], + ) + + uuid = collection.data.insert( + properties={"title": "Hello", "content": "World"}, + vector={"bringYourOwn": [0.5, 0.25, 0.75]}, + ) + obj = collection.query.fetch_object_by_id(uuid, include_vector=True) + assert obj.vector["AllExplicit"] is not None + assert obj.vector["bringYourOwn"] is not None + + +def test_hybrid_search_with_multiple_target_vectors(named_collection: NamedCollection) -> None: + collection = named_collection() + + uuid1 = collection.data.insert( + properties={"title1": "apple", "title2": "cocoa"}, + ) + uuid2 = collection.data.insert( + properties={"title1": "cocoa", "title2": "apple"}, + ) + uuid3 = collection.data.insert( + properties={"title1": "mountain", "title2": "ridge line"}, + ) + + direct = collection.query.hybrid( + "apple", + target_vector=["title1", "title2"], + return_metadata=wvc.query.MetadataQuery.full(), + alpha=1, # to make sure that the vector part works + ) + + assert len(direct.objects) == 3 + + # first two objects are a perfect fit, but their order is not guaranteed + assert sorted([obj.uuid for obj in direct.objects[:2]]) == sorted([uuid1, uuid2]) + assert direct.objects[2].uuid == uuid3 + + assert direct.objects[0].metadata.score 
== 1 + assert direct.objects[1].metadata.score == 1 + assert direct.objects[2].metadata.score == 0 + + near_text_sub_search = collection.query.hybrid( + "something else", + vector=wvc.query.HybridVector.near_text("apple sandwich"), + target_vector=["title1", "title2"], + return_metadata=wvc.query.MetadataQuery.full(), + ) + assert len(near_text_sub_search.objects) == 3 + + # first two objects are a perfect fit for vector search, but their order is not guaranteed + assert sorted([obj.uuid for obj in near_text_sub_search.objects[:2]]) == sorted([uuid1, uuid2]) + assert ( + near_text_sub_search.objects[0].metadata.score > 0.5 + ) # only vector search part has result + assert near_text_sub_search.objects[1].metadata.score > 0.5 + assert near_text_sub_search.objects[2].metadata.score == 0 + + obj1 = collection.query.fetch_object_by_id(uuid1, include_vector=True) + near_vector_sub_search = collection.query.hybrid( + "something else", + vector=wvc.query.HybridVector.near_vector(obj1.vector["title1"]), + target_vector=["title1", "title2"], + return_metadata=wvc.query.MetadataQuery.full(), + ) + assert len(near_vector_sub_search.objects) == 3 + + # first two objects are a perfect fit for vector search, but their order is not guaranteed + assert sorted([obj.uuid for obj in near_vector_sub_search.objects[:2]]) == sorted( + [uuid1, uuid2] + ) + assert ( + near_vector_sub_search.objects[0].metadata.score > 0.5 + ) # only vector search part has result + assert near_vector_sub_search.objects[1].metadata.score > 0.5 + assert near_vector_sub_search.objects[2].metadata.score == 0 + + +def test_near_object(named_collection: NamedCollection) -> None: + collection = named_collection() + + uuid1 = collection.data.insert( + properties={"title1": "apple", "title2": "cocoa"}, + ) + uuid2 = collection.data.insert( + properties={"title1": "banana", "title2": "cocoa"}, + ) + collection.data.insert( + properties={"title1": "mountain", "title2": "ridge line"}, + ) + + # only finds first object 
with minimal distance + near_obj1 = collection.query.near_object( + uuid1, + target_vector="title1", + distance=0.1, + return_metadata=wvc.query.MetadataQuery.full(), + ) + assert len(near_obj1.objects) == 1 + assert near_obj1.objects[0].uuid == uuid1 + + # finds both objects, but the second target vector has a larger distance + near_obj2 = collection.query.near_object( + uuid1, + target_vector=TargetVectors.sum(["title1", "title2"]), + distance=0.9, + return_metadata=wvc.query.MetadataQuery.full(), + ) + assert len(near_obj2.objects) == 2 + # order is not guaranteed + assert sorted([obj.uuid for obj in near_obj2.objects]) == sorted([uuid1, uuid2]) + + +def test_near_text(named_collection: NamedCollection) -> None: + collection = named_collection() + + uuid1 = collection.data.insert( + properties={"title1": "apple", "title2": "cocoa"}, + ) + uuid2 = collection.data.insert( + properties={"title1": "cocoa", "title2": "apple"}, + ) + collection.data.insert( + properties={"title1": "mountain", "title2": "ridge line"}, + ) + + near_text1 = collection.query.near_text( + "apple", + target_vector="title1", + distance=0.1, + return_metadata=wvc.query.MetadataQuery.full(), + ) + assert len(near_text1.objects) == 1 + assert near_text1.objects[0].uuid == uuid1 + + # finds both objects, but the second target vector has a larger distance + near_text2 = collection.query.near_text( + "apple", + target_vector=TargetVectors.sum(["title1", "title2"]), + distance=0.9, + return_metadata=wvc.query.MetadataQuery.full(), + ) + assert len(near_text2.objects) == 2 + # order is not guaranteed + assert sorted([obj.uuid for obj in near_text2.objects]) == sorted([uuid1, uuid2]) + + +def test_near_vector(named_collection: NamedCollection) -> None: + collection = named_collection() + + uuid1 = collection.data.insert( + properties={"title1": "apple", "title2": "cocoa"}, + ) + uuid2 = collection.data.insert( + properties={"title1": "cocoa", "title2": "apple"}, + ) + collection.data.insert( + 
properties={"title1": "mountain", "title2": "ridge line"}, + ) + + obj1 = collection.query.fetch_object_by_id(uuid1, include_vector=True) + near_vector1 = collection.query.near_vector( + obj1.vector["title1"], + target_vector="title1", + distance=0.1, + return_metadata=wvc.query.MetadataQuery.full(), + ) + + assert len(near_vector1.objects) == 1 + assert near_vector1.objects[0].uuid == uuid1 + + # finds both objects, but the second target vector has a larger distance + near_vector2 = collection.query.near_vector( + obj1.vector["title1"], + target_vector=TargetVectors.sum(["title1", "title2"]), + distance=0.9, + return_metadata=wvc.query.MetadataQuery.full(), + ) + assert len(near_vector2.objects) == 2 + # order is not guaranteed + assert sorted([obj.uuid for obj in near_vector2.objects]) == sorted([uuid1, uuid2]) + + +@pytest.mark.parametrize("target_vector", [None, "title"]) +def test_near_vector_with_single_named_vector( + named_collection: NamedCollection, target_vector: str | None +) -> None: + collection = named_collection(props=["title"]) + + uuid1 = collection.data.insert( + properties={"title": "alpha"}, + ) + uuid2 = collection.data.insert( + properties={"title": "beta"}, + ) + collection.data.insert( + properties={"title": "gamma"}, + ) + + obj1 = collection.query.fetch_object_by_id(uuid1, include_vector=True) + near_vector1 = collection.query.near_vector( + obj1.vector["title"], + target_vector=target_vector, + distance=0.1, + return_metadata=wvc.query.MetadataQuery.full(), + ) + + assert len(near_vector1.objects) == 1 + assert near_vector1.objects[0].uuid == uuid1 + + +CAR_DISTANCE = 0.7892138957977295 +APPLE_DISTANCE = 0.5168729424476624 +KALE_DISTANCE = 0.5732871294021606 + + +@pytest.mark.parametrize( + "multi_target_fusion_method,distance", + [ + ( + TargetVectors.sum(["title1", "title2", "title3"]), + CAR_DISTANCE + APPLE_DISTANCE + KALE_DISTANCE, + ), + ( + TargetVectors.average(["title1", "title2", "title3"]), + (CAR_DISTANCE + APPLE_DISTANCE + 
KALE_DISTANCE) / 3, + ), + (TargetVectors.minimum(["title1", "title2", "title3"]), APPLE_DISTANCE), + ( + TargetVectors.manual_weights({"title1": 0.4, "title2": 1.2, "title3": 0.752}), + APPLE_DISTANCE * 0.4 + CAR_DISTANCE * 1.2 + KALE_DISTANCE * 0.752, + ), + ], +) +def test_different_target_fusion_methods( + named_collection: NamedCollection, + multi_target_fusion_method: _MultiTargetVectorJoin, + distance: float, +) -> None: + collection = named_collection() + + collection.data.insert(properties={"title1": "apple", "title2": "car", "title3": "kale"}) + + nt = collection.query.near_text( + "fruit", + target_vector=multi_target_fusion_method, + return_metadata=wvc.query.MetadataQuery.full(), + ) + assert len(nt.objects) == 1 + assert math.isclose(nt.objects[0].metadata.distance, distance, rel_tol=1e-5) + + +def test_score_fusion(named_collection: NamedCollection) -> None: + collection = named_collection() + + uuid0 = collection.data.insert( + properties={"title1": "first"}, + vector={ + "title1": [1, 0, 0], + "title2": [0, 0, 1], + "title3": [1, 0, 0], + }, + ) + uuid1 = collection.data.insert( + properties={"title1": "second"}, + vector={ + "title1": [0, 1, 0], + "title2": [1, 0, 0], + "title3": [0, 0, 1], + }, + ) + uuid2 = collection.data.insert( + properties={"title1": "third"}, + vector={ + "title1": [0, 1, 0], + "title2": [0, 0, 1], + "title3": [0, 0, 1], + }, + ) + + nt = collection.query.near_vector( + [1.0, 0.0, 0.0], + target_vector=TargetVectors.relative_score({"title1": 1, "title2": 1, "title3": 1}), + return_metadata=wvc.query.MetadataQuery.full(), + ) + assert len(nt.objects) == 3 + + assert math.isclose(nt.objects[0].metadata.distance, 1, rel_tol=1e-5) + assert nt.objects[0].uuid == uuid0 + assert math.isclose(nt.objects[1].metadata.distance, 2, rel_tol=1e-5) + assert nt.objects[1].uuid == uuid1 + assert math.isclose(nt.objects[2].metadata.distance, 3, rel_tol=1e-5) + assert nt.objects[2].uuid == uuid2 + + +@pytest.mark.parametrize( + 
"multi_target_fusion_method", + [ + TargetVectors.sum(["colour", "weather", "material"]), + TargetVectors.average(["colour", "weather", "material"]), + TargetVectors.manual_weights({"colour": 0.4, "weather": 1.2, "material": 0.752}), + TargetVectors.relative_score({"colour": 1, "weather": 1.0, "material": 1.0}), + ], +) +def test_more_results_than_limit( + named_collection: NamedCollection, + multi_target_fusion_method: _MultiTargetVectorJoin, +) -> None: + collection = named_collection(props=["colour", "weather", "material"]) + + uuid1 = collection.data.insert( + properties={"colour": "bright", "weather": "summer", "material": "cotton"}, + ) + uuid2 = collection.data.insert( + properties={"colour": "snow", "weather": "warm", "material": "breezy"}, + ) + uuid3 = collection.data.insert( + properties={"colour": "white", "weather": "cold", "material": "heavy fur"}, + ) + uuid4 = collection.data.insert( + properties={"colour": "red", "weather": "summer", "material": "thick"}, + ) + uuid5 = collection.data.insert( + properties={"colour": "black", "weather": "arctic", "material": "lite"}, + ) + + # uuid3 is the best match for colour but bad for the others targets => make sure that the extra distances are + # computed correctly + nt = collection.query.near_text( + "white summer clothing with breezy material", + target_vector=multi_target_fusion_method, + return_metadata=wvc.query.MetadataQuery.full(), + limit=2, + ) + + assert len(nt.objects) == 2 + assert nt.objects[0].uuid == uuid1 + assert nt.objects[1].uuid == uuid2 + + # get all results to check if the distances are correct + nt3 = collection.query.near_text( + "white summer clothing with breezy material", + target_vector=multi_target_fusion_method, + return_metadata=wvc.query.MetadataQuery.full(), + limit=5, + ) + + assert nt3.objects[0].uuid == uuid1 + assert nt3.objects[1].uuid == uuid2 + # fusion score depend on all the input scores and are expected to be different with more objects that are found + if ( + 
multi_target_fusion_method.combination.value + != _MultiTargetVectorJoinEnum.RELATIVE_SCORE.value + ): + assert math.isclose( + nt3.objects[0].metadata.distance, nt.objects[0].metadata.distance, rel_tol=0.001 + ) + assert math.isclose( + nt3.objects[1].metadata.distance, nt.objects[1].metadata.distance, rel_tol=0.001 + ) + + +@pytest.mark.parametrize( + "multi_target_fusion_method,number_objects", + [ + (TargetVectors.sum(["first", "second", "third"]), 1), + (TargetVectors.average(["first", "second", "third"]), 1), + (TargetVectors.minimum(["first", "second", "third"]), 2), + ( + TargetVectors.manual_weights({"first": 0.4, "second": 1.2, "third": 0.752}), + 1, + ), + (TargetVectors.relative_score({"first": 1, "second": 1, "third": 1}), 1), + ], +) +def test_named_vectors_missing_entries( + collection_factory: CollectionFactory, + multi_target_fusion_method: _MultiTargetVectorJoin, + number_objects: int, +) -> None: + collection = collection_factory( + vectorizer_config=[ + wvc.config.Configure.NamedVectors.none( + name=entry, + ) + for entry in ["first", "second", "third"] + ] + ) + + # first object has all entries, second object is missing the third entry is missing. 
+ uuid1 = collection.data.insert( + properties={}, vector={"first": [1, 0, 0], "second": [1, 0, 0], "third": [1, 0, 0]} + ) + uuid2 = collection.data.insert( + properties={}, + vector={"first": [0, 1, 0], "second": [0, math.sqrt(3), 0]}, + ) + + nt = collection.query.near_vector( + [1, 0, 0], + target_vector=multi_target_fusion_method, + return_metadata=wvc.query.MetadataQuery.full(), + ) + + # first object is perfect fit, second object has a distance of 1 + assert len(nt.objects) == number_objects + assert nt.objects[0].uuid == uuid1 + if len(nt.objects) == 2: + assert nt.objects[1].uuid == uuid2 + + +def test_multi_target_near_vector(collection_factory: CollectionFactory) -> None: + collection = collection_factory( + vectorizer_config=[ + wvc.config.Configure.NamedVectors.none( + name=entry, + ) + for entry in ["first", "second", "third"] + ] + ) + + collection.data.insert( + properties={}, vector={"first": [1, 0], "second": [0, 0, 1], "third": [0, 0, 0, 1]} + ) + uuid2 = collection.data.insert( + properties={}, vector={"first": [0, 1], "second": [0, 1, 0], "third": [0, 0, 1, 0]} + ) + + nt = collection.query.near_vector( + {"first": [0, 1], "second": [0, 1, 0], "third": [0, 0, 1, 0]}, + return_metadata=wvc.query.MetadataQuery.full(), + target_vector=TargetVectors.sum(["first", "second", "third"]), + distance=0.1, + ) + assert len(nt.objects) == 1 + assert nt.objects[0].uuid == uuid2 + assert nt.objects[0].metadata.distance == 0 + + +def test_multi_target_with_filter(collection_factory: CollectionFactory): + collection = collection_factory( + properties=[ + wvc.config.Property(name="first", data_type=wvc.config.DataType.TEXT), + wvc.config.Property(name="second", data_type=wvc.config.DataType.TEXT), + wvc.config.Property(name="int", data_type=wvc.config.DataType.INT), + ], + vectorizer_config=[ + wvc.config.Configure.NamedVectors.text2vec_contextionary( + name=entry, source_properties=[entry], vectorize_collection_name=False + ) + for entry in ["first", "second"] 
+ ], + ) + + uuid1 = collection.data.insert( + properties={"first": "apple", "second": "mountain", "int": 3}, + ) + collection.data.insert( + properties={"first": "banana", "second": "blueberry", "int": 1}, + ) + uuid3 = collection.data.insert( + properties={"first": "backpack", "second": "orange", "int": 2}, + ) + + objs = collection.query.near_text( + "fruit", + return_metadata=wvc.query.MetadataQuery.full(), + target_vector=wvc.query.TargetVectors.sum(["first", "second"]), + limit=5, + filters=wvc.query.Filter.by_property("int").greater_or_equal(2), + ).objects + + # second object should not be part of results + assert len(objs) == 2 + assert sorted(obj.uuid for obj in objs) == sorted( + [uuid1, uuid3] + ) # order is not guaranteed and does not matter for this test + + +@pytest.mark.parametrize( + "combination", + [ + wvc.query.TargetVectors.sum(["title1", "title2"]), + wvc.query.TargetVectors.average(["title1", "title2"]), + ], +) +@pytest.mark.parametrize( + "vector", + [ + wvc.query.HybridVector.near_vector({"title1": [1, 0, 0], "title2": [0, 0, 1]}), + {"title1": [1, 0, 0], "title2": [0, 0, 1]}, + ], +) +def test_hybrid_combinations( + collection_factory: CollectionFactory, + vector: HybridVectorType, + combination: _MultiTargetVectorJoin, +) -> None: + collection = collection_factory( + vectorizer_config=[ + wvc.config.Configure.NamedVectors.none( + name=entry, + ) + for entry in ["title1", "title2"] + ] + ) + uuid0 = collection.data.insert( + properties={"title1": "first"}, + vector={"title1": [1, 0, 0], "title2": [0, 0, 1]}, + ) + uuid1 = collection.data.insert( + properties={"title1": "second"}, + vector={"title1": [0, 1, 0], "title2": [1, 0, 0]}, + ) + uuid2 = collection.data.insert( + properties={"title1": "third"}, + vector={"title1": [0, 1, 0], "title2": [0, 0, 1]}, + ) + + res = collection.query.hybrid( + "something else", + vector=vector, + target_vector=wvc.query.TargetVectors.sum(["title1", "title2"]), + alpha=1, + 
return_metadata=wvc.query.MetadataQuery.full(), + ) + assert len(res.objects) == 3 + assert res.objects[0].uuid == uuid0 + assert res.objects[0].metadata.score == 1 + assert res.objects[1].uuid == uuid2 + assert res.objects[1].metadata.score == 0.5 + assert res.objects[2].uuid == uuid1 + assert res.objects[2].metadata.score == 0.0 + + +def test_single_named_vectors_without_names(collection_factory: CollectionFactory) -> None: + collection = collection_factory( + properties=[wvc.config.Property(name="title", data_type=wvc.config.DataType.TEXT)], + vectorizer_config=[wvc.config.Configure.NamedVectors.none("title")], + ) + + # insert object with single non-named vector. + uuid1 = collection.data.insert(properties={"title": "Hello"}, vector=[1, 2, 3]) + obj = collection.query.fetch_object_by_id(uuid1, include_vector=True) + assert "title" in obj.vector + + ret = collection.data.insert_many( + [wvc.data.DataObject(properties={"title": "Hello"}, vector=[1, 2, 3])] + ) + obj_batch = collection.query.fetch_object_by_id(ret.uuids[0], include_vector=True) + assert "title" in obj_batch.vector + + +def test_named_vectors_without_names(collection_factory: CollectionFactory) -> None: + collection = collection_factory( + properties=[wvc.config.Property(name="title", data_type=wvc.config.DataType.TEXT)], + vectorizer_config=[ + wvc.config.Configure.NamedVectors.none("first"), + wvc.config.Configure.NamedVectors.none("second"), + ], + ) + with pytest.raises(UnexpectedStatusCodeError): + collection.data.insert(properties={"title": "Hello"}, vector=[1, 2, 3]) + + with pytest.raises(WeaviateInsertManyAllFailedError): + collection.data.insert_many( + [wvc.data.DataObject(properties={"title": "Hello"}, vector=[1, 2, 3])] + ) + + +def test_single_vectorizer_with_named_vectors(collection_factory: CollectionFactory) -> None: + collection = collection_factory( + properties=[wvc.config.Property(name="title", data_type=wvc.config.DataType.TEXT)], + 
vectorizer_config=wvc.config.Configure.Vectorizer.none(), + ) + with pytest.raises(UnexpectedStatusCodeError): + collection.data.insert(properties={"title": "Hello"}, vector={"something": [1, 2, 3]}) + + with pytest.raises(WeaviateInsertManyAllFailedError): + collection.data.insert_many( + [wvc.data.DataObject(properties={"title": "Hello"}, vector={"something": [1, 2, 3]})] + ) diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_pq_vector_dims_match.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_pq_vector_dims_match.py new file mode 100644 index 0000000000000000000000000000000000000000..2b5fd0667ffe836d407c78d5efd907ab818f9258 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_pq_vector_dims_match.py @@ -0,0 +1,53 @@ +import pytest +import time +import numpy as np + +from .conftest import CollectionFactory +from weaviate.classes.config import Configure, Reconfigure +from weaviate.classes.query import MetadataQuery +from weaviate.exceptions import WeaviateQueryError + + +VEC_DIMS = 256 +OBJ_NUM = 1024 + +@pytest.mark.skip( + reason="fix PQ to return error when searching with wrong vector dimensions. 
This is a regression that was introduced in 1.26" +) +def test_pq_dims_match(collection_factory: CollectionFactory): + col = collection_factory( + name='CompressedVector', + vectorizer_config=Configure.Vectorizer.none() + ) + + with col.batch.dynamic() as batch: + for i in range(OBJ_NUM): + batch.add_object( + properties={ + 'someText': f'object-{i}' + }, + vector=generate_vec(VEC_DIMS) + ) + + col.config.update( + vector_index_config=Reconfigure.VectorIndex.hnsw( + quantizer=Reconfigure.VectorIndex.Quantizer.pq() + ) + ) + + # time to quantize + time2sleep = 3 + print(f'sleeping for {time2sleep} seconds to compress vectors...') + time.sleep(time2sleep) + + with pytest.raises(WeaviateQueryError) as exc: + col.query.near_vector( + near_vector=generate_vec(128), + limit=2, + return_metadata=MetadataQuery(distance=True) + ) + assert 'ProductQuantizer.DistanceBetweenCompressedAndUncompressedVectors: mismatched dimensions:' in str(exc.value) + + +def generate_vec(dims): + return np.random.random(dims).tolist() diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_refs.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_refs.py new file mode 100644 index 0000000000000000000000000000000000000000..1571492b5a1d44bb4a844dcfc586147f9dedccf0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_refs.py @@ -0,0 +1,120 @@ +import pytest +import weaviate.classes as wvc + +from .conftest import CollectionFactory + + +def test_ref_with_cycle(collection_factory: CollectionFactory) -> None: + col = collection_factory( + properties=[wvc.config.Property(name="name", data_type=wvc.config.DataType.TEXT)], + vectorizer_config=wvc.config.Configure.Vectorizer.none(), + ) + col.config.add_reference(wvc.config.ReferenceProperty(name="ref", target_collection=col.name)) + + a = col.data.insert(properties={"name": "A"}) + b = col.data.insert(properties={"name": "B"}, references={"ref": a}) + 
col.data.reference_add(from_uuid=a, from_property="ref", to=b) + + ret = col.query.fetch_objects( + return_references=[ + wvc.query.QueryReference( + link_on="ref", + return_properties="name", + return_references=[ + wvc.query.QueryReference( + link_on="ref", + return_properties="name", + return_metadata=wvc.query.MetadataQuery.full(), + ) + ], + return_metadata=wvc.query.MetadataQuery.full(), + ), + ], + ).objects + + ret = sorted(ret, key=lambda x: x.properties["name"]) + assert ret[0].properties["name"] == "A" + assert ret[1].properties["name"] == "B" + assert ret[0].references["ref"].objects[0].properties["name"] == "B" + assert ret[1].references["ref"].objects[0].properties["name"] == "A" + + +@pytest.mark.skip(reason="DB-18") +def test_ref_with_multiple_cycle(collection_factory: CollectionFactory) -> None: + col = collection_factory( + properties=[wvc.config.Property(name="name", data_type=wvc.config.DataType.TEXT)], + vectorizer_config=wvc.config.Configure.Vectorizer.none(), + ) + col.config.add_reference(wvc.config.ReferenceProperty(name="ref", target_collection=col.name)) + + # Add objects with two cyclic paths + # c => b => a => c + # c => a => c + a = col.data.insert(properties={"name": "A"}) + b = col.data.insert(properties={"name": "B"}, references={"ref": a}) + c = col.data.insert(properties={"name": "C"}, references={"ref": [b, a]}) # has two refs + col.data.reference_add(from_uuid=a, from_property="ref", to=c) + + ret = col.query.fetch_objects( + return_references=[ + wvc.query.QueryReference( + link_on="ref", + return_properties=["name"], + return_references=[ + wvc.query.QueryReference( + link_on="ref", + return_properties="name", + return_metadata=wvc.query.MetadataQuery.full(), + return_references=[ + wvc.query.QueryReference( + link_on="ref", + return_properties="name", + return_metadata=wvc.query.MetadataQuery.full(), + ) + ], + ) + ], + return_metadata=wvc.query.MetadataQuery.full(), + ), + ], + ).objects + + # both paths are resolved 
correctly + ret = sorted(ret, key=lambda x: x.properties["name"]) + assert ret[0].properties["name"] == "A" + assert ret[1].properties["name"] == "B" + assert ret[2].properties["name"] == "C" + + assert ret[0].references["ref"].objects[0].properties["name"] == "C" + assert ret[1].references["ref"].objects[0].properties["name"] == "A" + + ret2_objects = sorted(ret[2].references["ref"].objects, key=lambda x: x.properties["name"]) + assert ret2_objects[0].properties["name"] == "A" + assert ret2_objects[1].properties["name"] == "B" + + +def test_return_metadata_ref(collection_factory: CollectionFactory) -> None: + target = collection_factory( + name="target", + vectorizer_config=[ + wvc.config.Configure.NamedVectors.none(name="bringYourOwn1"), + wvc.config.Configure.NamedVectors.none(name="bringYourOwn2"), + ], + ) + + source = collection_factory( + name="source", + references=[wvc.config.ReferenceProperty(name="ref", target_collection=target.name)], + vectorizer_config=wvc.config.Configure.Vectorizer.none(), + ) + + uuid_target = target.data.insert( + properties={}, vector={"bringYourOwn1": [1, 2, 3], "bringYourOwn2": [4, 5, 6]} + ) + source.data.insert(properties={}, references={"ref": uuid_target}) + + res = source.query.fetch_objects( + return_references=wvc.query.QueryReference(link_on="ref", include_vector=True) + ) + + assert res.objects[0].references["ref"].objects[0].vector["bringYourOwn1"] == [1, 2, 3] diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_reranker.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_reranker.py new file mode 100644 index 0000000000000000000000000000000000000000..8c784ebc60bc8669d6c156f7cf8bef046420bf10 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_reranker.py @@ -0,0 +1,33 @@ +import weaviate +import weaviate.classes as wvc + + +# the dummy reranker is not supported in the python client => create collection from dict +def 
test_reranker() -> None: + client = weaviate.connect_to_local() + collection_name = "TestRerankerDummy" + client.collections.delete(name=collection_name) + collection = client.collections.create_from_dict( + { + "class": collection_name, + "vectorizer": "none", + "moduleConfig": {"reranker-dummy": {}}, + "properties": [{"name": "prop", "dataType": ["text"]}], + } + ) + + uuid1 = collection.data.insert({"prop": "hello"}, vector=[1, 0]) + uuid2 = collection.data.insert({"prop": "hellohellohello"}, vector=[1, 0]) + uuid3 = collection.data.insert({"prop": "hellohello"}, vector=[1, 0]) + + objs = collection.query.near_vector( + [1, 0], rerank=wvc.query.Rerank(prop="prop"), return_properties=[] + ).objects + assert len(objs) == 3 + + # sorted by length by dummy reranker + assert objs[0].uuid == uuid2 + assert objs[1].uuid == uuid3 + assert objs[2].uuid == uuid1 + + client.collections.delete(name=collection_name) diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_stats_hnsw.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_stats_hnsw.py new file mode 100644 index 0000000000000000000000000000000000000000..961eb752d29656694df6f9f5216e5326ab62b6a2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_stats_hnsw.py @@ -0,0 +1,81 @@ +import json +import httpx +from weaviate.classes.config import Configure, VectorDistances +import weaviate + + +def test_stats_hnsw() -> None: + short_url = "http://localhost:6060/debug/stats/collection/collection_name/shards" + response = httpx.post(short_url) + assert response.status_code == 404 + assert "invalid path" in response.text + long_url = "http://localhost:6060/debug/stats/collection/collection_name/shards/shard_name/arg4/arg5/arg6" + response = httpx.post(long_url) + assert response.status_code == 404 + assert "invalid path" in response.text + wrong_url = "http://localhost:6060/debug/stats/collection/collection_name/wrong/shard_name" + 
response = httpx.post(wrong_url) + assert response.status_code == 404 + assert "invalid path" in response.text + # HNSW index + client = weaviate.connect_to_local() + client.collections.delete(name="vector") + collection = client.collections.create_from_dict( + { + "class": "vector", + "vectorizer": "none", + "moduleConfig": {"reranker-dummy": {}}, + "properties": [{"name": "prop", "dataType": ["text"]}], + } + ) + + collection.data.insert({"prop": "hello"}, vector=[1, 0]) + collection.data.insert({"prop": "hellohellohello"}, vector=[1, 0]) + collection.data.insert({"prop": "hellohello"}, vector=[1, 0]) + shards = collection.config.get_shards() + + wrong_collection = ( + "http://localhost:6060/debug/stats/collection/wrong_collection/shards/" + shards[0].name + ) + response = httpx.post(wrong_collection) + assert response.status_code == 404 + assert "collection not found" in response.text + wrong_shard = "http://localhost:6060/debug/stats/collection/vector/shards/wrong_shard" + response = httpx.post(wrong_shard) + assert response.status_code == 404 + assert "shard not found" in response.text + + url = "http://localhost:6060/debug/stats/collection/vector/shards/" + shards[0].name + response = httpx.post(url) + keywords = list(json.loads(response.text).keys()) + assert response.status_code == 200 + assert [ + "dimensions", + "entryPointID", + "distributionLayers", + "unreachablePoints", + "numTombstones", + "cacheSize", + "compressed", + "compressionStats", + "compressionType", + ] == keywords + + # Flat index + flat_index = client.collections.create( + name="flatIndex", + vector_index_config=Configure.VectorIndex.flat( + distance_metric=VectorDistances.COSINE, + quantizer=None, + vector_cache_max_objects=1000000, + ), + ) + flat_index.data.insert({"prop": "hello"}, vector=[1, 0]) + flat_index.data.insert({"prop": "hellohellohello"}, vector=[1, 0]) + flat_index.data.insert({"prop": "hellohello"}, vector=[1, 0]) + flat_shards = flat_index.config.get_shards() + flat_url 
= ( + "http://localhost:6060/debug/stats/collection/flatIndex/shards/" + flat_shards[0].name + ) + response = httpx.post(flat_url) + assert response.status_code == 400 diff --git a/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_updates.py b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_updates.py new file mode 100644 index 0000000000000000000000000000000000000000..250ea18dc6ca6ad009104e921c49a42be60767f3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/acceptance_with_python/test_updates.py @@ -0,0 +1,23 @@ +import weaviate.classes as wvc + +from .conftest import CollectionFactory + + +def test_batch_update_empty_list2(collection_factory: CollectionFactory) -> None: + collection = collection_factory( + properties=[ + wvc.config.Property(name="tags", data_type=wvc.config.DataType.TEXT_ARRAY), + wvc.config.Property(name="title", data_type=wvc.config.DataType.TEXT), + wvc.config.Property(name="authorized", data_type=wvc.config.DataType.BOOL), + ], + vectorizer_config=[ + wvc.config.Configure.NamedVectors.text2vec_contextionary( + name="title_vector", vectorize_collection_name=False + ), + ], + ) + + uuid1 = collection.data.insert({"tags": [], "authorized": False}) + + # update without the empty array + collection.data.update(properties={"authorized": True}, uuid=uuid1) diff --git a/platform/dbops/binaries/weaviate-src/test/benchmark/.gitattributes b/platform/dbops/binaries/weaviate-src/test/benchmark/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..3dbd5c9d1c806263f948c001a30916d0c78030b7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/benchmark/.gitattributes @@ -0,0 +1 @@ +*.fvecs filter=lfs diff=lfs merge=lfs -text diff --git a/platform/dbops/binaries/weaviate-src/test/benchmark/benchmark.go b/platform/dbops/binaries/weaviate-src/test/benchmark/benchmark.go new file mode 100644 index 
0000000000000000000000000000000000000000..70bde5fb2ca5c25a15971e10d5d0b3b008cc5b6f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/benchmark/benchmark.go @@ -0,0 +1,262 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +// Package implements performance tracking examples + +package main + +import ( + "bytes" + "encoding/json" + "flag" + "fmt" + "io" + "net" + "net/http" + "os" + "os/exec" + "time" + + "github.com/pkg/errors" + + "github.com/weaviate/weaviate/entities/models" +) + +type batch struct { + Objects []*models.Object +} + +type benchmarkResult map[string]map[string]int64 + +func main() { + var benchmarkName string + var numBatches, failPercentage, maxEntries int + + flag.StringVar(&benchmarkName, "name", "SIFT", "Which benchmark should be run. 
Currently only SIFT is available.") + flag.IntVar(&maxEntries, "numberEntries", 100000, "Maximum number of entries read from the dataset") + flag.IntVar(&numBatches, "numBatches", 1, "With how many parallel batches objects should be added") + flag.IntVar(&failPercentage, "fail", -1, "Fail if regression is larger") + flag.Parse() + + t := &http.Transport{ + Proxy: http.ProxyFromEnvironment, + DialContext: (&net.Dialer{ + Timeout: 30 * time.Second, + KeepAlive: 120 * time.Second, + }).DialContext, + MaxIdleConnsPerHost: 100, + MaxIdleConns: 100, + IdleConnTimeout: 90 * time.Second, + TLSHandshakeTimeout: 10 * time.Second, + ExpectContinueTimeout: 1 * time.Second, + } + c := &http.Client{Transport: t} + url := "http://localhost:8080/v1/" + + alreadyRunning := startWeaviate(c, url) + + var newRuntime map[string]int64 + var err error + switch benchmarkName { + case "SIFT": + newRuntime, err = benchmarkSift(c, url, maxEntries, numBatches) + default: + panic("Unknown benchmark " + benchmarkName) + } + + if err != nil { + clearExistingObjects(c, url) + } + + if !alreadyRunning { + tearDownWeaviate() + } + + if err != nil { + panic(errors.Wrap(err, "Error occurred during benchmarking")) + } + + FullBenchmarkName := benchmarkName + "-" + fmt.Sprint(maxEntries) + "_Entries-" + fmt.Sprint(numBatches) + "_Batch(es)" + + // Write results to file, keeping existing entries + oldBenchmarkRunTimes := readCurrentBenchmarkResults() + oldRuntime := oldBenchmarkRunTimes[FullBenchmarkName] + oldBenchmarkRunTimes[FullBenchmarkName] = newRuntime + benchmarkJSON, _ := json.MarshalIndent(oldBenchmarkRunTimes, "", "\t") + if err := os.WriteFile("benchmark_results.json", benchmarkJSON, 0o666); err != nil { + panic(err) + } + + totalNewRuntime := int64(0) + for _, runtime := range newRuntime { + totalNewRuntime += runtime + } + totalOldRuntime := int64(0) + for _, runtime := range oldRuntime { + totalOldRuntime += runtime + } + + fmt.Fprint( + os.Stdout, + "Runtime for benchmark 
"+FullBenchmarkName+ + ": old total runtime: "+fmt.Sprint(totalOldRuntime)+"ms, new total runtime:"+fmt.Sprint(totalNewRuntime)+"ms.\n"+ + "This is a change of "+fmt.Sprintf("%.2f", 100*float32(totalNewRuntime-totalOldRuntime)/float32(totalNewRuntime))+"%.\n"+ + "Please update the benchmark results if necessary.\n\n", + ) + fmt.Fprint(os.Stdout, "Runtime for individual steps:.\n") + for name, time := range newRuntime { + fmt.Fprint(os.Stdout, "Runtime for "+name+" is "+fmt.Sprint(time)+"ms.\n") + } + + // Return with error code if runtime regressed and corresponding flag was set + if failPercentage >= 0 && + totalOldRuntime > 0 && // don't report regression if no old entry exists + float64(totalOldRuntime)*(1.0+0.01*float64(failPercentage)) < float64(totalNewRuntime) { + fmt.Fprint( + os.Stderr, "Failed due to performance regressions.\n", + ) + os.Exit(1) + } +} + +// If there is already a schema present, clear it out +func clearExistingObjects(c *http.Client, url string) { + checkSchemaRequest := createRequest(url+"schema", "GET", nil) + checkSchemaResponseCode, body, _, err := performRequest(c, checkSchemaRequest) + if err != nil { + panic(errors.Wrap(err, "perform request")) + } + if checkSchemaResponseCode != 200 { + return + } + + var dump models.Schema + if err := json.Unmarshal(body, &dump); err != nil { + panic(errors.Wrap(err, "Could not unmarshal read response")) + } + for _, classObj := range dump.Classes { + requestDelete := createRequest(url+"schema/"+classObj.Class, "DELETE", nil) + responseDeleteCode, _, _, err := performRequest(c, requestDelete) + if err != nil { + panic(errors.Wrap(err, "Could delete schema")) + } + if responseDeleteCode != 200 { + panic(fmt.Sprintf("Could not delete schema, code: %v", responseDeleteCode)) + } + } +} + +func command(app string, arguments []string, waitForCompletion bool) error { + mydir, err := os.Getwd() + if err != nil { + return err + } + + cmd := exec.Command(app, arguments...) 
+ execDir := mydir + "/../../" + cmd.Dir = execDir + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + if waitForCompletion { + err = cmd.Run() + } else { + err = cmd.Start() + } + + return err +} + +func readCurrentBenchmarkResults() benchmarkResult { + benchmarkFile, err := os.Open("benchmark_results.json") + if err != nil { + fmt.Print("No benchmark file present.") + return make(benchmarkResult) + } + defer benchmarkFile.Close() + + var result benchmarkResult + jsonParser := json.NewDecoder(benchmarkFile) + if err = jsonParser.Decode(&result); err != nil { + panic("Could not parse existing benchmark file.") + } + return result +} + +func tearDownWeaviate() error { + fmt.Print("Shutting down weaviate.\n") + app := "docker-compose" + arguments := []string{ + "down", + "--remove-orphans", + } + return command(app, arguments, true) +} + +// start weaviate in case it was not already started +// +// We want to benchmark the current state and therefore need to rebuild and then start a docker container +func startWeaviate(c *http.Client, url string) bool { + requestReady := createRequest(url+".well-known/ready", "GET", nil) + + responseStartedCode, _, _, err := performRequest(c, requestReady) + alreadyRunning := err == nil && responseStartedCode == 200 + + if alreadyRunning { + fmt.Print("Weaviate instance already running.\n") + return alreadyRunning + } + + fmt.Print("(Re-) build and start weaviate.\n") + cmd := "./tools/test/run_ci_server.sh" + if err := command(cmd, []string{}, true); err != nil { + panic(errors.Wrap(err, "Command to (re-) build and start weaviate failed")) + } + return false +} + +// createRequest creates requests +func createRequest(url string, method string, payload interface{}) *http.Request { + var body io.Reader = nil + if payload != nil { + jsonBody, err := json.Marshal(payload) + if err != nil { + panic(errors.Wrap(err, "Could not marshal request")) + } + body = bytes.NewBuffer(jsonBody) + } + request, err := http.NewRequest(method, url, 
body) + if err != nil { + panic(errors.Wrap(err, "Could not create request")) + } + request.Header.Add("Content-Type", "application/json") + request.Header.Add("Accept", "application/json") + + return request +} + +// performRequest runs requests +func performRequest(c *http.Client, request *http.Request) (int, []byte, int64, error) { + timeStart := time.Now() + response, err := c.Do(request) + requestTime := time.Since(timeStart).Milliseconds() + + if err != nil { + return 0, nil, requestTime, err + } + + body, err := io.ReadAll(response.Body) + response.Body.Close() + if err != nil { + return 0, nil, requestTime, err + } + + return response.StatusCode, body, requestTime, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/benchmark/benchmark_sift.go b/platform/dbops/binaries/weaviate-src/test/benchmark/benchmark_sift.go new file mode 100644 index 0000000000000000000000000000000000000000..d05e00f13931d15f89d59859b9bec4774658f5a2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/benchmark/benchmark_sift.go @@ -0,0 +1,246 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package main + +import ( + "encoding/binary" + "encoding/json" + "fmt" + "io" + "math" + "net/http" + "os" + "sync" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + + enterrors "github.com/weaviate/weaviate/entities/errors" + "github.com/weaviate/weaviate/entities/models" +) + +const ( + class = "Benchmark" + nrSearchResults = 79 +) + +func createSchemaSIFTRequest(url string) *http.Request { + classObj := &models.Class{ + Class: class, + Description: "Dummy class for benchmarking purposes", + Properties: []*models.Property{ + { + DataType: []string{"int"}, + Description: "The value of the counter in the dataset", + Name: "counter", + }, + }, + VectorIndexConfig: map[string]interface{}{ // values are from benchmark script + "distance": "l2-squared", + "ef": -1, + "efConstruction": 64, + "maxConnections": 64, + "vectorCacheMaxObjects": 1000000000, + }, + Vectorizer: "none", + } + request := createRequest(url+"schema", "POST", classObj) + return request +} + +func float32FromBytes(bytes []byte) float32 { + bits := binary.LittleEndian.Uint32(bytes) + float := math.Float32frombits(bits) + return float +} + +func int32FromBytes(bytes []byte) int { + return int(binary.LittleEndian.Uint32(bytes)) +} + +func readSiftFloat(file string, maxObjects int) []*models.Object { + var objects []*models.Object + + f, err := os.Open("sift/" + file) + if err != nil { + panic(errors.Wrap(err, "Could not open SIFT file")) + } + defer f.Close() + + fi, err := f.Stat() + if err != nil { + panic(errors.Wrap(err, "Could not get SIFT file properties")) + } + fileSize := fi.Size() + if fileSize < 1000000 { + panic("The file is only " + fmt.Sprint(fileSize) + " bytes long. 
Did you forgot to install git lfs?") + } + + // The sift data is a binary file containing floating point vectors + // For each entry, the first 4 bytes is the length of the vector (in number of floats, not in bytes) + // which is followed by the vector data with vector length * 4 bytes. + // |-length-vec1 (4bytes)-|-Vec1-data-(4*length-vector-1 bytes)-|-length-vec2 (4bytes)-|-Vec2-data-(4*length-vector-2 bytes)-| + // The vector length needs to be converted from bytes to int + // The vector data needs to be converted from bytes to float + // Note that the vector entries are of type float but are integer numbers eg 2.0 + bytesPerF := 4 + vectorLengthFloat := 128 + vectorBytes := make([]byte, bytesPerF+vectorLengthFloat*bytesPerF) + for i := 0; i >= 0; i++ { + _, err = f.Read(vectorBytes) + if errors.Is(err, io.EOF) { + break + } else if err != nil { + panic(err) + } + if int32FromBytes(vectorBytes[0:bytesPerF]) != vectorLengthFloat { + panic("Each vector must have 128 entries.") + } + var vectorFloat []float32 + for j := 0; j < vectorLengthFloat; j++ { + start := (j + 1) * bytesPerF // first bytesPerF are length of vector + vectorFloat = append(vectorFloat, float32FromBytes(vectorBytes[start:start+bytesPerF])) + } + ObjectUuid := uuid.New() + object := &models.Object{ + Class: class, + ID: strfmt.UUID(ObjectUuid.String()), + Vector: models.C11yVector(vectorFloat), + Properties: map[string]interface{}{ + "counter": i, + }, + } + objects = append(objects, object) + + if i >= maxObjects { + break + } + } + if len(objects) < maxObjects { + panic("Could not load all elements.") + } + + return objects +} + +func benchmarkSift(c *http.Client, url string, maxObjects, numBatches int) (map[string]int64, error) { + logger := logrus.New() + clearExistingObjects(c, url) + objects := readSiftFloat("sift_base.fvecs", maxObjects) + queries := readSiftFloat("sift_query.fvecs", maxObjects/100) + requestSchema := createSchemaSIFTRequest(url) + + passedTime := make(map[string]int64) + 
+ // Add schema + responseSchemaCode, _, timeSchema, err := performRequest(c, requestSchema) + passedTime["AddSchema"] = timeSchema + if err != nil { + return nil, errors.Wrap(err, "Could not add schema, error: ") + } else if responseSchemaCode != 200 { + return nil, errors.Errorf("Could not add schma, http error code: %v", responseSchemaCode) + } + + // Batch-add + passedTime["BatchAdd"] = 0 + wg := sync.WaitGroup{} + batchSize := len(objects) / numBatches + errorChan := make(chan error, numBatches) + timeChan := make(chan int64, numBatches) + + for i := 0; i < numBatches; i++ { + batchId := i + wg.Add(1) + enterrors.GoWrapper(func() { + batchObjects := objects[batchId*batchSize : (batchId+1)*batchSize] + requestAdd := createRequest(url+"batch/objects", "POST", batch{batchObjects}) + responseAddCode, _, timeBatchAdd, err := performRequest(c, requestAdd) + + timeChan <- timeBatchAdd + if err != nil { + errorChan <- errors.Wrap(err, "Could not add batch, error: ") + } else if responseAddCode != 200 { + errorChan <- errors.Errorf("Could not add batch, http error code: %v", responseAddCode) + } + wg.Done() + }, logger) + + } + wg.Wait() + close(errorChan) + close(timeChan) + for err := range errorChan { + return nil, err + } + for timing := range timeChan { + passedTime["BatchAdd"] += timing + } + + // Read entries + nrSearchResultsUse := nrSearchResults + if maxObjects < nrSearchResultsUse { + nrSearchResultsUse = maxObjects + } + requestRead := createRequest(url+"objects?limit="+fmt.Sprint(nrSearchResultsUse)+"&class="+class, "GET", nil) + responseReadCode, body, timeGetObjects, err := performRequest(c, requestRead) + passedTime["GetObjects"] = timeGetObjects + if err != nil { + return nil, errors.Wrap(err, "Could not read objects") + } else if responseReadCode != 200 { + return nil, errors.New("Could not read objects, http error code: " + fmt.Sprint(responseReadCode)) + } + var result map[string]interface{} + if err := json.Unmarshal(body, &result); err != nil { + 
return nil, errors.Wrap(err, "Could not unmarshal read response") + } + if int(result["totalResults"].(float64)) != nrSearchResultsUse { + errString := "Found " + fmt.Sprint(int(result["totalResults"].(float64))) + + " results. Expected " + fmt.Sprint(nrSearchResultsUse) + "." + return nil, errors.New(errString) + } + + // Use sample queries + for _, query := range queries { + queryString := "{Get{" + class + "(nearVector: {vector:" + fmt.Sprint(query.Vector) + " }){counter}}}" + requestQuery := createRequest(url+"graphql", "POST", models.GraphQLQuery{ + Query: queryString, + }) + responseQueryCode, body, timeQuery, err := performRequest(c, requestQuery) + passedTime["Query"] += timeQuery + if err != nil { + return nil, errors.Wrap(err, "Could not query objects") + } else if responseQueryCode != 200 { + return nil, errors.Errorf("Could not query objects, http error code: %v", responseQueryCode) + } + var result map[string]interface{} + if err := json.Unmarshal(body, &result); err != nil { + return nil, errors.Wrap(err, "Could not unmarshal query response") + } + if result["data"] == nil || result["errors"] != nil { + return nil, errors.New("GraphQL Error") + } + } + + // Delete class (with schema and all entries) to clear all entries so next round can start fresh + requestDelete := createRequest(url+"schema/"+class, "DELETE", nil) + responseDeleteCode, _, timeDelete, err := performRequest(c, requestDelete) + passedTime["Delete"] += timeDelete + if err != nil { + return nil, errors.Wrap(err, "Could not delete class") + } else if responseDeleteCode != 200 { + return nil, errors.Errorf("Could not delete class, http error code: %v", responseDeleteCode) + } + + return passedTime, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/benchmark/run_performance_tracker.sh b/platform/dbops/binaries/weaviate-src/test/benchmark/run_performance_tracker.sh new file mode 100644 index 0000000000000000000000000000000000000000..eed58eb1f8823c19ca70bc6abd95372a823284b7 --- 
/dev/null +++ b/platform/dbops/binaries/weaviate-src/test/benchmark/run_performance_tracker.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +# change to script directory +cd "${0%/*}" || exit + +# create benchmark/sift directory +mkdir -p ./sift + +# check that the files sift_base.fvecs and sift_query.fvecs exist in the benchmark/sift directory. +# download them otherwise. +if [ ! -f "./sift/sift_base.fvecs" ] || [ ! -f "./sift/sift_query.fvecs" ]; then + echo "Downloading SIFT dataset" + wget ftp://ftp.irisa.fr/local/texmex/corpus/sift.tar.gz -P /tmp/ + tar -xvf /tmp/sift.tar.gz -C /tmp + mv /tmp/sift/sift_base.fvecs /tmp/sift/sift_query.fvecs ./sift/ +fi + +go run . -name "SIFT" -numberEntries 100000 -fail "-1" -numBatches "1" diff --git a/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/.gitignore b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..e7723559d7d0e6916d8906e9bf463b73ec08106b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/.gitignore @@ -0,0 +1,2 @@ +datasets/ +datasets.yml \ No newline at end of file diff --git a/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/convert_json_to_jsonl.sh b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/convert_json_to_jsonl.sh new file mode 100644 index 0000000000000000000000000000000000000000..69b00095d0bebb675538b7c43d875b826c55611d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/convert_json_to_jsonl.sh @@ -0,0 +1,18 @@ +if [ -z "$1" ]; then + echo "Usage: $0 input.json" + exit 1 +fi + +input_file="$1" +output_file="${input_file%.json}.jsonl" + +# Convert JSON to JSONL using jq +jq -c '.[]' "$input_file" > "$output_file" + +# Check if the output file was created and is not empty +if [ -s "$output_file" ]; then + echo "Conversion complete. Output saved to $output_file" +else + echo "Conversion failed. Please check the input file format." 
+fi + diff --git a/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/datasets_default.yml b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/datasets_default.yml new file mode 100644 index 0000000000000000000000000000000000000000..6bd5379436e8ad42d3aa27c5c02fd23099865d23 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/datasets_default.yml @@ -0,0 +1,54 @@ +datasets: + - id: msmarco + path: ./datasets/msmarco + corpus: + indexed_properties: + - title + - text + unindexed_properties: + - _id + queries: + property: text + - id: nfcorpus + path: ./datasets/nfcorpus + corpus: + indexed_properties: + - title + - text + unindexed_properties: + - _id + queries: + property: text + - id: fiqa + path: ./datasets/fiqa + corpus: + indexed_properties: + - title + - text + unindexed_properties: + - _id + queries: + property: query + matching_results: original_matchingDocIDs + property_with_id: _id + - id: quora + path: ./datasets/quora + corpus: + indexed_properties: + - text + unindexed_properties: + - _id + queries: + property: text + - id: webistouche + path: ./datasets/webis-touche2020 + corpus: + indexed_properties: + - text + - title + unindexed_properties: + - __id + queries: + property: query + matching_results: matchingIDs + property_with_id: __id diff --git a/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/gather_stats.sh b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/gather_stats.sh new file mode 100644 index 0000000000000000000000000000000000000000..e0a7f8d1cf6e24248ad719da432e58d8702976c7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/gather_stats.sh @@ -0,0 +1,45 @@ +#!/bin/sh + +# Check if a name parameter is provided +if [ -z "$1" ]; then + echo "Usage: $0 [-p | --png]" + exit 1 +fi + +NAME=$1 +SAVE_PNG=false + +# Check for -p or --png flag +if [ "$2" = "-p" ] || [ "$2" = "--png" ]; then + SAVE_PNG=true +fi + +if $SAVE_PNG; then + go tool pprof -png -lines 
http://localhost:6060/debug/pprof/heap > "${NAME}_heap.png" & + go tool pprof -png http://localhost:6060/debug/pprof/profile\?seconds\=30 > "${NAME}_profile.png" & + go tool pprof -png http://localhost:6060/debug/pprof/allocs > "${NAME}_allocs.png" & + + wait + + open "${NAME}_heap.png" + open "${NAME}_profile.png" + open "${NAME}_allocs.png" +else + curl http://127.0.0.1:6060/debug/pprof/allocs\?seconds=30 > "${NAME}_allocs.prof" + curl http://127.0.0.1:6060/debug/pprof/heap\?seconds=30 > "${NAME}_heap.prof" + curl http://127.0.0.1:6060/debug/pprof/profile\?seconds=30 > "${NAME}_profile.prof" + +# Check if the FlameGraph directory exists +if [ -d "./FlameGraph" ]; then + go tool pprof -raw -output="${NAME}_cpu.txt" 'http://localhost:6060/debug/pprof/profile?seconds=30' + # Checkout the FlameGraph git repository for really nice flame graphs + # git checkout https://github.com/brendangregg/FlameGraph + ./FlameGraph/stackcollapse-go.pl "${NAME}_cpu.txt" | ./FlameGraph/flamegraph.pl > "${NAME}_flame.svg" & + open "${NAME}_flame.svg" +else + echo "FlameGraph directory not found. Please clone the FlameGraph repository at https://github.com/brendangregg/FlameGraph to get nice flamegraphs." +fi + +fi + + diff --git a/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/go.mod b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/go.mod new file mode 100644 index 0000000000000000000000000000000000000000..e042e17ed16c13a915bd1d4736c7e67005fae560 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/go.mod @@ -0,0 +1,88 @@ +module github.com/weaviate/weaviate/test/benchmark_bm25 + +go 1.24 + +replace github.com/weaviate/weaviate => ../.. 
+ +require ( + github.com/go-openapi/strfmt v0.23.0 + github.com/google/uuid v1.6.0 + github.com/spf13/cobra v1.8.1 + github.com/weaviate/weaviate v1.29.0 + github.com/weaviate/weaviate-go-client/v5 v5.0.2 + gopkg.in/yaml.v3 v3.0.1 +) + +require ( + github.com/armon/go-metrics v0.4.1 // indirect + github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/buger/jsonparser v1.1.1 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/getsentry/sentry-go v0.30.0 // indirect + github.com/go-openapi/analysis v0.23.0 // indirect + github.com/go-openapi/errors v0.22.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/jsonreference v0.21.0 // indirect + github.com/go-openapi/loads v0.22.0 // indirect + github.com/go-openapi/runtime v0.24.2 // indirect + github.com/go-openapi/spec v0.21.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect + github.com/go-openapi/validate v0.24.0 // indirect + github.com/google/btree v1.1.3 // indirect + github.com/gregjones/httpcache v0.0.0-20171119193500-2bcd89a1743f // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-metrics v0.5.4 // indirect + github.com/hashicorp/go-msgpack/v2 v2.1.2 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/hashicorp/go-sockaddr v1.0.0 // indirect + github.com/hashicorp/golang-lru v1.0.2 // indirect + github.com/hashicorp/memberlist v0.5.2 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/launchdarkly/ccache v1.1.0 // indirect + github.com/launchdarkly/eventsource v1.6.2 // 
indirect + github.com/launchdarkly/go-jsonstream/v3 v3.1.0 // indirect + github.com/launchdarkly/go-sdk-common/v3 v3.2.0 // indirect + github.com/launchdarkly/go-sdk-events/v3 v3.4.0 // indirect + github.com/launchdarkly/go-semver v1.0.3 // indirect + github.com/launchdarkly/go-server-sdk-evaluation/v3 v3.0.1 // indirect + github.com/launchdarkly/go-server-sdk/v7 v7.8.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/miekg/dns v1.1.43 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/oklog/ulid v1.3.1 // indirect + github.com/opentracing/opentracing-go v1.2.0 // indirect + github.com/patrickmn/go-cache v2.1.0+incompatible // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/prometheus/client_golang v1.20.5 // indirect + github.com/prometheus/client_model v0.6.2 // indirect + github.com/prometheus/common v0.65.0 // indirect + github.com/prometheus/procfs v0.15.1 // indirect + github.com/rogpeppe/go-internal v1.12.0 // indirect + github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/spf13/pflag v1.0.5 // indirect + github.com/stretchr/objx v0.5.2 // indirect + github.com/stretchr/testify v1.10.0 // indirect + github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect + github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + github.com/weaviate/sroar v0.0.11 // indirect + go.mongodb.org/mongo-driver v1.14.0 // indirect + golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa // indirect + golang.org/x/net v0.42.0 // indirect + golang.org/x/oauth2 v0.30.0 // indirect + golang.org/x/sync v0.16.0 // indirect + golang.org/x/sys v0.34.0 // indirect + golang.org/x/text v0.27.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 // indirect + 
google.golang.org/grpc v1.74.2 // indirect + google.golang.org/protobuf v1.36.6 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect +) diff --git a/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/go.sum b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/go.sum new file mode 100644 index 0000000000000000000000000000000000000000..0029f7e8915f0d41b32776a4ebc1efdcf707b41d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/go.sum @@ -0,0 +1,474 @@ +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/RoaringBitmap/roaring v0.6.1 h1:O36Tdaj1Fi/zyr25shTHwlQPGdq53+u4WkM08AOEjiE= +github.com/RoaringBitmap/roaring v0.6.1/go.mod h1:WZ83fjBF/7uBHi6QoFyfGL4+xuV4Qn+xFkm4+vSzrhE= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA= +github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4= +github.com/asaskevich/govalidator 
v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= +github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew 
v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/getsentry/sentry-go v0.30.0 h1:lWUwDnY7sKHaVIoZ9wYqRHJ5iEmoc0pqcRqFkosKzBo= +github.com/getsentry/sentry-go v0.30.0/go.mod h1:WU9B9/1/sHDqeV8T+3VwwbjeR5MSXs/6aqG3mqZrezA= +github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= +github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE= +github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-openapi/analysis v0.21.2/go.mod 
h1:HZwRk4RRisyG8vx2Oe6aqeSQcoxRp47Xkp3+K6q+LdY= +github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU= +github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo= +github.com/go-openapi/errors v0.19.8/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= +github.com/go-openapi/errors v0.19.9/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= +github.com/go-openapi/errors v0.20.2/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M= +github.com/go-openapi/errors v0.22.0 h1:c4xY/OLxUBSTiepAg3j/MHuAv5mJhnf53LLMWFB+u/w= +github.com/go-openapi/errors v0.22.0/go.mod h1:J3DmZScxCDufmIMsdOuDHxJbdOGC0xtUynjIx092vXE= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns= +github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= +github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= +github.com/go-openapi/loads v0.21.1/go.mod h1:/DtAMXXneXFjbQMGEtbamCZb+4x7eGwkvZCvBmwUG+g= +github.com/go-openapi/loads v0.22.0 h1:ECPGd4jX1U6NApCGG1We+uEozOAvXvJSF4nnwHZ8Aco= +github.com/go-openapi/loads v0.22.0/go.mod h1:yLsaTCS92mnSAZX5WWoxszLj0u+Ojl+Zs5Stn1oF+rs= +github.com/go-openapi/runtime v0.24.2 h1:yX9HMGQbz32M87ECaAhGpJjBmErO3QLcgdZj9BzGx7c= +github.com/go-openapi/runtime v0.24.2/go.mod h1:AKurw9fNre+h3ELZfk6ILsfvPN+bvvlaU/M9q/r9hpk= +github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I= +github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY= 
+github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= +github.com/go-openapi/strfmt v0.21.0/go.mod h1:ZRQ409bWMj+SOgXofQAGTIo2Ebu72Gs+WaRADcS5iNg= +github.com/go-openapi/strfmt v0.21.1/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k= +github.com/go-openapi/strfmt v0.21.2/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k= +github.com/go-openapi/strfmt v0.23.0 h1:nlUS6BCqcnAk0pyhi9Y+kdDVZdZMHfEKQiS4HaMgO/c= +github.com/go-openapi/strfmt v0.23.0/go.mod h1:NrtIpfKtWIygRkKVsxh7XQMDQW5HKQl6S5ik2elW+K4= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-openapi/swag v0.21.1/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= +github.com/go-openapi/validate v0.21.0/go.mod h1:rjnrwK57VJ7A8xqfpAOEKRH8yQSGUriMu5/zuPSQ1hg= +github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58= +github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-stack/stack v1.8.1/go.mod h1:dcoOX6HbPZSZptuspn9bctJ+N/CnF5gGygcUP3XYfe4= +github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= +github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= +github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= +github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/flect 
v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs= +github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk= +github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28= +github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo= +github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk= +github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw= +github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360= +github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg= +github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE= +github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8= +github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= +github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= +github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/golang/protobuf 
v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= +github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= 
+github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gregjones/httpcache v0.0.0-20171119193500-2bcd89a1743f h1:kOkUP6rcVVqC+KlKKENKtgfFfJyDySYhqL9srXooghY= +github.com/gregjones/httpcache v0.0.0-20171119193500-2bcd89a1743f/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-metrics v0.5.4 h1:8mmPiIJkTPPEbAiV97IxdAGNdRdaWwVap1BU6elejKY= +github.com/hashicorp/go-metrics v0.5.4/go.mod h1:CG5yz4NZ/AI/aQt9Ucm/vdBnbh7fvmv4lxZ350i+QQI= +github.com/hashicorp/go-msgpack/v2 v2.1.2 h1:4Ee8FTp834e+ewB71RDrQ0VKpyFdrKOjvYtnQ/ltVj0= +github.com/hashicorp/go-msgpack/v2 v2.1.2/go.mod h1:upybraOAblm4S7rx0+jeNy+CWWhzywQsSRV5033mMu4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= 
+github.com/hashicorp/go-sockaddr v1.0.0 h1:GeH6tui99pF4NJgfnhp+L6+FfobzVW3Ah46sLo0ICXs= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= +github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/memberlist v0.5.2 h1:rJoNPWZ0juJBgqn48gjy59K5H4rNgvUoM1kUD7bXiuI= +github.com/hashicorp/memberlist v0.5.2/go.mod h1:Ri9p/tRShbjYnpNf4FFPXG7wxEGY4Nrcn6E7jrVa//4= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/julienschmidt/httprouter v1.2.0/go.mod 
h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003 h1:vJ0Snvo+SLMY72r5J4sEfkuE7AFbixEP2qRbEcum/wA= +github.com/karlseguin/expect v1.0.2-0.20190806010014-778a5f0c6003/go.mod h1:zNBxMY8P21owkeogJELCLeHIt+voOSduHYTFUbwRAV8= +github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= +github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= +github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod 
h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/launchdarkly/ccache v1.1.0 h1:voD1M+ZJXR3MREOKtBwgTF9hYHl1jg+vFKS/+VAkR2k= +github.com/launchdarkly/ccache v1.1.0/go.mod h1:TlxzrlnzvYeXiLHmesMuvoZetu4Z97cV1SsdqqBJi1Q= +github.com/launchdarkly/eventsource v1.6.2 h1:5SbcIqzUomn+/zmJDrkb4LYw7ryoKFzH/0TbR0/3Bdg= +github.com/launchdarkly/eventsource v1.6.2/go.mod h1:LHxSeb4OnqznNZxCSXbFghxS/CjIQfzHovNoAqbO/Wk= +github.com/launchdarkly/go-jsonstream/v3 v3.1.0 h1:U/7/LplZO72XefBQ+FzHf6o4FwLHVqBE+4V58Ornu/E= +github.com/launchdarkly/go-jsonstream/v3 v3.1.0/go.mod h1:2Pt4BR5AwWgsuVTCcIpB6Os04JFIKWfoA+7faKkZB5E= +github.com/launchdarkly/go-sdk-common/v3 v3.2.0 h1:LzwlrXRBPC7NjdbnDxio8YGHMvDrNb4i6lbjpLgwsyk= +github.com/launchdarkly/go-sdk-common/v3 v3.2.0/go.mod h1:mXFmDGEh4ydK3QilRhrAyKuf9v44VZQWnINyhqbbOd0= +github.com/launchdarkly/go-sdk-events/v3 v3.4.0 h1:22sVSEDEXpdOEK3UBtmThwsUHqc+cbbe/pJfsliBAA4= +github.com/launchdarkly/go-sdk-events/v3 v3.4.0/go.mod h1:oepYWQ2RvvjfL2WxkE1uJJIuRsIMOP4WIVgUpXRPcNI= +github.com/launchdarkly/go-semver v1.0.3 h1:agIy/RN3SqeQDIfKkl+oFslEdeIs7pgsJBs3CdCcGQM= +github.com/launchdarkly/go-semver v1.0.3/go.mod h1:xFmMwXba5Mb+3h72Z+VeSs9ahCvKo2QFUTHRNHVqR28= +github.com/launchdarkly/go-server-sdk-evaluation/v3 v3.0.1 h1:rTgcYAFraGFj7sBMB2b7JCYCm0b9kph4FaMX02t4osQ= +github.com/launchdarkly/go-server-sdk-evaluation/v3 v3.0.1/go.mod h1:fPS5d+zOsgFnMunj+Ki6jjlZtFvo4h9iNbtNXxzYn58= +github.com/launchdarkly/go-server-sdk/v7 v7.8.0 h1:QRJmx30DqOVH81FiwFiHAi81FCiwAS/0fRBLJIGTK2U= +github.com/launchdarkly/go-server-sdk/v7 v7.8.0/go.mod h1:rf/K2E4s5OjkB8Nn3ATDOR6W6S3U7D8FJ3WAKLxSTIQ= +github.com/launchdarkly/go-test-helpers/v2 v2.2.0 h1:L3kGILP/6ewikhzhdNkHy1b5y4zs50LueWenVF0sBbs= +github.com/launchdarkly/go-test-helpers/v2 v2.2.0/go.mod h1:L7+th5govYp5oKU9iN7To5PgznBuIjBPn+ejqKR0avw= +github.com/launchdarkly/go-test-helpers/v3 v3.0.2 h1:rh0085g1rVJM5qIukdaQ8z1XTWZztbJ49vRZuveqiuU= +github.com/launchdarkly/go-test-helpers/v3 v3.0.2/go.mod 
h1:u2ZvJlc/DDJTFrshWW50tWMZHLVYXofuSHUfTU/eIwM= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= +github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/miekg/dns v1.1.43 h1:JKfpVSCB84vrAmHzyrsxB5NAr5kLoMXZArPSw7Qlgyg= +github.com/miekg/dns v1.1.43/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4= +github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/montanaflynn/stats 
v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae h1:VeRdUYdCw49yizlSbMEn2SZ+gT+3IUKx8BqxyQdz+BY= +github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= +github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc= +github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ= +github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= +github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= +github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= 
+github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y= +github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= 
+github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE= +github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= +github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= +github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= +github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= 
+github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= +github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.10.0 
h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= +github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8= +github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= +github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= +github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +github.com/weaviate/sroar v0.0.11 h1:yO25NXecuUcrcgdwTvahT91RDS6FDmaNSjgA7nCKVcw= +github.com/weaviate/sroar v0.0.11/go.mod h1:VgBRWPKPHRV/k9ABnD5w7QgdH9xe4RACzDzkrrK977g= +github.com/weaviate/weaviate-go-client/v5 v5.0.2 h1:aptmTJy6d4OxGHBTGnqHheJe0WDbzH2SVmQkvy7+EGY= +github.com/weaviate/weaviate-go-client/v5 v5.0.2/go.mod h1:CwZehIL4s3VfkzTu12Wy8VAUtELRtQFUt2ZniBF/lQM= +github.com/willf/bitset v1.1.11 h1:N7Z7E9UvjW+sGsEl7k/SJrvY2reP1A07MrGuCjIOjRE= +github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI= +github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0 h1:3UeQBvD0TFrlVjOeLOBz+CPAI8dnbqNSVwUwRrkp7vQ= +github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0/go.mod h1:IXCdmsXIht47RaVFLEdVnh1t+pgYtTAhQGj73kz+2DM= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= +github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= +go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= 
+go.mongodb.org/mongo-driver v1.7.5/go.mod h1:VXEWRZ6URJIkUq2SCAyapmhH0ZLRBP+FT4xhp5Zvxng= +go.mongodb.org/mongo-driver v1.8.3/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= +go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= +go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg= +go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E= +go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE= +go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs= +go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs= +go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY= +go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis= +go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4= +go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= +go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa h1:ELnwvuAXPNtPk1TJRuGkI9fDTwym6AYBu0qzT8AcHdI= +golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= +golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= +golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 
+golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= +golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod 
h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= +golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 h1:MAKi5q709QWfnkkpNQ0M12hYJ1+e8qYVDyowc4U1XZM= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4= +google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= 
+google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod 
h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/main.go b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/main.go new file mode 100644 index 0000000000000000000000000000000000000000..04cff61e1872b011f2f87c8ccd59980ce7995a9f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/benchmark_bm25/main.go @@ -0,0 +1,18 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package main + +import "github.com/weaviate/weaviate/test/benchmark_bm25/cmd" + +func main() { + cmd.Execute() +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/api_key.go b/platform/dbops/binaries/weaviate-src/test/docker/api_key.go new file mode 100644 index 0000000000000000000000000000000000000000..7961f1d1c5127f48353e30f35dc86a5b38fe7bdb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/api_key.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package docker + +type ApiKeyUser struct { + Key string + Username string + Admin bool + Viewer bool +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/azurite.go b/platform/dbops/binaries/weaviate-src/test/docker/azurite.go new file mode 100644 index 0000000000000000000000000000000000000000..3ac77a0d50ccacb5b8f44b085b78d774b38c2cb1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/azurite.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "time" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const Azurite = "azurite" + +func startAzurite(ctx context.Context, networkName string) (*DockerContainer, error) { + blobPort := nat.Port("10000/tcp") + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: "mcr.microsoft.com/azure-storage/azurite", + ExposedPorts: []string{"10000/tcp", "10001/tcp", "10002/tcp"}, + Hostname: Azurite, + AutoRemove: true, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {Azurite}, + }, + Name: Azurite, + Cmd: []string{Azurite, "--blobHost", "0.0.0.0", "--queueHost", "0.0.0.0", "--tableHost", "0.0.0.0"}, + WaitingFor: wait. 
+ ForAll( + wait.ForLog("Azurite Blob service is successfully listening at http://0.0.0.0:10000"), + wait.ForLog("Azurite Queue service is successfully listening at http://0.0.0.0:10001"), + wait.ForLog("Azurite Table service is successfully listening at http://0.0.0.0:10002"), + wait.ForListeningPort(blobPort), + wait.ForListeningPort("10001/tcp"), + wait.ForListeningPort("10002/tcp"), + ).WithDeadline(60 * time.Second), + }, + Started: true, + Reuse: true, + }) + if err != nil { + return nil, err + } + uri, err := container.PortEndpoint(ctx, blobPort, "") + if err != nil { + return nil, err + } + envSettings := make(map[string]string) + connectionString := "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://%s/devstoreaccount1;" + blobEndpoint := fmt.Sprintf("%s:%s", Azurite, blobPort.Port()) + envSettings["AZURE_STORAGE_CONNECTION_STRING"] = fmt.Sprintf(connectionString, blobEndpoint) + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = endpoint{blobPort, uri} + return &DockerContainer{Azurite, endpoints, container, envSettings}, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/bind.go b/platform/dbops/binaries/weaviate-src/test/docker/bind.go new file mode 100644 index 0000000000000000000000000000000000000000..90596e3de8bf41adff19be760b8359d4a569d410 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/bind.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "time" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const Multi2VecBind = "multi2vec-bind" + +func startM2VBind(ctx context.Context, networkName, bindImage string) (*DockerContainer, error) { + image := "semitechnologies/multi2vec-bind:imagebind" + if len(bindImage) > 0 { + image = bindImage + } + port := nat.Port("8080/tcp") + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: image, + Hostname: Multi2VecBind, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {Multi2VecBind}, + }, + Name: Multi2VecBind, + ExposedPorts: []string{"8080/tcp"}, + AutoRemove: true, + WaitingFor: wait. + ForHTTP("/.well-known/ready"). + WithPort(port). + WithStatusCodeMatcher(func(status int) bool { + return status == 204 + }). 
+ WithStartupTimeout(240 * time.Second), + }, + Started: true, + Reuse: true, + }) + if err != nil { + return nil, err + } + uri, err := container.PortEndpoint(ctx, port, "") + if err != nil { + return nil, err + } + envSettings := make(map[string]string) + envSettings["BIND_INFERENCE_API"] = fmt.Sprintf("http://%s:%s", Multi2VecBind, port.Port()) + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = endpoint{port, uri} + return &DockerContainer{Multi2VecBind, endpoints, container, envSettings}, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/clip.go b/platform/dbops/binaries/weaviate-src/test/docker/clip.go new file mode 100644 index 0000000000000000000000000000000000000000..f66ee1f3cd2b95eaa26d2c2aaf2445f4a1a8bb38 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/clip.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "time" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const Multi2VecCLIP = "multi2vec-clip" + +func startM2VClip(ctx context.Context, networkName, clipImage string) (*DockerContainer, error) { + image := "semitechnologies/multi2vec-clip:sentence-transformers-clip-ViT-B-32-multilingual-v1" + if len(clipImage) > 0 { + image = clipImage + } + port := nat.Port("8080/tcp") + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: image, + Hostname: Multi2VecCLIP, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {Multi2VecCLIP}, + }, + Name: Multi2VecCLIP, + ExposedPorts: []string{"8080/tcp"}, + AutoRemove: true, + WaitingFor: wait. + ForHTTP("/.well-known/ready"). + WithPort(port). + WithStatusCodeMatcher(func(status int) bool { + return status == 204 + }). 
+ WithStartupTimeout(240 * time.Second), + }, + Started: true, + Reuse: true, + }) + if err != nil { + return nil, err + } + uri, err := container.PortEndpoint(ctx, port, "") + if err != nil { + return nil, err + } + envSettings := make(map[string]string) + envSettings["CLIP_INFERENCE_API"] = fmt.Sprintf("http://%s:%s", Multi2VecCLIP, port.Port()) + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = endpoint{port, uri} + return &DockerContainer{Multi2VecCLIP, endpoints, container, envSettings}, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/compose.go b/platform/dbops/binaries/weaviate-src/test/docker/compose.go new file mode 100644 index 0000000000000000000000000000000000000000..905481f400404a3038cfeeb63364a547b627c2c4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/compose.go @@ -0,0 +1,1049 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "os" + "slices" + "strconv" + "strings" + "time" + + "github.com/pkg/errors" + tescontainersnetwork "github.com/testcontainers/testcontainers-go/network" + "golang.org/x/sync/errgroup" + + modstgazure "github.com/weaviate/weaviate/modules/backup-azure" + modstgfilesystem "github.com/weaviate/weaviate/modules/backup-filesystem" + modstggcs "github.com/weaviate/weaviate/modules/backup-gcs" + modstgs3 "github.com/weaviate/weaviate/modules/backup-s3" + modgenerativeanthropic "github.com/weaviate/weaviate/modules/generative-anthropic" + modgenerativeanyscale "github.com/weaviate/weaviate/modules/generative-anyscale" + modgenerativeaws "github.com/weaviate/weaviate/modules/generative-aws" + modgenerativecohere "github.com/weaviate/weaviate/modules/generative-cohere" + modgenerativefriendliai "github.com/weaviate/weaviate/modules/generative-friendliai" + modgenerativegoogle "github.com/weaviate/weaviate/modules/generative-google" + modgenerativenvidia "github.com/weaviate/weaviate/modules/generative-nvidia" + modgenerativeollama "github.com/weaviate/weaviate/modules/generative-ollama" + modgenerativeopenai "github.com/weaviate/weaviate/modules/generative-openai" + modgenerativexai "github.com/weaviate/weaviate/modules/generative-xai" + modmulti2multivecjinaai "github.com/weaviate/weaviate/modules/multi2multivec-jinaai" + modmulti2veccohere "github.com/weaviate/weaviate/modules/multi2vec-cohere" + modmulti2vecgoogle "github.com/weaviate/weaviate/modules/multi2vec-google" + modmulti2vecjinaai "github.com/weaviate/weaviate/modules/multi2vec-jinaai" + modmulti2vecnvidia "github.com/weaviate/weaviate/modules/multi2vec-nvidia" + modmulti2vecvoyageai "github.com/weaviate/weaviate/modules/multi2vec-voyageai" + modsloads3 "github.com/weaviate/weaviate/modules/offload-s3" + modqnaopenai "github.com/weaviate/weaviate/modules/qna-openai" + modrerankercohere 
"github.com/weaviate/weaviate/modules/reranker-cohere" + modrerankernvidia "github.com/weaviate/weaviate/modules/reranker-nvidia" + modrerankervoyageai "github.com/weaviate/weaviate/modules/reranker-voyageai" + modtext2colbertjinaai "github.com/weaviate/weaviate/modules/text2multivec-jinaai" + modaws "github.com/weaviate/weaviate/modules/text2vec-aws" + modcohere "github.com/weaviate/weaviate/modules/text2vec-cohere" + modgoogle "github.com/weaviate/weaviate/modules/text2vec-google" + modhuggingface "github.com/weaviate/weaviate/modules/text2vec-huggingface" + modjinaai "github.com/weaviate/weaviate/modules/text2vec-jinaai" + modmistral "github.com/weaviate/weaviate/modules/text2vec-mistral" + modmodel2vec "github.com/weaviate/weaviate/modules/text2vec-model2vec" + modnvidia "github.com/weaviate/weaviate/modules/text2vec-nvidia" + modollama "github.com/weaviate/weaviate/modules/text2vec-ollama" + modopenai "github.com/weaviate/weaviate/modules/text2vec-openai" + modvoyageai "github.com/weaviate/weaviate/modules/text2vec-voyageai" + modweaviateembed "github.com/weaviate/weaviate/modules/text2vec-weaviate" +) + +const ( + // envTestWeaviateImage can be passed to tests to spin up docker compose with given image + envTestWeaviateImage = "TEST_WEAVIATE_IMAGE" + // envTestText2vecTransformersImage adds ability to pass a custom image to module tests + envTestText2vecTransformersImage = "TEST_TEXT2VEC_TRANSFORMERS_IMAGE" + // envTestText2vecContextionaryImage adds ability to pass a custom image to module tests + envTestText2vecContextionaryImage = "TEST_TEXT2VEC_CONTEXTIONARY_IMAGE" + // envTestQnATransformersImage adds ability to pass a custom image to module tests + envTestQnATransformersImage = "TEST_QNA_TRANSFORMERS_IMAGE" + // envTestSUMTransformersImage adds ability to pass a custom image to module tests + envTestSUMTransformersImage = "TEST_SUM_TRANSFORMERS_IMAGE" + // envTestMulti2VecCLIPImage adds ability to pass a custom CLIP image to module tests + 
envTestMulti2VecCLIPImage = "TEST_MULTI2VEC_CLIP_IMAGE" + // envTestMulti2VecBindImage adds ability to pass a custom BIND image to module tests + envTestMulti2VecBindImage = "TEST_MULTI2VEC_BIND_IMAGE" + // envTestImg2VecNeuralImage adds ability to pass a custom Im2Vec Neural image to module tests + envTestImg2VecNeuralImage = "TEST_IMG2VEC_NEURAL_IMAGE" + // envTestRerankerTransformersImage adds ability to pass a custom image to module tests + envTestRerankerTransformersImage = "TEST_RERANKER_TRANSFORMERS_IMAGE" + // envTestText2vecModel2VecImage adds ability to pass a custom image to module tests + envTestText2vecModel2VecImage = "TEST_TEXT2VEC_MODEL2VEC_IMAGE" + // envTestMockOIDCImage adds ability to pass a custom image to module tests + envTestMockOIDCImage = "TEST_MOCKOIDC_IMAGE" + // envTestMockOIDCHelperImage adds ability to pass a custom image to module tests + envTestMockOIDCHelperImage = "TEST_MOCKOIDC_HELPER_IMAGE" +) + +const ( + Ref2VecCentroid = "ref2vec-centroid" +) + +type Compose struct { + enableModules []string + defaultVectorizerModule string + withMinIO bool + withGCS bool + withAzurite bool + withBackendFilesystem bool + withBackendS3 bool + withBackendS3Buckets map[string]string + withBackupS3Bucket string + withOffloadS3Bucket string + withBackendGCS bool + withBackendGCSBucket string + withBackendAzure bool + withBackendAzureContainer string + withTransformers bool + withTransformersImage string + withModel2Vec bool + withContextionary bool + withQnATransformers bool + withWeaviateExposeGRPCPort bool + withSecondWeaviate bool + withWeaviateCluster bool + withWeaviateClusterSize int + + withWeaviateAuth bool + withWeaviateBasicAuth bool + withWeaviateBasicAuthUsername string + withWeaviateBasicAuthPassword string + withWeaviateApiKey bool + weaviateApiKeyUsers []ApiKeyUser + weaviateAdminlistAdminUsers []string + weaviateAdminlistReadOnlyUsers []string + withWeaviateDbUsers bool + withWeaviateRbac bool + weaviateRbacRoots []string + 
weaviateRbacRootGroups []string + weaviateRbacViewerGroups []string + weaviateRbacViewers []string + withSUMTransformers bool + withCentroid bool + withCLIP bool + withGoogleApiKey string + withBind bool + withImg2Vec bool + withRerankerTransformers bool + withOllamaVectorizer bool + withOllamaGenerative bool + withAutoschema bool + withMockOIDC bool + withMockOIDCWithCertificate bool + weaviateEnvs map[string]string + removeEnvs map[string]struct{} +} + +func New() *Compose { + return &Compose{enableModules: []string{}, weaviateEnvs: make(map[string]string), removeEnvs: make(map[string]struct{}), withBackendS3Buckets: make(map[string]string)} +} + +func (d *Compose) WithGCS() *Compose { + d.withGCS = true + d.enableModules = append(d.enableModules, modstggcs.Name) + return d +} + +func (d *Compose) WithAzurite() *Compose { + d.withAzurite = true + d.enableModules = append(d.enableModules, modstgazure.Name) + return d +} + +func (d *Compose) WithText2VecTransformers() *Compose { + d.withTransformers = true + d.enableModules = append(d.enableModules, Text2VecTransformers) + d.defaultVectorizerModule = Text2VecTransformers + return d +} + +func (d *Compose) WithText2VecTransformersImage(image string) *Compose { + d.withTransformers = true + d.withTransformersImage = image + d.enableModules = append(d.enableModules, Text2VecTransformers) + d.defaultVectorizerModule = Text2VecTransformers + return d +} + +func (d *Compose) WithText2VecContextionary() *Compose { + d.withContextionary = true + d.enableModules = append(d.enableModules, Text2VecContextionary) + d.defaultVectorizerModule = Text2VecContextionary + return d +} + +func (d *Compose) WithText2VecOllama() *Compose { + d.withOllamaVectorizer = true + d.enableModules = append(d.enableModules, modollama.Name) + return d +} + +func (d *Compose) WithQnATransformers() *Compose { + d.withQnATransformers = true + d.enableModules = append(d.enableModules, QnATransformers) + return d +} + +func (d *Compose) 
WithBackendFilesystem() *Compose { + d.withBackendFilesystem = true + d.enableModules = append(d.enableModules, modstgfilesystem.Name) + return d +} + +// WithBackendS3 will prepare MinIO +func (d *Compose) WithBackendS3(bucket, region string) *Compose { + d.withBackendS3 = true + d.withBackupS3Bucket = bucket + d.withBackendS3Buckets[bucket] = region + d.withMinIO = true + d.enableModules = append(d.enableModules, modstgs3.Name) + return d +} + +// WithOffloadS3 will prepare MinIO +func (d *Compose) WithOffloadS3(bucket, region string) *Compose { + d.withBackendS3 = true + d.withOffloadS3Bucket = bucket + d.withBackendS3Buckets[bucket] = region + d.withMinIO = true + d.enableModules = append(d.enableModules, modsloads3.Name) + return d +} + +func (d *Compose) WithBackendGCS(bucket string) *Compose { + d.withBackendGCS = true + d.withBackendGCSBucket = bucket + d.withGCS = true + d.enableModules = append(d.enableModules, modstggcs.Name) + return d +} + +func (d *Compose) WithBackendAzure(container string) *Compose { + d.withBackendAzure = true + d.withBackendAzureContainer = container + d.withAzurite = true + d.enableModules = append(d.enableModules, modstgazure.Name) + return d +} + +func (d *Compose) WithSUMTransformers() *Compose { + d.withSUMTransformers = true + d.enableModules = append(d.enableModules, SUMTransformers) + return d +} + +func (d *Compose) WithMulti2VecCLIP() *Compose { + d.withCLIP = true + d.enableModules = append(d.enableModules, Multi2VecCLIP) + return d +} + +func (d *Compose) WithMulti2VecGoogle(apiKey string) *Compose { + d.withGoogleApiKey = apiKey + d.enableModules = append(d.enableModules, modmulti2vecgoogle.Name) + return d +} + +func (d *Compose) WithMulti2VecCohere(apiKey string) *Compose { + d.weaviateEnvs["COHERE_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modmulti2veccohere.Name) + return d +} + +func (d *Compose) WithMulti2VecNvidia(apiKey string) *Compose { + d.weaviateEnvs["NVIDIA_APIKEY"] = apiKey + 
d.enableModules = append(d.enableModules, modmulti2vecnvidia.Name) + return d +} + +func (d *Compose) WithMulti2VecVoyageAI(apiKey string) *Compose { + d.weaviateEnvs["VOYAGEAI_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modmulti2vecvoyageai.Name) + return d +} + +func (d *Compose) WithMulti2VecJinaAI(apiKey string) *Compose { + d.weaviateEnvs["JINAAI_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modmulti2vecjinaai.Name) + return d +} + +func (d *Compose) WithMulti2MultivecJinaAI(apiKey string) *Compose { + d.weaviateEnvs["JINAAI_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modmulti2multivecjinaai.Name) + return d +} + +func (d *Compose) WithMulti2VecBind() *Compose { + d.withBind = true + d.enableModules = append(d.enableModules, Multi2VecBind) + return d +} + +func (d *Compose) WithImg2VecNeural() *Compose { + d.withImg2Vec = true + d.enableModules = append(d.enableModules, Img2VecNeural) + return d +} + +func (d *Compose) WithRef2VecCentroid() *Compose { + d.withCentroid = true + d.enableModules = append(d.enableModules, Ref2VecCentroid) + return d +} + +func (d *Compose) WithText2VecOpenAI(openAIApiKey, openAIOrganization, azureApiKey string) *Compose { + d.weaviateEnvs["OPENAI_APIKEY"] = openAIApiKey + d.weaviateEnvs["OPENAI_ORGANIZATION"] = openAIOrganization + d.weaviateEnvs["AZURE_APIKEY"] = azureApiKey + d.enableModules = append(d.enableModules, modopenai.Name) + return d +} + +func (d *Compose) WithText2VecCohere(apiKey string) *Compose { + d.weaviateEnvs["COHERE_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modcohere.Name) + return d +} + +func (d *Compose) WithText2VecVoyageAI(apiKey string) *Compose { + d.weaviateEnvs["VOYAGEAI_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modvoyageai.Name) + return d +} + +func (d *Compose) WithText2VecGoogle(apiKey string) *Compose { + d.withGoogleApiKey = apiKey + d.enableModules = append(d.enableModules, modgoogle.Name) + return d +} 
+ +func (d *Compose) WithText2VecAWS(accessKey, secretKey, sessionToken string) *Compose { + d.weaviateEnvs["AWS_ACCESS_KEY"] = accessKey + d.weaviateEnvs["AWS_SECRET_KEY"] = secretKey + d.weaviateEnvs["AWS_SESSION_TOKEN"] = sessionToken + d.enableModules = append(d.enableModules, modaws.Name) + return d +} + +func (d *Compose) WithText2VecHuggingFace(apiKey string) *Compose { + d.weaviateEnvs["HUGGINGFACE_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modhuggingface.Name) + return d +} + +func (d *Compose) WithText2VecWeaviate() *Compose { + d.enableModules = append(d.enableModules, modweaviateembed.Name) + return d +} + +func (d *Compose) WithGenerativeOpenAI(openAIApiKey, openAIOrganization, azureApiKey string) *Compose { + d.weaviateEnvs["OPENAI_APIKEY"] = openAIApiKey + d.weaviateEnvs["OPENAI_ORGANIZATION"] = openAIOrganization + d.weaviateEnvs["AZURE_APIKEY"] = azureApiKey + d.enableModules = append(d.enableModules, modgenerativeopenai.Name) + return d +} + +func (d *Compose) WithGenerativeNvidia(apiKey string) *Compose { + d.weaviateEnvs["NVIDIA_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modgenerativenvidia.Name) + return d +} + +func (d *Compose) WithGenerativeXAI(apiKey string) *Compose { + d.weaviateEnvs["XAI_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modgenerativexai.Name) + return d +} + +func (d *Compose) WithText2VecJinaAI(apiKey string) *Compose { + d.weaviateEnvs["JINAAI_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modjinaai.Name) + return d +} + +func (d *Compose) WithText2MultivecJinaAI(apiKey string) *Compose { + d.weaviateEnvs["JINAAI_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modtext2colbertjinaai.Name) + return d +} + +func (d *Compose) WithRerankerNvidia(apiKey string) *Compose { + d.weaviateEnvs["NVIDIA_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modrerankernvidia.Name) + return d +} + +func (d *Compose) WithText2VecNvidia(apiKey 
string) *Compose { + d.weaviateEnvs["NVIDIA_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modnvidia.Name) + return d +} + +func (d *Compose) WithText2VecModel2Vec() *Compose { + d.withModel2Vec = true + d.enableModules = append(d.enableModules, modmodel2vec.Name) + return d +} + +func (d *Compose) WithText2VecMistral(apiKey string) *Compose { + d.weaviateEnvs["MISTRAL_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modmistral.Name) + return d +} + +func (d *Compose) WithGenerativeAWS(accessKey, secretKey, sessionToken string) *Compose { + d.weaviateEnvs["AWS_ACCESS_KEY"] = accessKey + d.weaviateEnvs["AWS_SECRET_KEY"] = secretKey + d.weaviateEnvs["AWS_SESSION_TOKEN"] = sessionToken + d.enableModules = append(d.enableModules, modgenerativeaws.Name) + return d +} + +func (d *Compose) WithGenerativeCohere(apiKey string) *Compose { + d.weaviateEnvs["COHERE_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modgenerativecohere.Name) + return d +} + +func (d *Compose) WithGenerativeFriendliAI(apiKey string) *Compose { + d.weaviateEnvs["FRIENDLI_TOKEN"] = apiKey + d.enableModules = append(d.enableModules, modgenerativefriendliai.Name) + return d +} + +func (d *Compose) WithGenerativeGoogle(apiKey string) *Compose { + d.withGoogleApiKey = apiKey + d.enableModules = append(d.enableModules, modgenerativegoogle.Name) + return d +} + +func (d *Compose) WithGenerativeAnyscale() *Compose { + d.enableModules = append(d.enableModules, modgenerativeanyscale.Name) + return d +} + +func (d *Compose) WithGenerativeOllama() *Compose { + d.withOllamaGenerative = true + d.enableModules = append(d.enableModules, modgenerativeollama.Name) + return d +} + +func (d *Compose) WithGenerativeAnthropic(apiKey string) *Compose { + d.weaviateEnvs["ANTHROPIC_APIKEY"] = apiKey + d.enableModules = append(d.enableModules, modgenerativeanthropic.Name) + return d +} + +func (d *Compose) WithQnAOpenAI() *Compose { + d.enableModules = append(d.enableModules, 
modqnaopenai.Name) + return d +} + +func (d *Compose) WithRerankerCohere() *Compose { + d.enableModules = append(d.enableModules, modrerankercohere.Name) + return d +} + +func (d *Compose) WithRerankerVoyageAI() *Compose { + d.enableModules = append(d.enableModules, modrerankervoyageai.Name) + return d +} + +func (d *Compose) WithRerankerTransformers() *Compose { + d.withRerankerTransformers = true + d.enableModules = append(d.enableModules, RerankerTransformers) + return d +} + +func (d *Compose) WithOllamaVectorizer() *Compose { + d.withOllamaVectorizer = true + return d +} + +func (d *Compose) WithOllamaGenerative() *Compose { + d.withOllamaGenerative = true + return d +} + +func (d *Compose) WithWeaviate() *Compose { + return d.With1NodeCluster() +} + +func (d *Compose) WithWeaviateWithGRPC() *Compose { + d.With1NodeCluster() + d.withWeaviateExposeGRPCPort = true + return d +} + +func (d *Compose) WithWeaviateCluster(size int) *Compose { + if size%2 == 0 { + panic("it's essential for the cluster size to be an odd number to ensure a majority can be achieved for quorum decisions, even if some nodes become unavailable") + } + d.withWeaviateCluster = true + d.withWeaviateClusterSize = size + return d +} + +func (d *Compose) WithWeaviateClusterWithGRPC() *Compose { + d.With3NodeCluster() + d.withWeaviateExposeGRPCPort = true + return d +} + +func (d *Compose) WithWeaviateBasicAuth(username, password string) *Compose { + d.withWeaviateBasicAuth = true + d.withWeaviateBasicAuthUsername = username + d.withWeaviateBasicAuthPassword = password + return d +} + +func (d *Compose) WithWeaviateAuth() *Compose { + d.withWeaviateAuth = true + return d.With1NodeCluster() +} + +func (d *Compose) WithAdminListAdmins(users ...string) *Compose { + d.weaviateAdminlistAdminUsers = users + return d +} + +func (d *Compose) WithAdminListUsers(users ...string) *Compose { + d.weaviateAdminlistReadOnlyUsers = users + return d +} + +func (d *Compose) WithWeaviateEnv(name, value string) 
*Compose { + d.weaviateEnvs[name] = value + return d +} + +func (d *Compose) WithMockOIDC() *Compose { + d.withMockOIDC = true + return d +} + +func (d *Compose) WithMockOIDCWithCertificate() *Compose { + d.withMockOIDCWithCertificate = true + return d +} + +func (d *Compose) WithApiKey() *Compose { + d.withWeaviateApiKey = true + return d +} + +func (d *Compose) WithUserApiKey(username, key string) *Compose { + if !d.withWeaviateApiKey { + panic("API key auth is not enabled. Chain .WithApiKey() first") + } + d.weaviateApiKeyUsers = append(d.weaviateApiKeyUsers, ApiKeyUser{ + Username: username, + Key: key, + }) + return d +} + +func (d *Compose) WithRBAC() *Compose { + d.withWeaviateRbac = true + return d +} + +func (d *Compose) WithDbUsers() *Compose { + d.withWeaviateDbUsers = true + return d +} + +func (d *Compose) WithRbacRoots(usernames ...string) *Compose { + if !d.withWeaviateRbac { + panic("RBAC is not enabled. Chain .WithRBAC() first") + } + d.weaviateRbacRoots = append(d.weaviateRbacRoots, usernames...) + return d +} + +func (d *Compose) WithRbacRootGroups(groups ...string) *Compose { + if !d.withWeaviateRbac { + panic("RBAC is not enabled. Chain .WithRBAC() first") + } + d.weaviateRbacRootGroups = append(d.weaviateRbacRootGroups, groups...) + return d +} + +func (d *Compose) WithRbacViewerGroups(groups ...string) *Compose { + if !d.withWeaviateRbac { + panic("RBAC is not enabled. Chain .WithRBAC() first") + } + d.weaviateRbacViewerGroups = append(d.weaviateRbacViewerGroups, groups...) + return d +} + +func (d *Compose) WithRbacViewers(usernames ...string) *Compose { + if !d.withWeaviateRbac { + panic("RBAC is not enabled. Chain .WithRBAC() first") + } + d.weaviateRbacViewers = append(d.weaviateRbacViewers, usernames...)
+ return d +} + +func (d *Compose) WithoutWeaviateEnvs(names ...string) *Compose { + for _, name := range names { + d.removeEnvs[name] = struct{}{} + } + return d +} + +func (d *Compose) WithAutoschema() *Compose { + d.withAutoschema = true + return d +} + +func (d *Compose) Start(ctx context.Context) (*DockerCompose, error) { + d.weaviateEnvs["DISABLE_TELEMETRY"] = "true" + network, err := tescontainersnetwork.New( + ctx, + tescontainersnetwork.WithAttachable(), + ) + if err != nil { + return nil, errors.Wrapf(err, "connecting to network") + } + + networkName := network.Name + + envSettings := make(map[string]string) + envSettings["network"] = networkName + envSettings["DISABLE_TELEMETRY"] = "true" + containers := []*DockerContainer{} + if d.withMinIO { + container, err := startMinIO(ctx, networkName, d.withBackendS3Buckets) + if err != nil { + return nil, errors.Wrapf(err, "start %s", MinIO) + } + containers = append(containers, container) + + if d.withBackendS3 { + if d.withBackupS3Bucket != "" { + envSettings["BACKUP_S3_BUCKET"] = d.withBackupS3Bucket + } + + if d.withOffloadS3Bucket != "" { + envSettings["OFFLOAD_S3_BUCKET"] = d.withOffloadS3Bucket + envSettings["OFFLOAD_S3_BUCKET_AUTO_CREATE"] = "true" + } + + for k, v := range container.envSettings { + envSettings[k] = v + } + } + } + if d.withGCS { + container, err := startGCS(ctx, networkName) + if err != nil { + return nil, errors.Wrapf(err, "start %s", GCS) + } + containers = append(containers, container) + if d.withBackendGCS { + for k, v := range container.envSettings { + envSettings[k] = v + } + envSettings["BACKUP_GCS_BUCKET"] = d.withBackendGCSBucket + } + } + if d.withAzurite { + container, err := startAzurite(ctx, networkName) + if err != nil { + return nil, errors.Wrapf(err, "start %s", Azurite) + } + containers = append(containers, container) + if d.withBackendAzure { + for k, v := range container.envSettings { + envSettings[k] = v + } + envSettings["BACKUP_AZURE_CONTAINER"] = 
d.withBackendAzureContainer + } + } + if d.withBackendFilesystem { + envSettings["BACKUP_FILESYSTEM_PATH"] = "/tmp/backups" + } + if d.withModel2Vec { + image := os.Getenv(envTestText2vecModel2VecImage) + container, err := startT2VModel2Vec(ctx, networkName, image) + if err != nil { + return nil, errors.Wrapf(err, "start %s", Text2VecModel2Vec) + } + for k, v := range container.envSettings { + envSettings[k] = v + } + containers = append(containers, container) + } + if d.withTransformers { + image := os.Getenv(envTestText2vecTransformersImage) + if d.withTransformersImage != "" { + image = d.withTransformersImage + } + container, err := startT2VTransformers(ctx, networkName, image) + if err != nil { + return nil, errors.Wrapf(err, "start %s", Text2VecTransformers) + } + for k, v := range container.envSettings { + envSettings[k] = v + } + containers = append(containers, container) + } + if d.withContextionary { + image := os.Getenv(envTestText2vecContextionaryImage) + container, err := startT2VContextionary(ctx, networkName, image) + if err != nil { + return nil, errors.Wrapf(err, "start %s", Text2VecContextionary) + } + for k, v := range container.envSettings { + envSettings[k] = v + } + containers = append(containers, container) + } + if d.withOllamaVectorizer { + container, err := startOllamaVectorizer(ctx, networkName) + if err != nil { + return nil, errors.Wrapf(err, "start %s", OllamaVectorizer) + } + for k, v := range container.envSettings { + envSettings[k] = v + } + containers = append(containers, container) + } + if d.withOllamaGenerative { + container, err := startOllamaGenerative(ctx, networkName) + if err != nil { + return nil, errors.Wrapf(err, "start %s", OllamaGenerative) + } + for k, v := range container.envSettings { + envSettings[k] = v + } + containers = append(containers, container) + } + if d.withQnATransformers { + image := os.Getenv(envTestQnATransformersImage) + container, err := startQnATransformers(ctx, networkName, image) + if err != nil 
{ + return nil, errors.Wrapf(err, "start %s", QnATransformers) + } + for k, v := range container.envSettings { + envSettings[k] = v + } + containers = append(containers, container) + } + if d.withSUMTransformers { + image := os.Getenv(envTestSUMTransformersImage) + container, err := startSUMTransformers(ctx, networkName, image) + if err != nil { + return nil, errors.Wrapf(err, "start %s", SUMTransformers) + } + for k, v := range container.envSettings { + envSettings[k] = v + } + containers = append(containers, container) + } + if d.withCLIP { + image := os.Getenv(envTestMulti2VecCLIPImage) + container, err := startM2VClip(ctx, networkName, image) + if err != nil { + return nil, errors.Wrapf(err, "start %s", Multi2VecCLIP) + } + for k, v := range container.envSettings { + envSettings[k] = v + } + containers = append(containers, container) + } + if d.withGoogleApiKey != "" { + envSettings["GOOGLE_APIKEY"] = d.withGoogleApiKey + } + if d.withBind { + image := os.Getenv(envTestMulti2VecBindImage) + container, err := startM2VBind(ctx, networkName, image) + if err != nil { + return nil, errors.Wrapf(err, "start %s", Multi2VecBind) + } + for k, v := range container.envSettings { + envSettings[k] = v + } + containers = append(containers, container) + } + if d.withImg2Vec { + image := os.Getenv(envTestImg2VecNeuralImage) + container, err := startI2VNeural(ctx, networkName, image) + if err != nil { + return nil, errors.Wrapf(err, "start %s", Img2VecNeural) + } + for k, v := range container.envSettings { + envSettings[k] = v + } + containers = append(containers, container) + } + if d.withRerankerTransformers { + image := os.Getenv(envTestRerankerTransformersImage) + container, err := startRerankerTransformers(ctx, networkName, image) + if err != nil { + return nil, errors.Wrapf(err, "start %s", RerankerTransformers) + } + for k, v := range container.envSettings { + envSettings[k] = v + } + containers = append(containers, container) + } + if d.withMockOIDC || 
d.withMockOIDCWithCertificate { + var certificate, certificateKey string + if d.withMockOIDCWithCertificate { + // Generate certifcate and certificate's private key + certificate, certificateKey, err = GenerateCertificateAndKey(MockOIDC) + if err != nil { + return nil, errors.Wrapf(err, "cannot generate mock certificates for %s", MockOIDC) + } + } + image := os.Getenv(envTestMockOIDCImage) + container, err := startMockOIDC(ctx, networkName, image, certificate, certificateKey) + if err != nil { + return nil, errors.Wrapf(err, "start %s", MockOIDC) + } + for k, v := range container.envSettings { + if k == "AUTHENTICATION_OIDC_CERTIFICATE" && envSettings[k] != "" { + // allow to pass some other certificate using WithWeaviateEnv method + continue + } + envSettings[k] = v + } + containers = append(containers, container) + helperImage := os.Getenv(envTestMockOIDCHelperImage) + helperContainer, err := startMockOIDCHelper(ctx, networkName, helperImage, certificate) + if err != nil { + return nil, errors.Wrapf(err, "start %s", MockOIDCHelper) + } + containers = append(containers, helperContainer) + } + + if d.withWeaviateCluster { + cs, err := d.startCluster(ctx, d.withWeaviateClusterSize, envSettings) + for _, c := range cs { + if c != nil { + containers = append(containers, c) + } + } + return &DockerCompose{network, containers}, err + } + + if d.withSecondWeaviate { + image := os.Getenv(envTestWeaviateImage) + hostname := SecondWeaviate + secondWeaviateSettings := envSettings + // Ensure second weaviate doesn't get cluster settings from the first cluster if any. 
+ delete(secondWeaviateSettings, "CLUSTER_HOSTNAME") + delete(secondWeaviateSettings, "CLUSTER_GOSSIP_BIND_PORT") + delete(secondWeaviateSettings, "CLUSTER_DATA_BIND_PORT") + delete(secondWeaviateSettings, "CLUSTER_JOIN") + for k, v := range d.weaviateEnvs { + secondWeaviateSettings[k] = v + } + delete(secondWeaviateSettings, "RAFT_PORT") + delete(secondWeaviateSettings, "RAFT_INTERNAL_RPC_PORT") + delete(secondWeaviateSettings, "RAFT_JOIN") + container, err := startWeaviate(ctx, d.enableModules, d.defaultVectorizerModule, secondWeaviateSettings, networkName, image, hostname, d.withWeaviateExposeGRPCPort, "/v1/.well-known/ready") + if err != nil { + return nil, errors.Wrapf(err, "start %s", hostname) + } + containers = append(containers, container) + if err != nil { + return &DockerCompose{network, containers}, errors.Wrapf(err, "start %s", hostname) + } + } + + return &DockerCompose{network, containers}, nil +} + +func (d *Compose) With1NodeCluster() *Compose { + d.withWeaviateCluster = true + d.withWeaviateClusterSize = 1 + return d +} + +func (d *Compose) With3NodeCluster() *Compose { + d.withWeaviateCluster = true + d.withWeaviateClusterSize = 3 + return d +} + +func (d *Compose) startCluster(ctx context.Context, size int, settings map[string]string) ([]*DockerContainer, error) { + if size == 0 || size > 3 { + return nil, nil + } + for k, v := range d.weaviateEnvs { + settings[k] = v + } + + for k := range d.removeEnvs { + delete(settings, k) + } + + raft_join := "node1,node2,node3" + if size == 1 { + raft_join = "node1" + } else if size == 2 { + raft_join = "node1,node2" + } + + cs := make([]*DockerContainer, size) + image := os.Getenv(envTestWeaviateImage) + networkName := settings["network"] + settings["DISABLE_TELEMETRY"] = "true" + if d.withWeaviateBasicAuth { + settings["CLUSTER_BASIC_AUTH_USERNAME"] = d.withWeaviateBasicAuthUsername + settings["CLUSTER_BASIC_AUTH_PASSWORD"] = d.withWeaviateBasicAuthPassword + } + if d.withWeaviateAuth { + settings["AUTHENTICATION_OIDC_ENABLED"] =
"true" + settings["AUTHENTICATION_OIDC_CLIENT_ID"] = "wcs" + settings["AUTHENTICATION_OIDC_ISSUER"] = "https://auth.wcs.api.weaviate.io/auth/realms/SeMI" + settings["AUTHENTICATION_OIDC_USERNAME_CLAIM"] = "email" + settings["AUTHENTICATION_OIDC_GROUPS_CLAIM"] = "groups" + settings["AUTHORIZATION_ADMINLIST_ENABLED"] = "true" + settings["AUTHORIZATION_ADMINLIST_USERS"] = "oidc-test-user@weaviate.io" + } + if len(d.weaviateAdminlistAdminUsers) > 0 { + settings["AUTHORIZATION_ADMINLIST_ENABLED"] = "true" + settings["AUTHORIZATION_ADMINLIST_USERS"] = strings.Join(d.weaviateAdminlistAdminUsers, ",") + if len(d.weaviateAdminlistReadOnlyUsers) > 0 { + settings["AUTHORIZATION_ADMINLIST_READONLY_USERS"] = strings.Join(d.weaviateAdminlistReadOnlyUsers, ",") + } + } + + if d.withWeaviateApiKey { + usernames := make([]string, 0, len(d.weaviateApiKeyUsers)) + keys := make([]string, 0, len(d.weaviateApiKeyUsers)) + + for _, user := range d.weaviateApiKeyUsers { + usernames = append(usernames, user.Username) + keys = append(keys, user.Key) + } + if len(keys) > 0 { + settings["AUTHENTICATION_APIKEY_ALLOWED_KEYS"] = strings.Join(keys, ",") + settings["AUTHENTICATION_APIKEY_ENABLED"] = "true" + } + if len(usernames) > 0 { + settings["AUTHENTICATION_APIKEY_USERS"] = strings.Join(usernames, ",") + settings["AUTHENTICATION_APIKEY_ENABLED"] = "true" + } + } + + if d.withWeaviateRbac { + settings["AUTHORIZATION_RBAC_ENABLED"] = "true" + settings["AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED"] = "false" // incompatible + + if len(d.weaviateRbacRoots) > 0 { + settings["AUTHORIZATION_RBAC_ROOT_USERS"] = strings.Join(d.weaviateRbacRoots, ",") + } + if len(d.weaviateRbacViewers) > 0 { + settings["AUTHORIZATION_VIEWER_USERS"] = strings.Join(d.weaviateRbacViewers, ",") + } + + if len(d.weaviateRbacRootGroups) > 0 { + settings["AUTHORIZATION_RBAC_ROOT_GROUPS"] = strings.Join(d.weaviateRbacRootGroups, ",") + } + if len(d.weaviateRbacViewerGroups) > 0 { + settings["AUTHORIZATION_RBAC_READONLY_GROUPS"] = 
strings.Join(d.weaviateRbacViewerGroups, ",") + } + + } + + if d.withWeaviateDbUsers { + settings["AUTHENTICATION_DB_USERS_ENABLED"] = "true" + } + + if d.withAutoschema { + settings["AUTOSCHEMA_ENABLED"] = "true" + } + + settings["RAFT_PORT"] = "8300" + settings["RAFT_INTERNAL_RPC_PORT"] = "8301" + settings["RAFT_JOIN"] = raft_join + settings["RAFT_BOOTSTRAP_EXPECT"] = strconv.Itoa(d.withWeaviateClusterSize) + + // first node + config1 := copySettings(settings) + config1["CLUSTER_HOSTNAME"] = "node1" + config1["CLUSTER_GOSSIP_BIND_PORT"] = "7100" + config1["CLUSTER_DATA_BIND_PORT"] = "7101" + eg := errgroup.Group{} + wellKnownEndpointFunc := func(hostname string) string { + if slices.Contains(strings.Split(settings["MAINTENANCE_NODES"], ","), hostname) { + return "/v1/.well-known/live" + } + return "/v1/.well-known/ready" + } + eg.Go(func() (err error) { + cs[0], err = startWeaviate(ctx, d.enableModules, d.defaultVectorizerModule, + config1, networkName, image, Weaviate1, d.withWeaviateExposeGRPCPort, wellKnownEndpointFunc("node1")) + if err != nil { + return errors.Wrapf(err, "start %s", Weaviate1) + } + return nil + }) + + if size > 1 { + config2 := copySettings(settings) + config2["CLUSTER_HOSTNAME"] = "node2" + config2["CLUSTER_GOSSIP_BIND_PORT"] = "7102" + config2["CLUSTER_DATA_BIND_PORT"] = "7103" + config2["CLUSTER_JOIN"] = fmt.Sprintf("%s:7100", Weaviate1) + eg.Go(func() (err error) { + time.Sleep(time.Second * 10) // node1 needs to be up before we can start this node + cs[1], err = startWeaviate(ctx, d.enableModules, d.defaultVectorizerModule, + config2, networkName, image, Weaviate2, d.withWeaviateExposeGRPCPort, wellKnownEndpointFunc("node2")) + if err != nil { + return errors.Wrapf(err, "start %s", Weaviate2) + } + return nil + }) + } + + if size > 2 { + config3 := copySettings(settings) + config3["CLUSTER_HOSTNAME"] = "node3" + config3["CLUSTER_GOSSIP_BIND_PORT"] = "7104" + config3["CLUSTER_DATA_BIND_PORT"] = "7105" + config3["CLUSTER_JOIN"] = 
fmt.Sprintf("%s:7100", Weaviate1) + eg.Go(func() (err error) { + time.Sleep(time.Second * 10) // node1 needs to be up before we can start this node + cs[2], err = startWeaviate(ctx, d.enableModules, d.defaultVectorizerModule, + config3, networkName, image, Weaviate3, d.withWeaviateExposeGRPCPort, wellKnownEndpointFunc("node3")) + if err != nil { + return errors.Wrapf(err, "start %s", Weaviate3) + } + return nil + }) + } + + return cs, eg.Wait() +} + +func copySettings(s map[string]string) map[string]string { + copy := make(map[string]string, len(s)) + for k, v := range s { + copy[k] = v + } + return copy +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/container.go b/platform/dbops/binaries/weaviate-src/test/docker/container.go new file mode 100644 index 0000000000000000000000000000000000000000..f280e25b065c44c180e44885d6615c7066832546 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/container.go @@ -0,0 +1,59 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" +) + +type EndpointName string + +var ( + HTTP EndpointName = "http" + GRPC EndpointName = "grpc" +) + +type endpoint struct { + port nat.Port + uri string +} + +type DockerContainer struct { + name string + endpoints map[EndpointName]endpoint + container testcontainers.Container + envSettings map[string]string +} + +func (d *DockerContainer) Name() string { + return d.name +} + +func (d *DockerContainer) URI() string { + return d.GetEndpoint(HTTP) +} + +func (d *DockerContainer) GrpcURI() string { + return d.GetEndpoint(GRPC) +} + +func (d *DockerContainer) GetEndpoint(name EndpointName) string { + if endpoint, ok := d.endpoints[name]; ok { + return endpoint.uri + } + return "" +} + +func (d *DockerContainer) Container() testcontainers.Container { + return d.container +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/contextionary.go b/platform/dbops/binaries/weaviate-src/test/docker/contextionary.go new file mode 100644 index 0000000000000000000000000000000000000000..640a838b47840ace44904c7cc2775fea737affd7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/contextionary.go @@ -0,0 +1,64 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const Text2VecContextionary = "text2vec-contextionary" + +func startT2VContextionary(ctx context.Context, networkName, contextionaryImage string) (*DockerContainer, error) { + image := "semitechnologies/contextionary:en0.16.0-v1.2.1" + if len(contextionaryImage) > 0 { + image = contextionaryImage + } + port := nat.Port("9999/tcp") + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: image, + Hostname: Text2VecContextionary, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {Text2VecContextionary}, + }, + Name: Text2VecContextionary, + Env: map[string]string{ + "OCCURRENCE_WEIGHT_LINEAR_FACTOR": "0.75", + "EXTENSIONS_STORAGE_MODE": "weaviate", + "EXTENSIONS_STORAGE_ORIGIN": fmt.Sprintf("http://%s:8080", Weaviate1), + }, + ExposedPorts: []string{"9999/tcp"}, + AutoRemove: true, + WaitingFor: wait.ForListeningPort(port), + }, + Started: true, + Reuse: true, + }) + if err != nil { + return nil, err + } + uri, err := container.PortEndpoint(ctx, port, "") + if err != nil { + return nil, err + } + envSettings := make(map[string]string) + envSettings["CONTEXTIONARY_URL"] = fmt.Sprintf("%s:%s", Text2VecContextionary, port.Port()) + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = endpoint{port, uri} + return &DockerContainer{Text2VecContextionary, endpoints, container, envSettings}, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/docker.go b/platform/dbops/binaries/weaviate-src/test/docker/docker.go new file mode 100644 index 0000000000000000000000000000000000000000..3cd58fa295c43e915e52f3631a1de9296960071a --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/test/docker/docker.go @@ -0,0 +1,271 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "os/exec" + "time" + + "github.com/docker/go-connections/nat" + "github.com/pkg/errors" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +type DockerCompose struct { + network *testcontainers.DockerNetwork + containers []*DockerContainer +} + +func (d *DockerCompose) Containers() []*DockerContainer { + return d.containers +} + +func (d *DockerCompose) Terminate(ctx context.Context) error { + var errs error + for _, c := range d.containers { + if err := testcontainers.TerminateContainer(c.container, testcontainers.StopContext(ctx)); err != nil { + errs = errors.Wrapf(err, "cannot terminate: %v", c.name) + } + } + if d.network != nil { + if err := d.network.Remove(ctx); err != nil { + errs = errors.Wrapf(err, "cannot remove network") + } + } + return errs +} + +func (d *DockerCompose) Stop(ctx context.Context, container string, timeout *time.Duration) error { + for _, c := range d.containers { + if c.name == container { + if err := c.container.Stop(ctx, timeout); err != nil { + return fmt.Errorf("cannot stop %q: %w", c.name, err) + } + break + } + } + return nil +} + +func (d *DockerCompose) TerminateContainer(ctx context.Context, container string) error { + for idx, c := range d.containers { + if c.name == container { + if err := testcontainers.TerminateContainer(c.container, testcontainers.StopContext(ctx)); err != nil { + return fmt.Errorf("cannot stop %q: %w", c.name, err) + } + d.containers = append(d.containers[:idx], d.containers[idx+1:]...) 
+ break + } + } + return nil +} + +func (d *DockerCompose) Start(ctx context.Context, container string) error { + idx := -1 + for i, c := range d.containers { + if c.name == container { + idx = i + break + } + } + if idx == -1 { + return fmt.Errorf("container %q does not exist ", container) + } + return d.StartAt(ctx, idx) +} + +func (d *DockerCompose) StopAt(ctx context.Context, nodeIndex int, timeout *time.Duration) error { + if nodeIndex >= len(d.containers) { + return fmt.Errorf("node index: %v is greater than available nodes: %v", nodeIndex, len(d.containers)) + } + if err := d.containers[nodeIndex].container.Stop(ctx, timeout); err != nil { + return err + } + + // sleep to make sure that the off node is detected by memberlist and marked failed + // it shall be used with combination of "FAST_FAILURE_DETECTION" env flag + time.Sleep(3 * time.Second) + + return nil +} + +func (d *DockerCompose) StartAt(ctx context.Context, nodeIndex int) error { + if nodeIndex >= len(d.containers) { + return errors.Errorf("node index is greater than available nodes") + } + + c := d.containers[nodeIndex] + if err := c.container.Start(ctx); err != nil { + return fmt.Errorf("cannot start container at index %d : %w", nodeIndex, err) + } + + endPoints := map[EndpointName]endpoint{} + for name, e := range c.endpoints { + newURI, err := c.container.PortEndpoint(context.Background(), nat.Port(e.port), "") + if err != nil { + return fmt.Errorf("failed to get new uri for container %q: %w", c.name, err) + } + endPoints[name] = endpoint{e.port, newURI} + + // wait until node is ready + if name != HTTP { + continue + } + waitStrategy := wait.ForHTTP("/v1/.well-known/ready").WithPort(nat.Port(e.port)) + if err := waitStrategy.WaitUntilReady(ctx, c.container); err != nil { + return err + } + } + c.endpoints = endPoints + return nil +} + +func (d *DockerCompose) ContainerURI(index int) string { + return d.containers[index].URI() +} + +func (d *DockerCompose) ContainerAt(index int) 
(*DockerContainer, error) { + if index > len(d.containers) { + return nil, fmt.Errorf("container at index %d does not exit", index) + } + return d.containers[index], nil +} + +func (d *DockerCompose) GetMinIO() *DockerContainer { + return d.getContainerByName(MinIO) +} + +func (d *DockerCompose) StopMinIO(ctx context.Context) error { + minio := d.getContainerByName(MinIO) + + return minio.container.Stop(ctx, nil) +} + +func (d *DockerCompose) GetGCS() *DockerContainer { + return d.getContainerByName(GCS) +} + +func (d *DockerCompose) GetAzurite() *DockerContainer { + return d.getContainerByName(Azurite) +} + +func (d *DockerCompose) GetWeaviate() *DockerContainer { + return d.getContainerByName(Weaviate1) +} + +func (d *DockerCompose) GetSecondWeaviate() *DockerContainer { + return d.getContainerByName(SecondWeaviate) +} + +func (d *DockerCompose) GetWeaviateNode2() *DockerContainer { + return d.getContainerByName(Weaviate2) +} + +func (d *DockerCompose) GetWeaviateNode3() *DockerContainer { + return d.getContainerByName(Weaviate3) +} + +func (d *DockerCompose) GetWeaviateNode(n int) *DockerContainer { + if n == 1 { + return d.GetWeaviate() + } + return d.getContainerByName(fmt.Sprintf("%s%d", Weaviate, n)) +} + +func (d *DockerCompose) GetText2VecTransformers() *DockerContainer { + return d.getContainerByName(Text2VecTransformers) +} + +func (d *DockerCompose) GetText2VecContextionary() *DockerContainer { + return d.getContainerByName(Text2VecContextionary) +} + +func (d *DockerCompose) GetQnATransformers() *DockerContainer { + return d.getContainerByName(QnATransformers) +} + +func (d *DockerCompose) GetOllamaVectorizer() *DockerContainer { + return d.getContainerByName(OllamaVectorizer) +} + +func (d *DockerCompose) GetOllamaGenerative() *DockerContainer { + return d.getContainerByName(OllamaGenerative) +} + +func (d *DockerCompose) GetMockOIDC() *DockerContainer { + return d.getContainerByName(MockOIDC) +} + +func (d *DockerCompose) GetMockOIDCHelper() 
*DockerContainer { + return d.getContainerByName(MockOIDCHelper) +} + +func (d *DockerCompose) getContainerByName(name string) *DockerContainer { + for _, c := range d.containers { + if c.name == name { + return c + } + } + return nil +} + +// DisconnectFromNetwork disconnects a container from the network by its index +func (d *DockerCompose) DisconnectFromNetwork(ctx context.Context, nodeIndex int) error { + if nodeIndex >= len(d.containers) { + return fmt.Errorf("node index: %v is greater than available nodes: %v", nodeIndex, len(d.containers)) + } + + container := d.containers[nodeIndex] + if d.network == nil { + return fmt.Errorf("network is nil") + } + + // Get the network name + networkName := d.network.Name + + // Execute docker network disconnect command + cmd := exec.CommandContext(ctx, "docker", "network", "disconnect", networkName, container.name) + if err := cmd.Run(); err != nil { + return fmt.Errorf("failed to disconnect container %s from network: %w", container.name, err) + } + // sleep to make sure that the off node is detected by memberlist and marked failed + time.Sleep(3 * time.Second) + return nil +} + +// ConnectToNetwork connects a container to the network by its index +func (d *DockerCompose) ConnectToNetwork(ctx context.Context, nodeIndex int) error { + if nodeIndex >= len(d.containers) { + return fmt.Errorf("node index: %v is greater than available nodes: %v", nodeIndex, len(d.containers)) + } + + container := d.containers[nodeIndex] + if d.network == nil { + return fmt.Errorf("network is nil") + } + + // Get the network name + networkName := d.network.Name + + // Execute docker network connect command + cmd := exec.CommandContext(ctx, "docker", "network", "connect", networkName, container.name) + if err := cmd.Run(); err != nil { + return fmt.Errorf("failed to connect container %s to network: %w", container.name, err) + } + + // sleep to make sure that the off node is detected by memberlist and connected to the network + time.Sleep(3 * 
time.Second) + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/gcs.go b/platform/dbops/binaries/weaviate-src/test/docker/gcs.go new file mode 100644 index 0000000000000000000000000000000000000000..a81d2e6f964eba418a0a2781b415a5da1553a1bb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/gcs.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "os" + "time" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const GCS = "gcp-storage-emulator" + +func startGCS(ctx context.Context, networkName string) (*DockerContainer, error) { + port := nat.Port("9090/tcp") + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: "oittaa/gcp-storage-emulator", + ExposedPorts: []string{"9090/tcp"}, + Name: GCS, + Hostname: GCS, + AutoRemove: true, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {GCS}, + }, + Env: map[string]string{ + "PORT": port.Port(), + }, + WaitingFor: wait.ForAll( + wait.ForListeningPort(port), + wait.ForHTTP("/").WithPort(port), + ).WithStartupTimeoutDefault(60 * time.Second), + }, + Started: true, + Reuse: true, + }) + if err != nil { + return nil, err + } + uri, err := container.PortEndpoint(ctx, port, "") + if err != nil { + return nil, err + } + envSettings := make(map[string]string) + projectID := os.Getenv("GOOGLE_CLOUD_PROJECT") + envSettings["GOOGLE_CLOUD_PROJECT"] = projectID + envSettings["STORAGE_EMULATOR_HOST"] = fmt.Sprintf("%s:%s", GCS, port.Port()) + 
envSettings["BACKUP_GCS_USE_AUTH"] = "false" + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = endpoint{port, uri} + return &DockerContainer{GCS, endpoints, container, envSettings}, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/image.go b/platform/dbops/binaries/weaviate-src/test/docker/image.go new file mode 100644 index 0000000000000000000000000000000000000000..cc31e495f12306736013381ea468991dc265be3c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/image.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "time" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const Img2VecNeural = "img2vec-neural" + +func startI2VNeural(ctx context.Context, networkName, img2vecImage string) (*DockerContainer, error) { + image := "semitechnologies/img2vec-pytorch:resnet50" + if len(img2vecImage) > 0 { + image = img2vecImage + } + port := nat.Port("8080/tcp") + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: image, + Hostname: Img2VecNeural, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {Img2VecNeural}, + }, + Name: Img2VecNeural, + ExposedPorts: []string{"8080/tcp"}, + AutoRemove: true, + WaitingFor: wait. + ForHTTP("/.well-known/ready"). + WithPort(port). + WithStatusCodeMatcher(func(status int) bool { + return status == 204 + }). 
+ WithStartupTimeout(240 * time.Second), + }, + Started: true, + Reuse: true, + }) + if err != nil { + return nil, err + } + uri, err := container.PortEndpoint(ctx, port, "") + if err != nil { + return nil, err + } + envSettings := make(map[string]string) + envSettings["IMAGE_INFERENCE_API"] = fmt.Sprintf("http://%s:%s", Img2VecNeural, port.Port()) + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = endpoint{port, uri} + return &DockerContainer{Img2VecNeural, endpoints, container, envSettings}, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/minio.go b/platform/dbops/binaries/weaviate-src/test/docker/minio.go new file mode 100644 index 0000000000000000000000000000000000000000..de8c6dc0403eb1be5db666eab4cda69e3406584f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/minio.go @@ -0,0 +1,80 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "time" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const MinIO = "test-minio" + +func startMinIO(ctx context.Context, networkName string, buckets map[string]string) (*DockerContainer, error) { + port := nat.Port("9000/tcp") + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: "minio/minio", + ExposedPorts: []string{"9000/tcp"}, + Name: MinIO, + Hostname: MinIO, + AutoRemove: true, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {MinIO}, + }, + Env: map[string]string{ + "MINIO_ROOT_USER": "aws_access_key", + "MINIO_ROOT_PASSWORD": "aws_secret_key", + }, + Cmd: []string{"server", "/data"}, + WaitingFor: wait.ForAll( + wait.ForListeningPort(port), + wait.ForHTTP("/minio/health/ready").WithPort(port), + ).WithDeadline(60 * time.Second), + }, + Started: true, + Reuse: true, + }) + if err != nil { + return nil, err + } + + if len(buckets) > 0 { + for bName, region := range buckets { + if bName == "" { + continue + } + _, _, err = container.Exec(ctx, []string{"mc", "mb", "--region", region, fmt.Sprintf("data/%s", bName)}) + if err != nil { + return nil, fmt.Errorf("failed to create bucket %s: %s", bName, err.Error()) + } + } + } + uri, err := container.PortEndpoint(ctx, port, "") + if err != nil { + return nil, err + } + envSettings := make(map[string]string) + envSettings["BACKUP_S3_ENDPOINT"] = fmt.Sprintf("%s:%s", MinIO, port.Port()) + envSettings["OFFLOAD_S3_ENDPOINT"] = fmt.Sprintf("http://%s:%s", MinIO, port.Port()) + envSettings["BACKUP_S3_USE_SSL"] = "false" + envSettings["AWS_ACCESS_KEY_ID"] = "aws_access_key" + envSettings["AWS_SECRET_KEY"] = "aws_secret_key" + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = 
endpoint{port, uri} + return &DockerContainer{MinIO, endpoints, container, envSettings}, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/mockoidc.go b/platform/dbops/binaries/weaviate-src/test/docker/mockoidc.go new file mode 100644 index 0000000000000000000000000000000000000000..86b10cc70ca6b9c5f630c0b5ede866f2abc495e5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/mockoidc.go @@ -0,0 +1,142 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "crypto/rand" + "crypto/rsa" + "crypto/x509" + "crypto/x509/pkix" + "encoding/pem" + "fmt" + "math/big" + "net" + "os" + "strings" + "time" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const MockOIDC = "mock-oidc" + +func startMockOIDC(ctx context.Context, networkName, mockoidcImage, certificate, certificatePrivateKey string) (*DockerContainer, error) { + path, err := os.Getwd() + if err != nil { + return nil, err + } + getContextPath := func(path string) string { + if strings.Contains(path, "test/acceptance_with_go_client") { + return path[:strings.Index(path, "/test/acceptance_with_go_client")] + } + if strings.Contains(path, "test/acceptance") { + return path[:strings.Index(path, "/test/acceptance")] + } + return path[:strings.Index(path, "/test/modules")] + } + fromDockerFile := testcontainers.FromDockerfile{} + if mockoidcImage == "" { + contextPath := fmt.Sprintf("%s/test/docker/mockoidc", getContextPath(path)) + fromDockerFile = testcontainers.FromDockerfile{ + Context: contextPath, + Dockerfile: "Dockerfile", + PrintBuildLog: true, + KeepImage: false, + } + } + containerEnvs := map[string]string{ 
// GenerateCertificateAndKey builds a self-signed RSA certificate for the given
// DNS name (also valid for 127.0.0.1) and returns the certificate and private
// key as PEM-encoded strings.
func GenerateCertificateAndKey(dnsName string) (string, string, error) {
	// fresh 2048-bit RSA key for the self-signed pair
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		return "", "", err
	}

	now := time.Now()
	tmpl := x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{CommonName: dnsName},
		NotBefore:    now,
		NotAfter:     now.Add(365 * 24 * time.Hour), // valid for one year
		KeyUsage:     x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		DNSNames:     []string{dnsName},                  // SAN for the given host
		IPAddresses:  []net.IP{net.ParseIP("127.0.0.1")}, // SAN for loopback
	}

	// self-signed: the template acts as both subject and issuer
	der, err := x509.CreateCertificate(rand.Reader, &tmpl, &tmpl, &key.PublicKey, key)
	if err != nil {
		return "", "", err
	}

	certPEM := pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: der})
	keyPEM := pem.EncodeToMemory(&pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(key)})
	return string(certPEM), string(keyPEM), nil
}
+// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "os" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/docker/go-connections/nat" + "github.com/stretchr/testify/assert" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const MockOIDCHelper = "mock-oidc-helper" + +func startMockOIDCHelper(ctx context.Context, networkName, mockoidcHelperImage, certificate string) (*DockerContainer, error) { + path, err := os.Getwd() + if err != nil { + return nil, err + } + getContextPath := func(path string) string { + if strings.Contains(path, "test/acceptance_with_go_client") { + return path[:strings.Index(path, "/test/acceptance_with_go_client")] + } + if strings.Contains(path, "test/acceptance") { + return path[:strings.Index(path, "/test/acceptance")] + } + return path[:strings.Index(path, "/test/modules")] + } + fromDockerFile := testcontainers.FromDockerfile{} + if mockoidcHelperImage == "" { + contextPath := fmt.Sprintf("%s/test/docker/mockoidchelper", getContextPath(path)) + fromDockerFile = testcontainers.FromDockerfile{ + Context: contextPath, + Dockerfile: "Dockerfile", + PrintBuildLog: true, + KeepImage: false, + } + } + port := nat.Port("8080/tcp") + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + FromDockerfile: fromDockerFile, + Image: mockoidcHelperImage, + ExposedPorts: []string{"8080/tcp"}, + Name: MockOIDCHelper, + Hostname: MockOIDCHelper, + AutoRemove: true, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {MockOIDCHelper}, + }, + Env: map[string]string{ + "MOCK_HOSTNAME": fmt.Sprintf("%s:48001", MockOIDC), + "MOCK_CERTIFICATE": certificate, + }, + WaitingFor: wait.ForAll( + wait.ForListeningPort(port), + 
).WithStartupTimeoutDefault(60 * time.Second), + }, + Started: true, + Reuse: true, + }) + if err != nil { + return nil, err + } + uri, err := container.PortEndpoint(ctx, port, "") + if err != nil { + return nil, err + } + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = endpoint{port, uri} + return &DockerContainer{MockOIDCHelper, endpoints, container, nil}, nil +} + +func GetTokensFromMockOIDCWithHelper(t *testing.T, mockOIDCHelperURI string) (string, string) { + url := "http://" + mockOIDCHelperURI + "/tokens" + client := &http.Client{} + req, err := http.NewRequest("GET", url, nil) + assert.NoError(t, err) + req.Header.Set("Content-Type", "application/json") + resp, err := client.Do(req) + assert.NoError(t, err) + defer resp.Body.Close() + assert.Equal(t, 200, resp.StatusCode) + body, _ := io.ReadAll(resp.Body) + var tokensResponse map[string]interface{} + err = json.Unmarshal(body, &tokensResponse) + assert.NoError(t, err) + accessToken, ok := tokensResponse["accessToken"].(string) + if !ok { + t.Fatalf("failed to get access token from: %v", tokensResponse) + } + refreshToken, ok := tokensResponse["refreshToken"].(string) + if !ok { + t.Fatalf("failed to get refresh token from: %v", tokensResponse) + } + return accessToken, refreshToken +} + +const ( + authCode = "auth" + clientSecret = "Secret" + clientID = "mock-oidc-test" +) + +func GetTokensFromMockOIDCWithHelperManualTest(t *testing.T, mockOIDCHelperURI string) (string, string) { + client := &http.Client{} + + authEndpoint := "http://" + mockOIDCHelperURI + "/oidc/authorize" + tokenEndpoint := "http://" + mockOIDCHelperURI + "/oidc/token" + + data := url.Values{} + data.Set("response_type", "code") + data.Set("code", authCode) + data.Set("redirect_uri", "google.com") // needs to be present + data.Set("client_id", clientID) + data.Set("client_secret", clientSecret) + data.Set("state", "email") + data.Set("scope", "openid groups") + req, err := http.NewRequest("POST", authEndpoint, 
bytes.NewBufferString(data.Encode())) + if err != nil { + return "", "" + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + + // not getting a useful return value as we dont provide a valid redirect + resp, _ := client.Do(req) + require.NotNil(t, resp.Body) + defer resp.Body.Close() + + data2 := url.Values{} + data2.Set("grant_type", "authorization_code") + data2.Set("client_id", clientID) + data2.Set("client_secret", clientSecret) + data2.Set("code", authCode) + data2.Set("scope", "email") + data2.Set("state", "email") + req2, err := http.NewRequest("POST", tokenEndpoint, bytes.NewBufferString(data2.Encode())) + if err != nil { + return "", "" + } + req2.Header.Set("Content-Type", "application/x-www-form-urlencoded") + resp2, err := client.Do(req2) + if err != nil { + return "", "" + } + defer resp2.Body.Close() + body, _ := io.ReadAll(resp2.Body) + var tokenResponse map[string]interface{} + err = json.Unmarshal(body, &tokenResponse) + if err != nil { + return "", "" + } + accessToken, ok := tokenResponse["id_token"].(string) + require.True(t, ok, "failed to get access token from: %v", tokenResponse) + refreshToken, ok := tokenResponse["refresh_token"].(string) + require.True(t, ok, "failed to get refresh token from: %v", tokenResponse) + + return accessToken, refreshToken +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/model2vec.go b/platform/dbops/binaries/weaviate-src/test/docker/model2vec.go new file mode 100644 index 0000000000000000000000000000000000000000..f7616c6127f656ddc7c1009a06316999e86fe960 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/model2vec.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
const Text2VecModel2Vec = "text2vec-model2vec"

// startT2VModel2Vec starts the model2vec inference container on the given
// docker network (using model2vecImage when provided, a default image
// otherwise) and returns its handle plus the MODEL2VEC_INFERENCE_API env
// setting Weaviate needs to reach it.
func startT2VModel2Vec(ctx context.Context, networkName, model2vecImage string) (*DockerContainer, error) {
	image := "semitechnologies/model2vec-inference:minishlab-potion-retrieval-32M"
	if len(model2vecImage) > 0 {
		image = model2vecImage
	}
	port := nat.Port("8080/tcp")
	container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{
		ContainerRequest: testcontainers.ContainerRequest{
			Image:    image,
			Hostname: Text2VecModel2Vec,
			Networks: []string{networkName},
			NetworkAliases: map[string][]string{
				networkName: {Text2VecModel2Vec},
			},
			Name:         Text2VecModel2Vec,
			ExposedPorts: []string{"8080/tcp"},
			AutoRemove:   true,
			// readiness probe: the service answers 204 once the model is loaded
			WaitingFor: wait.
				ForHTTP("/.well-known/ready").
				WithPort(port).
				WithStatusCodeMatcher(func(status int) bool {
					return status == 204
				}).
				WithStartupTimeout(240 * time.Second),
		},
		Started: true,
		Reuse:   true,
	})
	if err != nil {
		return nil, err
	}
	// host-mapped endpoint for the test process itself
	uri, err := container.PortEndpoint(ctx, port, "")
	if err != nil {
		return nil, err
	}
	envSettings := make(map[string]string)
	// in-network address the Weaviate container uses to reach the inference API
	envSettings["MODEL2VEC_INFERENCE_API"] = fmt.Sprintf("http://%s:%s", Text2VecModel2Vec, port.Port())
	endpoints := make(map[EndpointName]endpoint)
	endpoints[HTTP] = endpoint{port, uri}
	return &DockerContainer{Text2VecModel2Vec, endpoints, container, envSettings}, nil
}
// Container names for the two Ollama roles used in tests.
const (
	OllamaVectorizer = "ollamavectorizer"
	OllamaGenerative = "ollamagenerative"
)

// startOllamaVectorizer starts an Ollama container preloaded with the
// nomic-embed-text embedding model.
func startOllamaVectorizer(ctx context.Context, networkName string) (*DockerContainer, error) {
	return startOllama(ctx, networkName, OllamaVectorizer, "nomic-embed-text")
}

// startOllamaGenerative starts an Ollama container preloaded with the
// tinyllama generative model.
func startOllamaGenerative(ctx context.Context, networkName string) (*DockerContainer, error) {
	return startOllama(ctx, networkName, OllamaGenerative, "tinyllama")
}

// startOllama runs an Ollama container under the given hostname and, when
// model is non-empty, pulls that model inside the container before returning.
func startOllama(ctx context.Context, networkName, hostname, model string) (*DockerContainer, error) {
	port := nat.Port("11434/tcp")
	container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{
		ContainerRequest: testcontainers.ContainerRequest{
			Image:    "ollama/ollama:0.11.4",
			Hostname: hostname,
			Networks: []string{networkName},
			NetworkAliases: map[string][]string{
				networkName: {hostname},
			},
			Name:         hostname,
			ExposedPorts: []string{"11434/tcp"},
			AutoRemove:   true,
			WaitingFor:   wait.ForListeningPort(port).WithStartupTimeout(60 * time.Second),
		},
		Started: true,
		Reuse:   true,
	})
	if err != nil {
		return nil, err
	}
	if model != "" {
		// pull a given model
		_, _, err = container.Exec(ctx, []string{"ollama", "pull", model})
		if err != nil {
			return nil, fmt.Errorf("failed to pull model %s: %w", model, err)
		}
	}
	// host-mapped endpoint for the test process
	uri, err := container.PortEndpoint(ctx, port, "")
	if err != nil {
		return nil, err
	}
	endpoints := make(map[EndpointName]endpoint)
	endpoints[HTTP] = endpoint{port, uri}
	// in-network API address for other containers; the port field is left at
	// its zero value — presumably only uri is consumed for this entry
	// (TODO confirm against callers of GetEndpoint("apiEndpoint"))
	endpoints["apiEndpoint"] = endpoint{uri: fmt.Sprintf("http://%s:%s", hostname, port.Port())}
	return &DockerContainer{hostname, endpoints, container, nil}, nil
}
b/platform/dbops/binaries/weaviate-src/test/docker/qna.go new file mode 100644 index 0000000000000000000000000000000000000000..ad14f749c1b46cf390503da0fcc8dbadfbb7efa6 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/qna.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "time" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const QnATransformers = "qna-transformers" + +func startQnATransformers(ctx context.Context, networkName, qnaImage string) (*DockerContainer, error) { + image := "semitechnologies/qna-transformers:distilbert-base-uncased-distilled-squad" + if len(qnaImage) > 0 { + image = qnaImage + } + port := nat.Port("8080/tcp") + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: image, + Hostname: QnATransformers, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {QnATransformers}, + }, + Name: QnATransformers, + ExposedPorts: []string{"8080/tcp"}, + AutoRemove: true, + WaitingFor: wait. + ForHTTP("/.well-known/ready"). + WithPort(port). + WithStatusCodeMatcher(func(status int) bool { + return status == 204 + }). 
+ WithStartupTimeout(240 * time.Second), + }, + Started: true, + Reuse: true, + }) + if err != nil { + return nil, err + } + uri, err := container.PortEndpoint(ctx, port, "") + if err != nil { + return nil, err + } + envSettings := make(map[string]string) + envSettings["QNA_INFERENCE_API"] = fmt.Sprintf("http://%s:%s", QnATransformers, port.Port()) + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = endpoint{port, uri} + return &DockerContainer{QnATransformers, endpoints, container, envSettings}, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/reranker.go b/platform/dbops/binaries/weaviate-src/test/docker/reranker.go new file mode 100644 index 0000000000000000000000000000000000000000..dca11551c5733db84a34e5003fae9816253d8279 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/reranker.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "time" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const RerankerTransformers = "reranker-transformers" + +func startRerankerTransformers(ctx context.Context, networkName, rerankerTransformersImage string) (*DockerContainer, error) { + image := "semitechnologies/reranker-transformers:cross-encoder-ms-marco-MiniLM-L-6-v2" + if len(rerankerTransformersImage) > 0 { + image = rerankerTransformersImage + } + port := nat.Port("8080/tcp") + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: image, + Hostname: RerankerTransformers, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {RerankerTransformers}, + }, + Name: RerankerTransformers, + ExposedPorts: []string{"8080/tcp"}, + AutoRemove: true, + WaitingFor: wait. + ForHTTP("/.well-known/ready"). + WithPort(port). + WithStatusCodeMatcher(func(status int) bool { + return status == 204 + }). 
+ WithStartupTimeout(240 * time.Second), + }, + Started: true, + Reuse: true, + }) + if err != nil { + return nil, err + } + uri, err := container.PortEndpoint(ctx, port, "") + if err != nil { + return nil, err + } + envSettings := make(map[string]string) + envSettings["RERANKER_INFERENCE_API"] = fmt.Sprintf("http://%s:%s", RerankerTransformers, port.Port()) + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = endpoint{port, uri} + return &DockerContainer{RerankerTransformers, endpoints, container, envSettings}, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/sum.go b/platform/dbops/binaries/weaviate-src/test/docker/sum.go new file mode 100644 index 0000000000000000000000000000000000000000..21ea294dbca927545e32f84f5c870f94439a0fab --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/sum.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "time" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const SUMTransformers = "sum-transformers" + +func startSUMTransformers(ctx context.Context, networkName, sumImage string) (*DockerContainer, error) { + image := "semitechnologies/sum-transformers:facebook-bart-large-cnn" + if len(sumImage) > 0 { + image = sumImage + } + port := nat.Port("8080/tcp") + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: image, + Hostname: SUMTransformers, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {SUMTransformers}, + }, + Name: SUMTransformers, + ExposedPorts: []string{"8080/tcp"}, + AutoRemove: true, + WaitingFor: wait. + ForHTTP("/.well-known/ready"). + WithPort(port). + WithStatusCodeMatcher(func(status int) bool { + return status == 204 + }). 
+ WithStartupTimeout(240 * time.Second), + }, + Started: true, + Reuse: true, + }) + if err != nil { + return nil, err + } + uri, err := container.PortEndpoint(ctx, port, "") + if err != nil { + return nil, err + } + envSettings := make(map[string]string) + envSettings["SUM_INFERENCE_API"] = fmt.Sprintf("http://%s:%s", SUMTransformers, port.Port()) + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = endpoint{port, uri} + return &DockerContainer{SUMTransformers, endpoints, container, envSettings}, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/transformers.go b/platform/dbops/binaries/weaviate-src/test/docker/transformers.go new file mode 100644 index 0000000000000000000000000000000000000000..5f0d70e09687b3b2884cf0979adca8f3026fc9d4 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/transformers.go @@ -0,0 +1,66 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "time" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const Text2VecTransformers = "text2vec-transformers" + +func startT2VTransformers(ctx context.Context, networkName, transformersImage string) (*DockerContainer, error) { + image := "semitechnologies/transformers-inference:baai-bge-small-en-v1.5-onnx-1.9.4" + if len(transformersImage) > 0 { + image = transformersImage + } + port := nat.Port("8080/tcp") + container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: testcontainers.ContainerRequest{ + Image: image, + Hostname: Text2VecTransformers, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {Text2VecTransformers}, + }, + Name: Text2VecTransformers, + ExposedPorts: []string{"8080/tcp"}, + AutoRemove: true, + WaitingFor: wait. + ForHTTP("/.well-known/ready"). + WithPort(port). + WithStatusCodeMatcher(func(status int) bool { + return status == 204 + }). 
+ WithStartupTimeout(240 * time.Second), + }, + Started: true, + Reuse: true, + }) + if err != nil { + return nil, err + } + uri, err := container.PortEndpoint(ctx, port, "") + if err != nil { + return nil, err + } + envSettings := make(map[string]string) + envSettings["TRANSFORMERS_INFERENCE_API"] = fmt.Sprintf("http://%s:%s", Text2VecTransformers, port.Port()) + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = endpoint{port, uri} + return &DockerContainer{Text2VecTransformers, endpoints, container, envSettings}, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/docker/weaviate.go b/platform/dbops/binaries/weaviate-src/test/docker/weaviate.go new file mode 100644 index 0000000000000000000000000000000000000000..21142bf052542229d33bbd2f4f941a288c9df1c9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/docker/weaviate.go @@ -0,0 +1,171 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package docker + +import ( + "context" + "fmt" + "os" + "os/exec" + "runtime" + "strings" + "time" + + "github.com/docker/go-connections/nat" + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" +) + +const ( + Weaviate1 = "weaviate" + Weaviate2 = "weaviate2" + Weaviate3 = "weaviate3" + Weaviate = "weaviate" + SecondWeaviate = "second-weaviate" +) + +func startWeaviate(ctx context.Context, + enableModules []string, defaultVectorizerModule string, + extraEnvSettings map[string]string, networkName string, + weaviateImage, hostname string, exposeGRPCPort bool, + wellKnownEndpoint string, +) (*DockerContainer, error) { + fromDockerFile := testcontainers.FromDockerfile{} + if len(weaviateImage) == 0 { + path, err := os.Getwd() + if err != nil { + return nil, err + } + getContextPath := func(path string) string { + if strings.Contains(path, "test/acceptance_with_go_client") { + return path[:strings.Index(path, "/test/acceptance_with_go_client")] + } + if strings.Contains(path, "test/acceptance") { + return path[:strings.Index(path, "/test/acceptance")] + } + return path[:strings.Index(path, "/test/modules")] + } + targetArch := runtime.GOARCH + gitHashBytes, err := exec.Command("git", "rev-parse", "--short", "HEAD").CombinedOutput() + if err != nil { + return nil, err + } + gitHash := strings.ReplaceAll(string(gitHashBytes), "\n", "") + contextPath := getContextPath(path) + fromDockerFile = testcontainers.FromDockerfile{ + Context: contextPath, + Dockerfile: "Dockerfile", + BuildArgs: map[string]*string{ + "TARGETARCH": &targetArch, + "GIT_REVISION": &gitHash, + }, + PrintBuildLog: true, + KeepImage: false, + } + } + containerName := Weaviate1 + if hostname != "" { + containerName = hostname + } + env := map[string]string{ + "AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED": "true", + "LOG_LEVEL": "debug", + "QUERY_DEFAULTS_LIMIT": "20", + "PERSISTENCE_DATA_PATH": "./data", + 
"DEFAULT_VECTORIZER_MODULE": "none", + "FAST_FAILURE_DETECTION": "true", + "DISABLE_TELEMETRY": "true", + } + if len(enableModules) > 0 { + env["ENABLE_MODULES"] = strings.Join(enableModules, ",") + } + if len(defaultVectorizerModule) > 0 { + env["DEFAULT_VECTORIZER_MODULE"] = defaultVectorizerModule + } + for key, value := range extraEnvSettings { + env[key] = value + } + + httpPort := nat.Port("8080/tcp") + exposedPorts := []string{"8080/tcp"} + waitStrategies := []wait.Strategy{ + wait.ForListeningPort(httpPort), + wait.ForHTTP(wellKnownEndpoint).WithPort(httpPort), + } + grpcPort := nat.Port("50051/tcp") + if exposeGRPCPort { + exposedPorts = append(exposedPorts, "50051/tcp") + waitStrategies = append(waitStrategies, wait.ForListeningPort(grpcPort)) + } + req := testcontainers.ContainerRequest{ + FromDockerfile: fromDockerFile, + Image: weaviateImage, + Hostname: containerName, + Name: containerName, + Networks: []string{networkName}, + NetworkAliases: map[string][]string{ + networkName: {containerName}, + }, + ExposedPorts: exposedPorts, + WaitingFor: wait.ForAll(waitStrategies...), + Env: env, + LifecycleHooks: []testcontainers.ContainerLifecycleHooks{ + { + // Use wait strategies as part of the lifecycle hooks as this gets propagated to the underlying container, + // which survives stop/start commands + PostStarts: []testcontainers.ContainerHook{ + func(ctx context.Context, container testcontainers.Container) error { + for _, waitStrategy := range waitStrategies { + ctx, cancel := context.WithTimeout(ctx, 180*time.Second) + defer cancel() + + if err := waitStrategy.WaitUntilReady(ctx, container); err != nil { + return err + } + } + return nil + }, + }, + }, + }, + } + c, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{ + ContainerRequest: req, + Started: true, + Reuse: false, + }) + if err != nil { + if terminateErr := testcontainers.TerminateContainer(c); terminateErr != nil { + return nil, fmt.Errorf("%w: failed to 
terminate: %w", err, terminateErr) + } + return nil, err + } + httpUri, err := c.PortEndpoint(ctx, httpPort, "") + if err != nil { + return nil, err + } + endpoints := make(map[EndpointName]endpoint) + endpoints[HTTP] = endpoint{httpPort, httpUri} + if exposeGRPCPort { + grpcUri, err := c.PortEndpoint(ctx, grpcPort, "") + if err != nil { + return nil, err + } + endpoints[GRPC] = endpoint{grpcPort, grpcUri} + } + return &DockerContainer{ + name: containerName, + endpoints: endpoints, + container: c, + envSettings: nil, + }, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/helper/assertions.go b/platform/dbops/binaries/weaviate-src/test/helper/assertions.go new file mode 100644 index 0000000000000000000000000000000000000000..567063aa9620331611de96779a6b5fc4d8fb1cfd --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/assertions.go @@ -0,0 +1,55 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package helper + +import ( + "encoding/json" + "reflect" + "testing" +) + +// Asserts that the request did not return an error. +// Optionally perform some checks only if the request did not fail +func AssertRequestOk(t *testing.T, response interface{}, err error, checkFn func()) { + t.Helper() + if err != nil { + responseJson, _ := json.MarshalIndent(response, "", " ") + errorPayload, _ := json.MarshalIndent(err, "", " ") + t.Fatalf("Failed to perform request! Error: %s %s (Original error %s). Response: %s", getType(err), errorPayload, err, responseJson) + } else { + if checkFn != nil { + checkFn() + } + } +} + +// Asserts that the request _did_ return an error. 
+// Optionally perform some checks only if the request failed +func AssertRequestFail(t *testing.T, response interface{}, err error, checkFn func()) { + if err == nil { + responseJson, _ := json.MarshalIndent(response, "", " ") + t.Fatalf("Request succeeded unexpectedly. Response:\n%s", responseJson) + } else { + if checkFn != nil { + checkFn() + } + } +} + +// Get type name of some value, according to https://stackoverflow.com/questions/35790935/using-reflection-in-go-to-get-the-name-of-a-struct +func getType(myvar interface{}) string { + if t := reflect.TypeOf(myvar); t.Kind() == reflect.Ptr { + return "*" + t.Elem().Name() + } else { + return t.Name() + } +} diff --git a/platform/dbops/binaries/weaviate-src/test/helper/backups.go b/platform/dbops/binaries/weaviate-src/test/helper/backups.go new file mode 100644 index 0000000000000000000000000000000000000000..a3af3017ab857b0b46cc038c816618961cea1a5a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/backups.go @@ -0,0 +1,228 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
// DefaultBackupConfig returns the backup configuration used by most tests:
// default compression level, the backup package's default CPU percentage and
// a chunk size of 128.
func DefaultBackupConfig() *models.BackupConfig {
	return &models.BackupConfig{
		CompressionLevel: models.BackupConfigCompressionLevelDefaultCompression,
		CPUPercentage:    backup.DefaultCPUPercentage,
		ChunkSize:        128,
	}
}

// DefaultRestoreConfig returns the restore configuration used by most tests
// (default CPU percentage only).
func DefaultRestoreConfig() *models.RestoreConfig {
	return &models.RestoreConfig{
		CPUPercentage: backup.DefaultCPUPercentage,
	}
}

// CreateBackup starts a backup of className to the given backend under
// backupID, without authentication.
func CreateBackup(t *testing.T, cfg *models.BackupConfig, className, backend, backupID string) (*backups.BackupsCreateOK, error) {
	params := backups.NewBackupsCreateParams().
		WithBackend(backend).
		WithBody(&models.BackupCreateRequest{
			ID:      backupID,
			Include: []string{className},
			Config:  cfg,
		})
	t.Logf("Creating backup with ID: %s, backend: %s, className: %s, config: %+v\n", backupID, backend, className, cfg)
	return Client(t).Backups.BackupsCreate(params, nil)
}

// CreateBackupWithAuthz is CreateBackup with an explicit auth writer.
func CreateBackupWithAuthz(t *testing.T, cfg *models.BackupConfig, className, backend, backupID string, authInfo runtime.ClientAuthInfoWriter) (*backups.BackupsCreateOK, error) {
	params := backups.NewBackupsCreateParams().
		WithBackend(backend).
		WithBody(&models.BackupCreateRequest{
			ID:      backupID,
			Include: []string{className},
			Config:  cfg,
		})
	t.Logf("Creating backup with ID: %s, backend: %s, className: %s, config: %+v\n", backupID, backend, className, cfg)
	return Client(t).Backups.BackupsCreate(params, authInfo)
}

// ListBackup lists all backups on the given backend, without authentication.
func ListBackup(t *testing.T, backend string) (*backups.BackupsListOK, error) {
	params := backups.NewBackupsListParams().
		WithBackend(backend)
	return Client(t).Backups.BackupsList(params, nil)
}

// ListBackupsWithAuthz is ListBackup with an explicit auth writer.
func ListBackupsWithAuthz(t *testing.T, backend string, authInfo runtime.ClientAuthInfoWriter) (*backups.BackupsListOK, error) {
	params := backups.NewBackupsListParams().
		WithBackend(backend)
	return Client(t).Backups.BackupsList(params, authInfo)
}

// CancelBackup cancels the backup identified by backupID, without
// authentication.
func CancelBackup(t *testing.T, backend, backupID string) error {
	params := backups.NewBackupsCancelParams().
		WithBackend(backend).
		WithID(backupID)
	_, err := Client(t).Backups.BackupsCancel(params, nil)
	return err
}

// CancelBackupWithAuthz is CancelBackup with an explicit auth writer.
func CancelBackupWithAuthz(t *testing.T, backend, backupID string, authInfo runtime.ClientAuthInfoWriter) error {
	params := backups.NewBackupsCancelParams().
		WithBackend(backend).
		WithID(backupID)
	_, err := Client(t).Backups.BackupsCancel(params, authInfo)
	return err
}

// CreateBackupStatus fetches the creation status of backupID. overrideBucket
// and overridePath are passed through as bucket/path query parameters (empty
// strings are still sent).
func CreateBackupStatus(t *testing.T, backend, backupID, overrideBucket, overridePath string) (*backups.BackupsCreateStatusOK, error) {
	params := backups.NewBackupsCreateStatusParams().
		WithBackend(backend).
		WithID(backupID).
		WithBucket(&overrideBucket).
		WithPath(&overridePath)
	return Client(t).Backups.BackupsCreateStatus(params, nil)
}

// CreateBackupStatusWithAuthz is CreateBackupStatus with an explicit auth writer.
func CreateBackupStatusWithAuthz(t *testing.T, backend, backupID, overrideBucket, overridePath string, authInfo runtime.ClientAuthInfoWriter) (*backups.BackupsCreateStatusOK, error) {
	params := backups.NewBackupsCreateStatusParams().
		WithBackend(backend).
		WithID(backupID).
		WithBucket(&overrideBucket).
		WithPath(&overridePath)
	return Client(t).Backups.BackupsCreateStatus(params, authInfo)
}

// RestoreBackup starts restoring className from backupID, optionally mapping
// node names via nodeMapping and overwriting aliases, without authentication.
func RestoreBackup(t *testing.T, cfg *models.RestoreConfig, className, backend, backupID string, nodeMapping map[string]string, overwriteAlias bool) (*backups.BackupsRestoreOK, error) {
	params := backups.NewBackupsRestoreParams().
		WithBackend(backend).
		WithID(backupID).
		WithBody(&models.BackupRestoreRequest{
			Include:        []string{className},
			NodeMapping:    nodeMapping,
			Config:         cfg,
			OverwriteAlias: overwriteAlias,
		})
	return Client(t).Backups.BackupsRestore(params, nil)
}

// RestoreBackupWithAuthz is RestoreBackup with an explicit auth writer.
// NOTE(review): unlike RestoreBackup this never sets OverwriteAlias (it stays
// at its zero value) — confirm this asymmetry is intentional.
func RestoreBackupWithAuthz(t *testing.T, cfg *models.RestoreConfig, className, backend, backupID string, nodeMapping map[string]string, authInfo runtime.ClientAuthInfoWriter) (*backups.BackupsRestoreOK, error) {
	params := backups.NewBackupsRestoreParams().
		WithBackend(backend).
		WithID(backupID).
		WithBody(&models.BackupRestoreRequest{
			Include:     []string{className},
			NodeMapping: nodeMapping,
			Config:      cfg,
		})
	return Client(t).Backups.BackupsRestore(params, authInfo)
}

// RestoreBackupStatus fetches the restore status of backupID. overrideBucket
// and overridePath are passed through as bucket/path query parameters.
func RestoreBackupStatus(t *testing.T, backend, backupID, overrideBucket, overridePath string) (*backups.BackupsRestoreStatusOK, error) {
	params := backups.NewBackupsRestoreStatusParams().
		WithBackend(backend).
		WithID(backupID).
		WithBucket(&overrideBucket).
		WithPath(&overridePath)
	return Client(t).Backups.BackupsRestoreStatus(params, nil)
}

// RestoreBackupStatusWithAuthz is RestoreBackupStatus with an explicit auth writer.
func RestoreBackupStatusWithAuthz(t *testing.T, backend, backupID, overrideBucket, overridePath string, authInfo runtime.ClientAuthInfoWriter) (*backups.BackupsRestoreStatusOK, error) {
	params := backups.NewBackupsRestoreStatusParams().
		WithBackend(backend).
		WithID(backupID).
		WithBucket(&overrideBucket).
		WithPath(&overridePath)
	return Client(t).Backups.BackupsRestoreStatus(params, authInfo)
}

const (
	MinPollInterval = 100 * time.Millisecond // Minimum interval for polling backup status.
	MaxDeadline     = 10 * time.Minute       // Maximum timeout for polling backup status.
)

// [backupExpectOpt.WithOptions] copies the struct, so it is safe to derive options
// from defaultBackupExpect directly:
//
//	defaultBackupExpect.WithOptions(opts)
var defaultBackupExpect = backupExpectOpt{
	Interval: 500 * time.Millisecond,
	Deadline: 30 * time.Second,
}

// backupExpectOpt holds the polling configuration for the ExpectBackup*
// helpers below.
type backupExpectOpt struct {
	Interval time.Duration // how often to poll the status endpoint
	Deadline time.Duration // how long to keep polling before failing
}

// WithOptions applies options to the copy of backupExpectOpt and returns it.
func (b backupExpectOpt) WithOptions(opts ...BackupExpectOpt) *backupExpectOpt {
	for _, opt := range opts {
		opt(&b)
	}
	return &b
}

// BackupExpectOpt mutates the polling configuration of an ExpectBackup* call.
type BackupExpectOpt func(*backupExpectOpt)

// Set the interval for polling backup create/restore status. Pass [MinPollInterval] for rapid checks.
func WithPollInterval(d time.Duration) BackupExpectOpt {
	return func(opt *backupExpectOpt) { opt.Interval = max(d, MinPollInterval) }
}

// Set the deadline for receiving status SUCCESS. Waiting indefinitely is not allowed, use [MaxDeadline] instead.
func WithDeadline(d time.Duration) BackupExpectOpt {
	return func(opt *backupExpectOpt) { opt.Deadline = min(d, MaxDeadline) }
}

// Expect creation status to report SUCCESS within 30s and with 500ms polling interval (default).
// Change polling configuration by passing [WithPollInterval] and [WithDeadline].
// To use in no-authz context, pass nil to the authz parameter.
func ExpectBackupEventuallyCreated(t *testing.T, backupID, backend string, authz runtime.ClientAuthInfoWriter, opts ...BackupExpectOpt) {
	t.Helper()
	opt := defaultBackupExpect.WithOptions(opts...)

	require.EventuallyWithTf(t, func(c *assert.CollectT) {
		// Calling -WithAuthz with nil-auth is equivalent to using its no-authz counterpart
		resp, err := CreateBackupStatusWithAuthz(t, backend, backupID, "", "", authz)

		require.NoError(c, err, "fetch backup create status")
		require.NotNil(c, resp.Payload, "empty response")

		status := *resp.Payload.Status
		require.Equal(c, "SUCCESS", status, "backup create status")
	}, opt.Deadline, opt.Interval, "backup %s not created after %s", backupID, opt.Deadline)
}

// Expect restore status to report SUCCESS within 30s and with 500ms polling interval (default).
// Change polling configuration by passing [WithPollInterval] and [WithDeadline].
// To use in no-authz context, pass nil to the authz parameter.
func ExpectBackupEventuallyRestored(t *testing.T, backupID, backend string, authz runtime.ClientAuthInfoWriter, opts ...BackupExpectOpt) {
	t.Helper()
	opt := defaultBackupExpect.WithOptions(opts...)

	require.EventuallyWithTf(t, func(c *assert.CollectT) {
		// Calling -WithAuthz with nil-auth is equivalent to using its no-authz counterpart
		resp, err := RestoreBackupStatusWithAuthz(t, backend, backupID, "", "", authz)

		require.NoError(c, err, "fetch backup restore status")
		require.NotNil(c, resp.Payload, "empty response")

		status := *resp.Payload.Status
		require.Equal(c, "SUCCESS", status, "backup restore status")
	}, opt.Deadline, opt.Interval, "backup %s not restored after %s", backupID, opt.Deadline)
}
// Client constructs a Weaviate API client pointing at the server configured
// via command-line flags (ServerHost/ServerPort/ServerScheme, see init.go).
// If a *testing.T is provided and DebugHTTP is set, HTTP traffic is logged
// to that test; pass nil to disable logging.
func Client(t *testing.T) *apiclient.Weaviate {
	transport := httptransport.New(fmt.Sprintf("%s:%s", ServerHost, ServerPort), "/v1", []string{ServerScheme})

	// If a test case is provided, and we want to dump HTTP traffic,
	// create a simple logger that logs HTTP traffic to the test case.
	if t != nil && DebugHTTP {
		transport.SetDebug(true)
		transport.SetLogger(&testLogger{t: t})
	}

	client := apiclient.New(transport, strfmt.Default)
	return client
}

// CreateAuth creates an auth writer for the Weaviate client that attaches
// the given API key as a Bearer token to every request.
func CreateAuth(apiKey string) runtime.ClientAuthInfoWriterFunc {
	return func(r runtime.ClientRequest, _ strfmt.Registry) error {
		return r.SetHeaderParam("Authorization", fmt.Sprintf("Bearer %s", apiKey))
	}
}

// ClientGRPC constructs a gRPC Weaviate client against the configured gRPC
// host/port, failing the test immediately if the connection or client cannot
// be created.
func ClientGRPC(t *testing.T) pb.WeaviateClient {
	conn, err := CreateGrpcConnectionClient(fmt.Sprintf("%s:%s", ServerGRPCHost, ServerGRPCPort))
	require.NoError(t, err)
	require.NotNil(t, conn)
	grpcClient := CreateGrpcWeaviateClient(conn)
	require.NotNil(t, grpcClient)
	return grpcClient
}
+// +// CONTACT: hello@weaviate.io +// + +package helper + +import ( + "testing" + "time" +) + +func EnablePQ(t *testing.T, className string, pq map[string]interface{}) { + class := GetClass(t, className) + cfg := class.VectorIndexConfig.(map[string]interface{}) + cfg["pq"] = pq + class.VectorIndexConfig = cfg + UpdateClass(t, class) + // Time for compression to complete + time.Sleep(2 * time.Second) +} diff --git a/platform/dbops/binaries/weaviate-src/test/helper/data.go b/platform/dbops/binaries/weaviate-src/test/helper/data.go new file mode 100644 index 0000000000000000000000000000000000000000..e79cf5b6747421663c7b0eced6fb64dfcaf3d582 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/data.go @@ -0,0 +1,32 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package helper + +import ( + "encoding/base64" + "io" + "os" +) + +// Helper methods +// get image and video blob fns +func GetBase64EncodedData(path string) (string, error) { + f, err := os.Open(path) + if err != nil { + return "", err + } + content, err := io.ReadAll(f) + if err != nil { + return "", err + } + return base64.StdEncoding.EncodeToString(content), nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/helper/distance.go b/platform/dbops/binaries/weaviate-src/test/helper/distance.go new file mode 100644 index 0000000000000000000000000000000000000000..4d44716fa56425b35a743e4c3bc2f9fafc60e4ae --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/distance.go @@ -0,0 +1,28 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package helper + +import ( + "testing" + + "github.com/weaviate/weaviate/entities/additional" +) + +func CertaintyToDist(t *testing.T, in float32) float32 { + asFloat64 := float64(in) + dist := additional.CertaintyToDistPtr(&asFloat64) + if dist == nil { + t.Fatalf( + "somehow %+v of type %T failed to produce a non-null *float64", in, in) + } + return float32(*dist) +} diff --git a/platform/dbops/binaries/weaviate-src/test/helper/eventually_equal.go b/platform/dbops/binaries/weaviate-src/test/helper/eventually_equal.go new file mode 100644 index 0000000000000000000000000000000000000000..3cc4d73d2e39e6afe8fbf82e392a9521bf7fd1bf --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/eventually_equal.go @@ -0,0 +1,80 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package helper + +import ( + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +type fakeT struct { + lastError error +} + +func (f *fakeT) Reset() { + f.lastError = nil +} + +func (f *fakeT) Errorf(msg string, args ...interface{}) { + f.lastError = fmt.Errorf(msg, args...) +} + +// AssertEventuallyEqual retries the 'actual' thunk every 10ms for a total of +// 300ms. If a single one succeeds, it returns, if all fails it eventually +// fails +func AssertEventuallyEqual(t *testing.T, expected interface{}, actualThunk func() interface{}, msg ...interface{}) { + t.Helper() + interval := 10 * time.Millisecond + timeout := 4000 * time.Millisecond + elapsed := 0 * time.Millisecond + fakeT := &fakeT{} + + for elapsed < timeout { + fakeT.Reset() + actual := actualThunk() + assert.Equal(fakeT, expected, actual, msg...) 
+ + if fakeT.lastError == nil { + return + } + + time.Sleep(interval) + elapsed += interval + } + + t.Errorf("waiting for %s, but never succeeded:\n\n%s", elapsed, fakeT.lastError) +} + +func AssertEventuallyEqualWithFrequencyAndTimeout(t *testing.T, expected interface{}, actualThunk func() interface{}, + interval time.Duration, timeout time.Duration, msg ...interface{}, +) { + elapsed := 0 * time.Millisecond + fakeT := &fakeT{} + + for elapsed < timeout { + fakeT.Reset() + actual := actualThunk() + assert.Equal(fakeT, expected, actual, msg...) + + if fakeT.lastError == nil { + return + } + + time.Sleep(interval) + elapsed += interval + } + + t.Errorf("waiting for %s, but never succeeded:\n\n%s", elapsed, fakeT.lastError) +} diff --git a/platform/dbops/binaries/weaviate-src/test/helper/grpc_client.go b/platform/dbops/binaries/weaviate-src/test/helper/grpc_client.go new file mode 100644 index 0000000000000000000000000000000000000000..9b4f9a59f8c0e65a90ec9aa49e29c88ae315b5a7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/grpc_client.go @@ -0,0 +1,44 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package helper + +import ( + "crypto/tls" + "fmt" + "strings" + + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/credentials/insecure" +) + +func CreateGrpcConnectionClient(host string) (*grpc.ClientConn, error) { + var opts []grpc.DialOption + if strings.HasSuffix(host, ":443") { + tlsConfig := &tls.Config{ + InsecureSkipVerify: true, + } + opts = append(opts, grpc.WithTransportCredentials(credentials.NewTLS(tlsConfig))) + } else { + opts = append(opts, grpc.WithTransportCredentials(insecure.NewCredentials())) + } + conn, err := grpc.NewClient(host, opts...) + if err != nil { + return nil, fmt.Errorf("failed to dial: %w", err) + } + return conn, nil +} + +func CreateGrpcWeaviateClient(conn *grpc.ClientConn) pb.WeaviateClient { + return pb.NewWeaviateClient(conn) +} diff --git a/platform/dbops/binaries/weaviate-src/test/helper/init.go b/platform/dbops/binaries/weaviate-src/test/helper/init.go new file mode 100644 index 0000000000000000000000000000000000000000..a674974dc178b769be8683768c0e6ef73ac601c0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/init.go @@ -0,0 +1,65 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package helper + +// This file contains the init() function for the helper package. +// In go, each package can have an init() function that runs whenever a package is "imported" in a program, before +// the main function runs. +// +// In our case, we use it to parse additional flags that are used to configure the helper to point to the right +// Weaviate instance, with the correct key and token. 
+ +import ( + "fmt" + + "github.com/go-openapi/runtime" +) + +// Configuration flags provided by the user that runs an acceptance test. +var ( + ServerPort string + ServerHost string + ServerGRPCPort string + ServerGRPCHost string + ServerScheme string + DebugHTTP bool +) + +// Credentials for the root key +var RootAuth runtime.ClientAuthInfoWriterFunc + +func init() { + if ServerScheme == "" { + ServerScheme = "http" + } + + if ServerPort == "" { + ServerPort = "8080" + } + + RootAuth = nil +} + +func ResetClient() { + ServerScheme = "http" + ServerPort = "8080" + ServerGRPCPort = "" + RootAuth = nil +} + +func GetWeaviateURL() string { + return fmt.Sprintf("%s://%s:%s", ServerScheme, ServerHost, ServerPort) +} + +func GetWeaviateGRPCURL() string { + return fmt.Sprintf("%s:%s", ServerGRPCHost, ServerGRPCPort) +} diff --git a/platform/dbops/binaries/weaviate-src/test/helper/logger.go b/platform/dbops/binaries/weaviate-src/test/helper/logger.go new file mode 100644 index 0000000000000000000000000000000000000000..a07a81250fabb5f3ee44cb9fa05d44ff35adfadf --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/logger.go @@ -0,0 +1,28 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package helper + +import "testing" + +// Internal struct to link the HTTP client logging of the Weaviate API client to the test's logging output. + +type testLogger struct { + t *testing.T +} + +func (tl *testLogger) Printf(format string, args ...interface{}) { + tl.t.Logf("HTTP LOG:\n"+format, args...) +} + +func (tl *testLogger) Debugf(format string, args ...interface{}) { + tl.t.Logf("HTTP DEBUG:\n"+format, args...) 
+} diff --git a/platform/dbops/binaries/weaviate-src/test/helper/objects.go b/platform/dbops/binaries/weaviate-src/test/helper/objects.go new file mode 100644 index 0000000000000000000000000000000000000000..e7cc882eb801c28b4879d4281732d5dcef775bf0 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/objects.go @@ -0,0 +1,627 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package helper + +import ( + "context" + "strings" + "testing" + "time" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/modelsext" + + "github.com/weaviate/weaviate/client/batch" + "github.com/weaviate/weaviate/client/meta" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/cluster/router/types" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema/crossref" + pb "github.com/weaviate/weaviate/grpc/generated/protocol/v1" +) + +func SetupClient(uri string) { + host, port := "", "" + res := strings.Split(uri, ":") + if len(res) == 2 { + host, port = res[0], res[1] + } + ServerHost = host + ServerPort = port +} + +func SetupGRPCClient(t *testing.T, uri string) { + host, port := "", "" + res := strings.Split(uri, ":") + if len(res) == 2 { + host, port = res[0], res[1] + } + ServerGRPCHost = host + ServerGRPCPort = port +} + +func CreateClass(t *testing.T, class *models.Class) { + t.Helper() + + // if the schema has mixed vectors, we have to create it in two steps as single step creation is forbidden + var capturedVectorConfig map[string]models.VectorConfig + if modelsext.ClassHasLegacyVectorIndex(class) && class.VectorConfig != nil 
{ + capturedVectorConfig = class.VectorConfig + class.VectorConfig = nil + } + + params := schema.NewSchemaObjectsCreateParams().WithObjectClass(class) + resp, err := Client(t).Schema.SchemaObjectsCreate(params, nil) + AssertRequestOk(t, resp, err, nil) + + if capturedVectorConfig != nil { + class.VectorConfig = capturedVectorConfig + updateParams := schema.NewSchemaObjectsUpdateParams().WithClassName(class.Class).WithObjectClass(class) + updateResp, err := Client(t).Schema.SchemaObjectsUpdate(updateParams, nil) + AssertRequestOk(t, updateResp, err, nil) + } +} + +func CreateClassAuth(t *testing.T, class *models.Class, key string) { + t.Helper() + params := schema.NewSchemaObjectsCreateParams().WithObjectClass(class) + resp, err := Client(t).Schema.SchemaObjectsCreate(params, CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) +} + +func GetClass(t *testing.T, class string) *models.Class { + t.Helper() + params := schema.NewSchemaObjectsGetParams().WithClassName(class) + resp, err := Client(t).Schema.SchemaObjectsGet(params, nil) + AssertRequestOk(t, resp, err, nil) + return resp.Payload +} + +func GetClassAuth(t *testing.T, class string, key string) *models.Class { + t.Helper() + params := schema.NewSchemaObjectsGetParams().WithClassName(class) + resp, err := Client(t).Schema.SchemaObjectsGet(params, CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + return resp.Payload +} + +func GetClassWithoutAssert(t *testing.T, class string) (*models.Class, error) { + t.Helper() + params := schema.NewSchemaObjectsGetParams().WithClassName(class) + resp, err := Client(t).Schema.SchemaObjectsGet(params, nil) + if err != nil { + return nil, err + } + return resp.Payload, nil +} + +func UpdateClass(t *testing.T, class *models.Class) { + t.Helper() + params := schema.NewSchemaObjectsUpdateParams(). 
+ WithObjectClass(class).WithClassName(class.Class) + resp, err := Client(t).Schema.SchemaObjectsUpdate(params, nil) + AssertRequestOk(t, resp, err, nil) +} + +func CreateObject(t *testing.T, object *models.Object) error { + t.Helper() + params := objects.NewObjectsCreateParams().WithBody(object) + resp, err := Client(t).Objects.ObjectsCreate(params, nil) + AssertRequestOk(t, resp, err, nil) + return err +} + +func CreateObjectWithTimeout(t *testing.T, object *models.Object, timeout time.Duration) error { + t.Helper() + params := objects.NewObjectsCreateParamsWithTimeout(timeout).WithBody(object) + resp, err := Client(t).Objects.ObjectsCreate(params, nil) + AssertRequestOk(t, resp, err, nil) + return err +} + +func CreateObjectAuth(t *testing.T, object *models.Object, key string) error { + t.Helper() + params := objects.NewObjectsCreateParams().WithBody(object) + _, err := Client(t).Objects.ObjectsCreate(params, CreateAuth(key)) + return err +} + +func CreateObjectWithResponse(t *testing.T, object *models.Object) (*models.Object, error) { + t.Helper() + params := objects.NewObjectsCreateParams().WithBody(object) + resp, err := Client(t).Objects.ObjectsCreate(params, nil) + if err != nil { + return nil, err + } + return resp.Payload, nil +} + +func CreateObjectCL(t *testing.T, object *models.Object, cl types.ConsistencyLevel) error { + t.Helper() + cls := string(cl) + params := objects.NewObjectsCreateParams().WithBody(object).WithConsistencyLevel(&cls) + resp, err := Client(t).Objects.ObjectsCreate(params, nil) + AssertRequestOk(t, resp, err, nil) + return nil +} + +func CreateObjectsBatchWithResponse(t *testing.T, objects []*models.Object) []*models.ObjectsGetResponse { + t.Helper() + params := batch.NewBatchObjectsCreateParams(). 
+ WithBody(batch.BatchObjectsCreateBody{ + Objects: objects, + }) + resp, err := Client(t).Batch.BatchObjectsCreate(params, nil) + AssertRequestOk(t, resp, err, nil) + CheckObjectsBatchResponse(t, resp.Payload, err) + return resp.Payload +} + +func CreateObjectsBatch(t *testing.T, objects []*models.Object) { + t.Helper() + CreateObjectsBatchWithResponse(t, objects) +} + +func CreateObjectsBatchAuth(t *testing.T, objects []*models.Object, key string) { + t.Helper() + params := batch.NewBatchObjectsCreateParams(). + WithBody(batch.BatchObjectsCreateBody{ + Objects: objects, + }) + resp, err := Client(t).Batch.BatchObjectsCreate(params, CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + CheckObjectsBatchResponse(t, resp.Payload, err) +} + +func CreateObjectsBatchCL(t *testing.T, objects []*models.Object, cl types.ConsistencyLevel) { + cls := string(cl) + params := batch.NewBatchObjectsCreateParams(). + WithBody(batch.BatchObjectsCreateBody{ + Objects: objects, + }).WithConsistencyLevel(&cls) + resp, err := Client(t).Batch.BatchObjectsCreate(params, nil) + AssertRequestOk(t, resp, err, nil) + CheckObjectsBatchResponse(t, resp.Payload, err) +} + +func CheckObjectsBatchResponse(t *testing.T, resp []*models.ObjectsGetResponse, err error) { + t.Helper() + AssertRequestOk(t, resp, err, nil) + for _, elem := range resp { + if !assert.Nil(t, elem.Result.Errors) { + t.Logf("expected nil, got: %v", + elem.Result.Errors.Error[0].Message) + } + } +} + +func UpdateObjectWithResponse(t *testing.T, object *models.Object) (*models.Object, error) { + t.Helper() + params := objects.NewObjectsUpdateParams().WithID(object.ID).WithBody(object) + resp, err := Client(t).Objects.ObjectsUpdate(params, nil) + AssertRequestOk(t, resp, err, nil) + if err != nil { + return nil, err + } + return resp.Payload, err +} + +func UpdateObject(t *testing.T, object *models.Object) error { + t.Helper() + _, err := UpdateObjectWithResponse(t, object) + return err +} + +func UpdateObjectCL(t *testing.T, 
object *models.Object, cl types.ConsistencyLevel) error { + t.Helper() + cls := string(cl) + params := objects.NewObjectsClassPutParams().WithClassName(object.Class). + WithID(object.ID).WithBody(object).WithConsistencyLevel(&cls) + resp, err := Client(t).Objects.ObjectsClassPut(params, nil) + AssertRequestOk(t, resp, err, nil) + return err +} + +func PatchObject(t *testing.T, object *models.Object) error { + t.Helper() + params := objects.NewObjectsPatchParams().WithID(object.ID).WithBody(object) + resp, err := Client(t).Objects.ObjectsPatch(params, nil) + AssertRequestOk(t, resp, err, nil) + return err +} + +func HeadObject(t *testing.T, id strfmt.UUID) error { + t.Helper() + params := objects.NewObjectsHeadParams().WithID(id) + resp, err := Client(t).Objects.ObjectsHead(params, nil) + AssertRequestOk(t, resp, err, nil) + return err +} + +func ValidateObject(t *testing.T, object *models.Object) error { + t.Helper() + params := objects.NewObjectsValidateParams().WithBody(object) + resp, err := Client(t).Objects.ObjectsValidate(params, nil) + AssertRequestOk(t, resp, err, nil) + return err +} + +func DeleteClass(t *testing.T, class string) { + t.Helper() + delParams := schema.NewSchemaObjectsDeleteParams().WithClassName(class) + delRes, err := Client(t).Schema.SchemaObjectsDelete(delParams, nil) + AssertRequestOk(t, delRes, err, nil) +} + +func DeleteClassWithAuthz(t *testing.T, class string, authInfo runtime.ClientAuthInfoWriter) { + t.Helper() + delParams := schema.NewSchemaObjectsDeleteParams().WithClassName(class) + delRes, err := Client(t).Schema.SchemaObjectsDelete(delParams, authInfo) + AssertRequestOk(t, delRes, err, nil) +} + +func DeleteClassAuth(t *testing.T, class string, key string) { + t.Helper() + delParams := schema.NewSchemaObjectsDeleteParams().WithClassName(class) + delRes, err := Client(t).Schema.SchemaObjectsDelete(delParams, CreateAuth(key)) + AssertRequestOk(t, delRes, err, nil) +} + +func DeleteObject(t *testing.T, object *models.Object) { + 
t.Helper() + params := objects.NewObjectsClassDeleteParams(). + WithClassName(object.Class).WithID(object.ID) + resp, err := Client(t).Objects.ObjectsClassDelete(params, nil) + AssertRequestOk(t, resp, err, nil) +} + +func DeleteObjectCL(t *testing.T, class string, id strfmt.UUID, cl types.ConsistencyLevel) { + cls := string(cl) + params := objects.NewObjectsClassDeleteParams(). + WithClassName(class).WithID(id).WithConsistencyLevel(&cls) + resp, err := Client(t).Objects.ObjectsClassDelete(params, nil) + AssertRequestOk(t, resp, err, nil) +} + +func DeleteObjectsBatchWithResponse(t *testing.T, body *models.BatchDelete, cl types.ConsistencyLevel) *models.BatchDeleteResponse { + t.Helper() + cls := string(cl) + params := batch.NewBatchObjectsDeleteParams().WithBody(body).WithConsistencyLevel(&cls) + resp, err := Client(t).Batch.BatchObjectsDelete(params, nil) + AssertRequestOk(t, resp, err, nil) + return resp.GetPayload() +} + +func DeleteObjectsBatch(t *testing.T, body *models.BatchDelete, cl types.ConsistencyLevel) { + t.Helper() + DeleteObjectsBatchWithResponse(t, body, cl) +} + +func DeleteTenantObjectsBatch(t *testing.T, body *models.BatchDelete, + tenant string, +) (*models.BatchDeleteResponse, error) { + t.Helper() + params := batch.NewBatchObjectsDeleteParams(). + WithBody(body).WithTenant(&tenant) + resp, err := Client(t).Batch.BatchObjectsDelete(params, nil) + if err != nil { + return nil, err + } + return resp.Payload, nil +} + +func DeleteTenantObjectsBatchCL(t *testing.T, body *models.BatchDelete, + tenant string, cl types.ConsistencyLevel, +) (*models.BatchDeleteResponse, error) { + cls := string(cl) + params := batch.NewBatchObjectsDeleteParams(). 
+ WithBody(body).WithTenant(&tenant).WithConsistencyLevel(&cls) + resp, err := Client(t).Batch.BatchObjectsDelete(params, nil) + if err != nil { + return nil, err + } + return resp.Payload, nil +} + +func AddReferences(t *testing.T, refs []*models.BatchReference) ([]*models.BatchReferenceResponse, error) { + t.Helper() + params := batch.NewBatchReferencesCreateParams().WithBody(refs) + resp, err := Client(t).Batch.BatchReferencesCreate(params, nil) + if err != nil { + return nil, err + } + return resp.Payload, nil +} + +func CheckReferencesBatchResponse(t *testing.T, resp []*models.BatchReferenceResponse, err error) { + t.Helper() + AssertRequestOk(t, resp, err, nil) + for _, elem := range resp { + if !assert.Nil(t, elem.Result.Errors) { + t.Logf("expected nil, got: %v", elem.Result.Errors.Error[0].Message) + } + } +} + +func AddReference(t *testing.T, object *models.Object, ref *models.SingleRef, prop string) { + t.Helper() + params := objects.NewObjectsClassReferencesCreateParams(). + WithClassName(object.Class).WithID(object.ID).WithBody(ref).WithPropertyName(prop) + resp, err := Client(t).Objects.ObjectsClassReferencesCreate(params, nil) + AssertRequestOk(t, resp, err, nil) +} + +func AddReferenceReturn(t *testing.T, ref *models.SingleRef, id strfmt.UUID, class, prop, tenant string, auth runtime.ClientAuthInfoWriter) (*objects.ObjectsClassReferencesCreateOK, error) { + t.Helper() + params := objects.NewObjectsClassReferencesCreateParams(). + WithClassName(class).WithID(id).WithBody(ref).WithPropertyName(prop) + if tenant != "" { + params.WithTenant(&tenant) + } + return Client(t).Objects.ObjectsClassReferencesCreate(params, auth) +} + +func ReplaceReferencesReturn(t *testing.T, refs []*models.SingleRef, id strfmt.UUID, class, prop, tenant string, auth runtime.ClientAuthInfoWriter) (*objects.ObjectsClassReferencesPutOK, error) { + t.Helper() + params := objects.NewObjectsClassReferencesPutParams(). 
+ WithClassName(class).WithID(id).WithBody(refs).WithPropertyName(prop) + if tenant != "" { + params.WithTenant(&tenant) + } + return Client(t).Objects.ObjectsClassReferencesPut(params, auth) +} + +func DeleteReferenceReturn(t *testing.T, ref *models.SingleRef, id strfmt.UUID, class, prop, tenant string, auth runtime.ClientAuthInfoWriter) (*objects.ObjectsClassReferencesDeleteNoContent, error) { + t.Helper() + params := objects.NewObjectsClassReferencesDeleteParams(). + WithClassName(class).WithID(id).WithBody(ref).WithPropertyName(prop) + if tenant != "" { + params.WithTenant(&tenant) + } + return Client(t).Objects.ObjectsClassReferencesDelete(params, auth) +} + +func AddReferenceTenant(t *testing.T, object *models.Object, ref *models.SingleRef, prop string, tenant string) { + t.Helper() + params := objects.NewObjectsClassReferencesCreateParams(). + WithClassName(object.Class).WithID(object.ID).WithBody(ref).WithPropertyName(prop).WithTenant(&tenant) + resp, err := Client(t).Objects.ObjectsClassReferencesCreate(params, nil) + AssertRequestOk(t, resp, err, nil) +} + +func DeleteReference(t *testing.T, object *models.Object, ref *models.SingleRef, prop string) { + t.Helper() + params := objects.NewObjectsClassReferencesDeleteParams(). + WithClassName(object.Class).WithID(object.ID).WithBody(ref).WithPropertyName(prop) + resp, err := Client(t).Objects.ObjectsClassReferencesDelete(params, nil) + AssertRequestOk(t, resp, err, nil) +} + +func DeleteReferenceTenant(t *testing.T, object *models.Object, ref *models.SingleRef, prop string, tenant string) { + t.Helper() + params := objects.NewObjectsClassReferencesDeleteParams(). 
+ WithClassName(object.Class).WithID(object.ID).WithBody(ref).WithPropertyName(prop).WithTenant(&tenant) + resp, err := Client(t).Objects.ObjectsClassReferencesDelete(params, nil) + AssertRequestOk(t, resp, err, nil) +} + +func UpdateReferenceTenant(t *testing.T, object *models.Object, ref models.MultipleRef, prop string, tenant string) { + t.Helper() + params := objects.NewObjectsClassReferencesPutParams(). + WithClassName(object.Class).WithID(object.ID).WithBody(ref).WithPropertyName(prop).WithTenant(&tenant) + resp, err := Client(t).Objects.ObjectsClassReferencesPut(params, nil) + AssertRequestOk(t, resp, err, nil) +} + +func CreateTenants(t *testing.T, class string, tenants []*models.Tenant) { + t.Helper() + params := schema.NewTenantsCreateParams().WithClassName(class).WithBody(tenants) + resp, err := Client(t).Schema.TenantsCreate(params, nil) + AssertRequestOk(t, resp, err, nil) +} + +func CreateTenantsAuth(t *testing.T, class string, tenants []*models.Tenant, key string) { + t.Helper() + params := schema.NewTenantsCreateParams().WithClassName(class).WithBody(tenants) + resp, err := Client(t).Schema.TenantsCreate(params, CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) +} + +func UpdateTenants(t *testing.T, class string, tenants []*models.Tenant) { + t.Helper() + params := schema.NewTenantsUpdateParams().WithClassName(class).WithBody(tenants) + resp, err := Client(t).Schema.TenantsUpdate(params, nil) + AssertRequestOk(t, resp, err, nil) +} + +func UpdateTenantsWithAuthz(t *testing.T, class string, tenants []*models.Tenant, authInfo runtime.ClientAuthInfoWriter) { + t.Helper() + params := schema.NewTenantsUpdateParams().WithClassName(class).WithBody(tenants) + resp, err := Client(t).Schema.TenantsUpdate(params, authInfo) + AssertRequestOk(t, resp, err, nil) +} + +func CreateTenantsReturnError(t *testing.T, class string, tenants []*models.Tenant) error { + t.Helper() + params := schema.NewTenantsCreateParams().WithClassName(class).WithBody(tenants) + _, 
err := Client(t).Schema.TenantsCreate(params, nil) + return err +} + +func UpdateTenantsReturnError(t *testing.T, class string, tenants []*models.Tenant) error { + t.Helper() + params := schema.NewTenantsUpdateParams().WithClassName(class).WithBody(tenants) + _, err := Client(t).Schema.TenantsUpdate(params, nil) + return err +} + +func GetTenants(t *testing.T, class string) (*schema.TenantsGetOK, error) { + t.Helper() + params := schema.NewTenantsGetParams().WithClassName(class) + resp, err := Client(t).Schema.TenantsGet(params, nil) + return resp, err +} + +func GetTenantsWithAuthz(t *testing.T, class string, authInfo runtime.ClientAuthInfoWriter) (*schema.TenantsGetOK, error) { + t.Helper() + params := schema.NewTenantsGetParams().WithClassName(class) + resp, err := Client(t).Schema.TenantsGet(params, authInfo) + return resp, err +} + +func GetOneTenant(t *testing.T, class, tenant string) (*schema.TenantsGetOneOK, error) { + t.Helper() + params := schema.NewTenantsGetOneParams().WithClassName(class).WithTenantName(tenant) + resp, err := Client(t).Schema.TenantsGetOne(params, nil) + return resp, err +} + +func GetTenantsGRPC(t *testing.T, class string) (*pb.TenantsGetReply, error) { + t.Helper() + return ClientGRPC(t).TenantsGet(context.TODO(), &pb.TenantsGetRequest{Collection: class}) +} + +func TenantExists(t *testing.T, class string, tenant string) (*schema.TenantExistsOK, error) { + params := schema.NewTenantExistsParams().WithClassName(class).WithTenantName(tenant) + resp, err := Client(t).Schema.TenantExists(params, nil) + return resp, err +} + +func DeleteTenants(t *testing.T, class string, tenants []string) error { + t.Helper() + params := schema.NewTenantsDeleteParams().WithClassName(class).WithTenants(tenants) + _, err := Client(t).Schema.TenantsDelete(params, nil) + return err +} + +func DeleteTenantsWithContext(t *testing.T, ctx context.Context, class string, tenants []string) error { + t.Helper() + params := 
schema.NewTenantsDeleteParams().WithContext(ctx).WithClassName(class).WithTenants(tenants) + _, err := Client(t).Schema.TenantsDelete(params, nil) + return err +} + +func NewBeacon(className string, id strfmt.UUID) strfmt.URI { + return crossref.New("localhost", className, id).SingleRef().Beacon +} + +func GetMeta(t *testing.T) *models.Meta { + t.Helper() + params := meta.NewMetaGetParams() + resp, err := Client(t).Meta.MetaGet(params, nil) + AssertRequestOk(t, resp, err, nil) + return resp.Payload +} + +func ObjectContentsProp(contents string) map[string]interface{} { + props := map[string]interface{}{} + props["contents"] = contents + return props +} + +func CreateAlias(t *testing.T, alias *models.Alias) { + CreateAliasWithAuthz(t, alias, nil) +} + +func CreateAliasWithReturn(t *testing.T, alias *models.Alias) (*schema.AliasesCreateOK, error) { + t.Helper() + params := schema.NewAliasesCreateParams().WithBody(alias) + resp, err := Client(t).Schema.AliasesCreate(params, nil) + return resp, err +} + +func CreateAliasWithAuthz(t *testing.T, alias *models.Alias, authInfo runtime.ClientAuthInfoWriter) { + t.Helper() + params := schema.NewAliasesCreateParams().WithBody(alias) + resp, err := Client(t).Schema.AliasesCreate(params, authInfo) + AssertRequestOk(t, resp, err, nil) +} + +func CreateAliasAuth(t *testing.T, alias *models.Alias, key string) { + t.Helper() + CreateAliasWithAuthz(t, alias, CreateAuth(key)) +} + +func GetAliases(t *testing.T, className *string) *models.AliasResponse { + return GetAliasesWithAuthz(t, className, nil) +} + +func GetAliasesWithAuthz(t *testing.T, className *string, authInfo runtime.ClientAuthInfoWriter) *models.AliasResponse { + t.Helper() + params := schema.NewAliasesGetParams().WithClass(className) + resp, err := Client(t).Schema.AliasesGet(params, authInfo) + AssertRequestOk(t, resp, err, nil) + return resp.GetPayload() +} + +func GetAlias(t *testing.T, aliasName string) *models.Alias { + return GetAliasWithAuthz(t, aliasName, nil) 
+} + +func GetAliasNotFound(t *testing.T, aliasName string) *models.Alias { + return GetAliasWithAuthzNotFound(t, aliasName, nil) +} + +func GetAliasWithAuthz(t *testing.T, aliasName string, authInfo runtime.ClientAuthInfoWriter) *models.Alias { + t.Helper() + params := schema.NewAliasesGetAliasParams().WithAliasName(aliasName) + resp, err := Client(t).Schema.AliasesGetAlias(params, authInfo) + AssertRequestOk(t, resp, err, nil) + return resp.GetPayload() +} + +func GetAliasWithAuthzNotFound(t *testing.T, aliasName string, authInfo runtime.ClientAuthInfoWriter) *models.Alias { + t.Helper() + params := schema.NewAliasesGetAliasParams().WithAliasName(aliasName) + resp, err := Client(t).Schema.AliasesGetAlias(params, authInfo) + AssertRequestFail(t, resp, err, nil) + return nil +} + +func UpdateAlias(t *testing.T, aliasName, targetClassName string) { + UpdateAliasWithAuthz(t, aliasName, targetClassName, nil) +} + +func UpdateAliasWithReturn(t *testing.T, aliasName, targetClassName string) (*schema.AliasesUpdateOK, error) { + t.Helper() + params := schema.NewAliasesUpdateParams().WithAliasName(aliasName).WithBody(schema.AliasesUpdateBody{Class: targetClassName}) + resp, err := Client(t).Schema.AliasesUpdate(params, nil) + return resp, err +} + +func UpdateAliasWithAuthz(t *testing.T, aliasName, targetClassName string, authInfo runtime.ClientAuthInfoWriter) { + t.Helper() + params := schema.NewAliasesUpdateParams().WithAliasName(aliasName).WithBody(schema.AliasesUpdateBody{Class: targetClassName}) + resp, err := Client(t).Schema.AliasesUpdate(params, authInfo) + AssertRequestOk(t, resp, err, nil) +} + +func DeleteAlias(t *testing.T, aliasName string) { + DeleteAliasWithAuthz(t, aliasName, nil) +} + +func DeleteAliasWithReturn(t *testing.T, aliasName string) (*schema.AliasesDeleteNoContent, error) { + t.Helper() + params := schema.NewAliasesDeleteParams().WithAliasName(aliasName) + resp, err := Client(t).Schema.AliasesDelete(params, nil) + return resp, err +} + +func 
DeleteAliasWithAuthz(t *testing.T, aliasName string, authInfo runtime.ClientAuthInfoWriter) { + t.Helper() + params := schema.NewAliasesDeleteParams().WithAliasName(aliasName) + resp, err := Client(t).Schema.AliasesDelete(params, authInfo) + AssertRequestOk(t, resp, err, nil) +} diff --git a/platform/dbops/binaries/weaviate-src/test/helper/objects_assertions.go b/platform/dbops/binaries/weaviate-src/test/helper/objects_assertions.go new file mode 100644 index 0000000000000000000000000000000000000000..66604819f7f61be16e65079a6dea0a143089d0da --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/objects_assertions.go @@ -0,0 +1,264 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package helper + +import ( + "errors" + "net/http" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/client/objects" + "github.com/weaviate/weaviate/client/schema" + "github.com/weaviate/weaviate/cluster/router/types" + "github.com/weaviate/weaviate/entities/models" +) + +func AssertCreateObject(t *testing.T, className string, schema map[string]interface{}) strfmt.UUID { + t.Helper() + params := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + Class: className, + Properties: schema, + }) + + resp, err := Client(t).Objects.ObjectsCreate(params, nil) + + var objectID strfmt.UUID + + // Ensure that the response is OK + AssertRequestOk(t, resp, err, func() { + objectID = resp.Payload.ID + }) + + return objectID +} + +func AssertGetObject(t *testing.T, class string, uuid strfmt.UUID, include ...string) *models.Object { + t.Helper() + obj, err := GetObject(t, class, uuid, include...) 
+ AssertRequestOk(t, obj, err, nil) + return obj +} + +func AssertGetObjectEventually(t *testing.T, class string, uuid strfmt.UUID) *models.Object { + var ( + resp *objects.ObjectsClassGetOK + err error + ) + + checkThunk := func() interface{} { + resp, err = Client(t).Objects.ObjectsClassGet(objects.NewObjectsClassGetParams().WithClassName(class).WithID(uuid), nil) + return err == nil + } + + AssertEventuallyEqual(t, true, checkThunk) + + var object *models.Object + + AssertRequestOk(t, resp, err, func() { + object = resp.Payload + }) + + return object +} + +func AssertGetObjectFailsEventually(t *testing.T, class string, uuid strfmt.UUID) error { + var err error + + checkThunk := func() interface{} { + _, err = Client(t).Objects.ObjectsClassGet(objects.NewObjectsClassGetParams().WithClassName(class).WithID(uuid), nil) + return err != nil + } + + AssertEventuallyEqual(t, true, checkThunk) + + return err +} + +func AssertCreateObjectClass(t *testing.T, class *models.Class) { + t.Helper() + params := schema.NewSchemaObjectsCreateParams().WithObjectClass(class) + resp, err := Client(t).Schema.SchemaObjectsCreate(params, nil) + AssertRequestOk(t, resp, err, nil) +} + +func AssertDeleteObjectClass(t *testing.T, class string) { + delRes, err := DeleteClassObject(t, class) + AssertRequestOk(t, delRes, err, nil) +} + +func GetObject(t *testing.T, class string, uuid strfmt.UUID, include ...string) (*models.Object, error) { + req := objects.NewObjectsClassGetParams().WithID(uuid) + if class != "" { + req.WithClassName(class) + } + if len(include) > 0 { + req.WithInclude(&include[0]) + } + getResp, err := Client(t).Objects.ObjectsClassGet(req, nil) + if err != nil { + return nil, err + } + return getResp.Payload, nil +} + +func AssertCreateObjectTenantVector(t *testing.T, className string, schema map[string]interface{}, tenant string, vector []float32) strfmt.UUID { + t.Helper() + params := objects.NewObjectsCreateParams().WithBody( + &models.Object{ + Class: className, + 
Properties: schema, + Tenant: tenant, + Vector: vector, + }) + + resp, err := Client(t).Objects.ObjectsCreate(params, nil) + + var objectID strfmt.UUID + + AssertRequestOk(t, resp, err, func() { + objectID = resp.Payload.ID + }) + + return objectID +} + +func TenantObject(t *testing.T, class string, id strfmt.UUID, tenant string) (*models.Object, error) { + req := objects.NewObjectsClassGetParams(). + WithClassName(class).WithID(id).WithTenant(&tenant) + getResp, err := Client(t).Objects.ObjectsClassGet(req, nil) + if err != nil { + return nil, err + } + return getResp.Payload, nil +} + +func TenantObjectWithInclude(t *testing.T, class string, id strfmt.UUID, tenant string, includes string) (*models.Object, error) { + req := objects.NewObjectsClassGetParams(). + WithClassName(class).WithID(id).WithTenant(&tenant).WithInclude(&includes) + getResp, err := Client(t).Objects.ObjectsClassGet(req, nil) + if err != nil { + return nil, err + } + return getResp.Payload, nil +} + +func GetObjectCL(t *testing.T, class string, uuid strfmt.UUID, + cl types.ConsistencyLevel, include ...string, +) (*models.Object, error) { + req := objects.NewObjectsClassGetParams().WithID(uuid) + if class != "" { + req.WithClassName(class) + } + if len(include) > 0 { + req.WithInclude(&include[0]) + } + cls := string(cl) + req.ConsistencyLevel = &cls + getResp, err := Client(t).Objects.ObjectsClassGet(req, nil) + if err != nil { + return nil, err + } + return getResp.Payload, nil +} + +func ObjectExistsCL(t *testing.T, class string, id strfmt.UUID, cl types.ConsistencyLevel) (bool, error) { + cls := string(cl) + req := objects.NewObjectsClassHeadParams(). 
+		WithClassName(class).WithID(id).WithConsistencyLevel(&cls)
+	resp, err := Client(t).Objects.ObjectsClassHead(req, nil)
+	notFoundErr := objects.NewObjectsClassHeadNotFound()
+	if errors.As(err, &notFoundErr) {
+		return false, nil
+	}
+	if err != nil {
+		return false, err
+	}
+	return resp.IsCode(http.StatusNoContent), nil
+}
+
+func TenantObjectExists(t *testing.T, class string, id strfmt.UUID, tenant string) (bool, error) {
+	req := objects.NewObjectsClassHeadParams().
+		WithClassName(class).WithID(id).WithTenant(&tenant)
+	resp, err := Client(t).Objects.ObjectsClassHead(req, nil)
+	if err != nil {
+		return false, err
+	}
+	return resp.IsCode(http.StatusNoContent), nil
+}
+
+func GetObjectFromNode(t *testing.T, class string, uuid strfmt.UUID, nodename string) (*models.Object, error) {
+	req := objects.NewObjectsClassGetParams().WithID(uuid)
+	if class != "" {
+		req.WithClassName(class)
+	}
+	if nodename != "" {
+		req.WithNodeName(&nodename)
+	}
+	getResp, err := Client(t).Objects.ObjectsClassGet(req, nil)
+	if err != nil {
+		return nil, err
+	}
+	return getResp.Payload, nil
+}
+
+func GetTenantObjectFromNode(t *testing.T, class string, uuid strfmt.UUID, nodename, tenant string) (*models.Object, error) {
+	req := objects.NewObjectsClassGetParams().WithID(uuid).
+		WithClassName(class).
+		WithNodeName(&nodename).
+		WithTenant(&tenant)
+	getResp, err := Client(t).Objects.ObjectsClassGet(req, nil)
+	if err != nil {
+		return nil, err
+	}
+	return getResp.Payload, nil
+}
+
+func DeleteClassObject(t *testing.T, class string) (*schema.SchemaObjectsDeleteOK, error) {
+	delParams := schema.NewSchemaObjectsDeleteParams().WithClassName(class)
+	return Client(t).Schema.SchemaObjectsDelete(delParams, nil)
+}
+
+func DeleteTenantObject(t *testing.T, class string, id strfmt.UUID, tenant string, cl types.ConsistencyLevel) {
+	cls := string(cl)
+	params := objects.NewObjectsClassDeleteParams().
+ WithClassName(class).WithID(id).WithTenant(&tenant).WithConsistencyLevel(&cls) + resp, err := Client(t).Objects.ObjectsClassDelete(params, nil) + AssertRequestOk(t, resp, err, nil) +} + +func ListObjects(t *testing.T, class string) (*models.ObjectsListResponse, error) { + params := objects.NewObjectsListParams() + if class != "" { + params.WithClass(&class) + } + + resp, err := Client(t).Objects.ObjectsList(params, nil) + if err != nil { + return nil, err + } + return resp.Payload, nil +} + +func TenantListObjects(t *testing.T, class string, tenant string) (*models.ObjectsListResponse, error) { + params := objects.NewObjectsListParams().WithTenant(&tenant) + if class != "" { + params.WithClass(&class) + } + + resp, err := Client(t).Objects.ObjectsList(params, nil) + if err != nil { + return nil, err + } + return resp.Payload, nil +} diff --git a/platform/dbops/binaries/weaviate-src/test/helper/operations_client.go b/platform/dbops/binaries/weaviate-src/test/helper/operations_client.go new file mode 100644 index 0000000000000000000000000000000000000000..9773c838253bcdd9798c52e40ebec81f659c9dd3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/operations_client.go @@ -0,0 +1,72 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +// This file contains the Client(t *testing.T) function, that can be used to construct a client that talks to +// the Weaviate server that is configured using command line arguments (see init.go). +// +// We pass in the test (*testing.T), to be able to log HTTP traffic to that specific test case. +// This allows us to get detailed logs of the performed HTTP requests if a acceptance test fails. 
+ +// The CreateAuth returns a function that attaches the key and token headers to each HTTP call. + +// Example: +// func TestSomething(t *testing.T) { +// // Use specific key & token +// auth := helper.CreateAuth(key, token) +// helper.Client(t).SomeScope.SomeOperation(&someParams, auth) +// +// // Use root key & token +// helper.Client(t).SomeScope.SomeOperation(&someParams, helper.RootAuth) +// } + +package helper + +import ( + "fmt" + "testing" + + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/client/batch" + operations_apiclient "github.com/weaviate/weaviate/client/operations" +) + +// Create a client that logs with t.Logf, if a *testing.T is provided. +// If there is no test case at hand, pass in nil to disable logging. +func OperationsClient(t *testing.T) operations_apiclient.ClientService { + transport := httptransport.New(fmt.Sprintf("%s:%s", ServerHost, ServerPort), "/v1", []string{ServerScheme}) + + // If a test case is provided, and we want to dump HTTP traffic, + // create a simple logger that logs HTTP traffic to the test case. + if t != nil && DebugHTTP { + transport.SetDebug(true) + transport.SetLogger(&testLogger{t: t}) + } + + client := operations_apiclient.New(transport, strfmt.Default) + return client +} + +// Create a client that logs with t.Logf, if a *testing.T is provided. +// If there is no test case at hand, pass in nil to disable logging. +func BatchClient(t *testing.T) batch.ClientService { + transport := httptransport.New(fmt.Sprintf("%s:%s", ServerHost, ServerPort), "/v1", []string{ServerScheme}) + + // If a test case is provided, and we want to dump HTTP traffic, + // create a simple logger that logs HTTP traffic to the test case. 
+ if t != nil && DebugHTTP { + transport.SetDebug(true) + transport.SetLogger(&testLogger{t: t}) + } + + client := batch.New(transport, strfmt.Default) + return client +} diff --git a/platform/dbops/binaries/weaviate-src/test/helper/race_off.go b/platform/dbops/binaries/weaviate-src/test/helper/race_off.go new file mode 100644 index 0000000000000000000000000000000000000000..6ca6b02ca45e7d395881b0133382ecfedc12bf98 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/race_off.go @@ -0,0 +1,17 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +//go:build !race +// +build !race + +package helper + +var RaceDetectorEnabled = false diff --git a/platform/dbops/binaries/weaviate-src/test/helper/race_on.go b/platform/dbops/binaries/weaviate-src/test/helper/race_on.go new file mode 100644 index 0000000000000000000000000000000000000000..a7f71da8bd25d2b4a17cf614f4e27552200c54a7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/race_on.go @@ -0,0 +1,17 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +//go:build race +// +build race + +package helper + +var RaceDetectorEnabled = true diff --git a/platform/dbops/binaries/weaviate-src/test/helper/rbac.go b/platform/dbops/binaries/weaviate-src/test/helper/rbac.go new file mode 100644 index 0000000000000000000000000000000000000000..a3fb87871db63c04b9dc526eead752fb56f58da3 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/helper/rbac.go @@ -0,0 +1,500 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package helper + +import ( + "errors" + "testing" + "time" + + "github.com/go-openapi/strfmt" + + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/client/authz" + "github.com/weaviate/weaviate/client/users" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func CreateRole(t *testing.T, key string, role *models.Role) { + t.Helper() + resp, err := Client(t).Authz.CreateRole(authz.NewCreateRoleParams().WithBody(role), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) +} + +func GetRoles(t *testing.T, key string) []*models.Role { + resp, err := Client(t).Authz.GetRoles(authz.NewGetRolesParams(), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + return resp.Payload +} + +func GetRolesForUser(t *testing.T, user, key string, includeRoles bool) []*models.Role { + t.Helper() + userType := models.UserTypeInputDb + resp, err := Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID(user).WithUserType(string(userType)).WithIncludeFullRoles(&includeRoles), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + return resp.Payload +} + +func GetRolesForUserOIDC(t 
*testing.T, user, key string) []*models.Role { + t.Helper() + truep := true + userType := models.UserTypeInputOidc + resp, err := Client(t).Authz.GetRolesForUser(authz.NewGetRolesForUserParams().WithID(user).WithUserType(string(userType)).WithIncludeFullRoles(&truep), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + return resp.Payload +} + +func GetUserForRoles(t *testing.T, roleName, key string) []string { + t.Helper() + resp, err := Client(t).Authz.GetUsersForRole(authz.NewGetUsersForRoleParams().WithID(roleName), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + userIds := make([]string, 0, len(resp.Payload)) + for _, user := range resp.Payload { + if *user.UserType == models.UserTypeOutputOidc { + continue + } + userIds = append(userIds, user.UserID) + } + return userIds +} + +func GetUserForRolesBoth(t *testing.T, roleName, key string) []*authz.GetUsersForRoleOKBodyItems0 { + t.Helper() + resp, err := Client(t).Authz.GetUsersForRole(authz.NewGetUsersForRoleParams().WithID(roleName), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + return resp.Payload +} + +func GetInfoForOwnUser(t *testing.T, key string) *models.UserOwnInfo { + t.Helper() + resp, err := Client(t).Users.GetOwnInfo(users.NewGetOwnInfoParams(), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + return resp.Payload +} + +func DeleteUser(t *testing.T, userId, key string) { + t.Helper() + resp, err := Client(t).Users.DeleteUser(users.NewDeleteUserParams().WithUserID(userId), CreateAuth(key)) + if err != nil { + var parsed *users.DeleteUserNotFound + require.True(t, errors.As(err, &parsed)) + } else { + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + } +} + +func GetUser(t *testing.T, userId, key string) *models.DBUserInfo { + t.Helper() + resp, err := Client(t).Users.GetUserInfo(users.NewGetUserInfoParams().WithUserID(userId), CreateAuth(key)) + AssertRequestOk(t, resp, err, 
nil) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + return resp.Payload +} + +func GetUserWithLastUsedTime(t *testing.T, userId, key string, lastUsedTime bool) *models.DBUserInfo { + t.Helper() + resp, err := Client(t).Users.GetUserInfo(users.NewGetUserInfoParams().WithUserID(userId).WithIncludeLastUsedTime(&lastUsedTime), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + return resp.Payload +} + +func CreateUser(t *testing.T, userId, key string) string { + t.Helper() + resp, err := Client(t).Users.CreateUser(users.NewCreateUserParams().WithUserID(userId), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Payload) + require.NotNil(t, resp.Payload.Apikey) + return *resp.Payload.Apikey +} + +func CreateUserWithApiKey(t *testing.T, userId, key string, createdAt *time.Time) string { + t.Helper() + tp := true + if createdAt == nil { + createdAt = &time.Time{} + } + + resp, err := Client(t).Users.CreateUser(users.NewCreateUserParams().WithUserID(userId).WithBody(users.CreateUserBody{Import: &tp, CreateTime: strfmt.DateTime(*createdAt)}), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Payload) + require.NotNil(t, resp.Payload.Apikey) + return *resp.Payload.Apikey +} + +func RotateKey(t *testing.T, userId, key string) string { + t.Helper() + resp, err := Client(t).Users.RotateUserAPIKey(users.NewRotateUserAPIKeyParams().WithUserID(userId), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + require.NotNil(t, resp) + require.NotNil(t, resp.Payload) + require.NotNil(t, resp.Payload.Apikey) + return *resp.Payload.Apikey +} + +func DeactivateUser(t *testing.T, key, userId string, revokeKey bool) { + t.Helper() + resp, err := 
Client(t).Users.DeactivateUser(users.NewDeactivateUserParams().WithUserID(userId).WithBody(users.DeactivateUserBody{RevokeKey: &revokeKey}), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.NoError(t, err) +} + +func ActivateUser(t *testing.T, key, userId string) { + t.Helper() + resp, err := Client(t).Users.ActivateUser(users.NewActivateUserParams().WithUserID(userId), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.NoError(t, err) +} + +func ListAllUsers(t *testing.T, key string) []*models.DBUserInfo { + t.Helper() + resp, err := Client(t).Users.ListAllUsers(users.NewListAllUsersParams(), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + return resp.Payload +} + +func ListAllUsersWithIncludeTime(t *testing.T, key string, includeLastUsedTime bool) []*models.DBUserInfo { + t.Helper() + resp, err := Client(t).Users.ListAllUsers(users.NewListAllUsersParams().WithIncludeLastUsedTime(&includeLastUsedTime), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + return resp.Payload +} + +func DeleteRole(t *testing.T, key, role string) { + t.Helper() + resp, err := Client(t).Authz.DeleteRole(authz.NewDeleteRoleParams().WithID(role), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) +} + +func GetRoleByName(t *testing.T, key, role string) *models.Role { + t.Helper() + resp, err := Client(t).Authz.GetRole(authz.NewGetRoleParams().WithID(role), CreateAuth(key)) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + require.NotNil(t, resp.Payload) + return resp.Payload +} + +func AssignRoleToUser(t *testing.T, key, role, user string) { + t.Helper() + userType := models.UserTypeInputDb + resp, err := Client(t).Authz.AssignRoleToUser( + authz.NewAssignRoleToUserParams().WithID(user).WithBody(authz.AssignRoleToUserBody{Roles: []string{role}, UserType: userType}), + CreateAuth(key), + ) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) +} + +func 
AssignRoleToUserOIDC(t *testing.T, key, role, user string) { + t.Helper() + userType := models.UserTypeInputOidc + resp, err := Client(t).Authz.AssignRoleToUser( + authz.NewAssignRoleToUserParams().WithID(user).WithBody(authz.AssignRoleToUserBody{Roles: []string{role}, UserType: userType}), + CreateAuth(key), + ) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) +} + +func RevokeRoleFromUser(t *testing.T, key, role, user string) { + userType := models.UserTypeInputDb + + resp, err := Client(t).Authz.RevokeRoleFromUser( + authz.NewRevokeRoleFromUserParams().WithID(user).WithBody(authz.RevokeRoleFromUserBody{Roles: []string{role}, UserType: userType}), + CreateAuth(key), + ) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) +} + +func AssignRoleToGroup(t *testing.T, key, role, group string) { + t.Helper() + resp, err := Client(t).Authz.AssignRoleToGroup( + authz.NewAssignRoleToGroupParams().WithID(group).WithBody(authz.AssignRoleToGroupBody{Roles: []string{role}, GroupType: models.GroupTypeOidc}), + CreateAuth(key), + ) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) +} + +func RevokeRoleFromGroup(t *testing.T, key, role, group string) { + t.Helper() + resp, err := Client(t).Authz.RevokeRoleFromGroup( + authz.NewRevokeRoleFromGroupParams().WithID(group).WithBody(authz.RevokeRoleFromGroupBody{Roles: []string{role}, GroupType: models.GroupTypeOidc}), + CreateAuth(key), + ) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) +} + +func GetRolesForGroup(t *testing.T, key, group string, includeRoles bool) []*models.Role { + includeRolesP := &includeRoles + resp, err := Client(t).Authz.GetRolesForGroup( + authz.NewGetRolesForGroupParams().WithID(group).WithGroupType(string(models.GroupTypeOidc)).WithIncludeFullRoles(includeRolesP), + CreateAuth(key), + ) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + return resp.Payload +} + +func GetKnownGroups(t *testing.T, key string) []string { + resp, err := Client(t).Authz.GetGroups( 
+ authz.NewGetGroupsParams().WithGroupType(string(models.GroupTypeOidc)), CreateAuth(key), + ) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + return resp.Payload +} + +func GetGroupsForRole(t *testing.T, key, roleName string) []string { + resp, err := Client(t).Authz.GetGroupsForRole( + authz.NewGetGroupsForRoleParams().WithID(roleName), + CreateAuth(key), + ) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) + + // there are only OIDC groups right now + groupNames := make([]string, 0) + for _, group := range resp.Payload { + groupNames = append(groupNames, group.GroupID) + } + return groupNames +} + +func AddPermissions(t *testing.T, key, role string, permissions ...*models.Permission) { + resp, err := Client(t).Authz.AddPermissions( + authz.NewAddPermissionsParams().WithID(role).WithBody(authz.AddPermissionsBody{ + Permissions: permissions, + }), + CreateAuth(key), + ) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) +} + +func RemovePermissions(t *testing.T, key, role string, permissions ...*models.Permission) { + resp, err := Client(t).Authz.RemovePermissions( + authz.NewRemovePermissionsParams().WithID(role).WithBody(authz.RemovePermissionsBody{ + Permissions: permissions, + }), + CreateAuth(key), + ) + AssertRequestOk(t, resp, err, nil) + require.Nil(t, err) +} + +type BackupPermission models.Permission + +func NewBackupPermission() *BackupPermission { + return &BackupPermission{} +} + +func (p *BackupPermission) WithAction(action string) *BackupPermission { + p.Action = authorization.String(action) + return p +} + +func (p *BackupPermission) WithCollection(collection string) *BackupPermission { + if p.Backups == nil { + p.Backups = &models.PermissionBackups{} + } + p.Backups.Collection = authorization.String(collection) + return p +} + +func (p *BackupPermission) Permission() *models.Permission { + perm := models.Permission(*p) + return &perm +} + +type CollectionsPermission models.Permission + +func 
NewCollectionsPermission() *CollectionsPermission { + return &CollectionsPermission{} +} + +func (p *CollectionsPermission) WithAction(action string) *CollectionsPermission { + p.Action = authorization.String(action) + return p +} + +func (p *CollectionsPermission) WithCollection(collection string) *CollectionsPermission { + if p.Collections == nil { + p.Collections = &models.PermissionCollections{} + } + p.Collections.Collection = authorization.String(collection) + return p +} + +func (p *CollectionsPermission) Permission() *models.Permission { + perm := models.Permission(*p) + return &perm +} + +type TenantsPermission models.Permission + +func NewTenantsPermission() *TenantsPermission { + return &TenantsPermission{} +} + +func (p *TenantsPermission) WithAction(action string) *TenantsPermission { + p.Action = authorization.String(action) + return p +} + +func (p *TenantsPermission) WithCollection(collection string) *TenantsPermission { + if p.Tenants == nil { + p.Tenants = &models.PermissionTenants{} + } + p.Tenants.Collection = authorization.String(collection) + return p +} + +func (p *TenantsPermission) WithTenant(tenant string) *TenantsPermission { + if p.Tenants == nil { + p.Tenants = &models.PermissionTenants{} + } + p.Tenants.Tenant = authorization.String(tenant) + return p +} + +func (p *TenantsPermission) Permission() *models.Permission { + perm := models.Permission(*p) + return &perm +} + +type DataPermission models.Permission + +func NewDataPermission() *DataPermission { + return &DataPermission{} +} + +func (p *DataPermission) WithAction(action string) *DataPermission { + p.Action = authorization.String(action) + return p +} + +func (p *DataPermission) WithCollection(collection string) *DataPermission { + if p.Data == nil { + p.Data = &models.PermissionData{} + } + p.Data.Collection = authorization.String(collection) + return p +} + +func (p *DataPermission) WithTenant(tenant string) *DataPermission { + if p.Data == nil { + p.Data = 
&models.PermissionData{} + } + p.Data.Tenant = authorization.String(tenant) + return p +} + +func (p *DataPermission) WithObject(object string) *DataPermission { + if p.Data == nil { + p.Data = &models.PermissionData{} + } + p.Data.Object = authorization.String(object) + return p +} + +func (p *DataPermission) Permission() *models.Permission { + perm := models.Permission(*p) + return &perm +} + +type NodesPermission models.Permission + +func NewNodesPermission() *NodesPermission { + return &NodesPermission{} +} + +func (p *NodesPermission) WithAction(action string) *NodesPermission { + p.Action = authorization.String(action) + return p +} + +func (p *NodesPermission) WithVerbosity(verbosity string) *NodesPermission { + if p.Nodes == nil { + p.Nodes = &models.PermissionNodes{} + } + p.Nodes.Verbosity = authorization.String(verbosity) + return p +} + +func (p *NodesPermission) WithCollection(collection string) *NodesPermission { + if p.Nodes == nil { + p.Nodes = &models.PermissionNodes{} + } + p.Nodes.Collection = authorization.String(collection) + return p +} + +func (p *NodesPermission) Permission() *models.Permission { + perm := models.Permission(*p) + return &perm +} + +type AliasesPermission models.Permission + +func NewAliasesPermission() *AliasesPermission { + return &AliasesPermission{} +} + +func (p *AliasesPermission) WithAction(action string) *AliasesPermission { + p.Action = authorization.String(action) + return p +} + +func (p *AliasesPermission) WithCollection(collection string) *AliasesPermission { + if p.Aliases == nil { + p.Aliases = &models.PermissionAliases{} + } + p.Aliases.Collection = authorization.String(collection) + return p +} + +func (p *AliasesPermission) WithAlias(alias string) *AliasesPermission { + if p.Aliases == nil { + p.Aliases = &models.PermissionAliases{} + } + p.Aliases.Alias = authorization.String(alias) + return p +} + +func (p *AliasesPermission) Permission() *models.Permission { + perm := models.Permission(*p) + return &perm 
+}
diff --git a/platform/dbops/binaries/weaviate-src/test/helper/test_data.go b/platform/dbops/binaries/weaviate-src/test/helper/test_data.go
new file mode 100644
index 0000000000000000000000000000000000000000..d132124659adf5ec0999c433f12c9de1c5220474
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/test/helper/test_data.go
@@ -0,0 +1,33 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+//  CONTACT: hello@weaviate.io
+//
+
+package helper
+
+import (
+	"math/rand"
+	"time"
+)
+
+// GetRandomString returns a string comprised of random
+// samplings of charset, of length specified by caller
+func GetRandomString(length int) string {
+	const charset = "abcdefghijklmnopqrstuvwxyz" +
+		"ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*"
+
+	seededRand := rand.New(rand.NewSource(time.Now().UnixNano()))
+
+	s := make([]byte, length)
+	for n := range s {
+		s[n] = charset[seededRand.Intn(len(charset))]
+	}
+
+	return string(s)
+}
diff --git a/platform/dbops/binaries/weaviate-src/test/helper/uuid.go b/platform/dbops/binaries/weaviate-src/test/helper/uuid.go
new file mode 100644
index 0000000000000000000000000000000000000000..eab290e93fc49223b1f186499065d61f34c45651
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/test/helper/uuid.go
@@ -0,0 +1,31 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+//  CONTACT: hello@weaviate.io
+//
+
+package helper
+
+import (
+	"encoding/binary"
+
+	"github.com/go-openapi/strfmt"
+	"github.com/google/uuid"
+)
+
+// IntToUUID takes an unsigned int64 and places it in BigEndian fashion into the
+// upper 8 bytes of a 16 byte UUID. 
This makes it easy to produce easy-to-read +// UUIDs in test scenarios. For example: +// +// IntToUUID(1) +// // returns "00000000-0000-0000-0000-000000000001" +func IntToUUID(in uint64) strfmt.UUID { + id := [16]byte{} + binary.BigEndian.PutUint64(id[8:16], in) + return strfmt.UUID(uuid.UUID(id).String()) +} diff --git a/platform/dbops/binaries/weaviate-src/test/integration/run.sh b/platform/dbops/binaries/weaviate-src/test/integration/run.sh new file mode 100644 index 0000000000000000000000000000000000000000..3481b2683ecd76102973c924f14593cfeee7d633 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/test/integration/run.sh @@ -0,0 +1,55 @@ +#!/bin/bash + +set -e + +function echo_yellow() { + yellow='\033[0;33m' + nc='\033[0m' + echo -e "${yellow}${*}${nc}" +} + +export DISABLE_RECOVERY_ON_PANIC=true + +includeslow=false +onlyvectorpkg=false +withoutvectorpkg=false + +for arg in "$@"; do + if [[ $arg == --include-slow ]]; then + includeslow=true + shift + fi + if [[ $arg == --only-vector-pkg ]]; then + onlyvectorpkg=true + shift + fi + if [[ $arg == --without-vector-pkg ]]; then + withoutvectorpkg=true + shift + fi +done + +tags=integrationTest +if [ $includeslow = true ]; then + echo_yellow "Found --include-slow flag, running all tests, including the slow ones" + tags="$tags,integrationTestSlow" +else + echo_yellow "Found no --include-slow flag, skipping the slow ones" +fi + +pkgs="" +if [ $withoutvectorpkg = true ]; then + echo_yellow "Running integration tests without adapters/repos/db/vector package" + pkgs=$(go list ./adapters/repos/... | grep -v "adapters/repos/db/vector") +elif [ $onlyvectorpkg = true ]; then + echo_yellow "Running only adapters/repos/db/vector package integration tests" + pkgs="./adapters/repos/db/vector/..." +fi + + +echo_yellow "Run the regular integration tests with race detector ON" +go test $pkgs -count 1 -timeout 3000s -coverpkg=./adapters/repos/... 
-coverprofile=coverage-integration.txt -race -tags=$tags "$@" ./adapters/repos/... +echo_yellow "Run the !race integration tests with race detector OFF" +go test $pkgs -count 1 -coverpkg=./adapters/repos/... -tags=$tags "$@" -run Test_NoRace ./adapters/repos/... +echo_yellow "Run the classification integration tests with race detector ON" +go test -count 1 -race -tags=$tags "$@" ./usecases/classification/...