diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/anonymous/middleware.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/anonymous/middleware.go new file mode 100644 index 0000000000000000000000000000000000000000..caa3b8f9a610aa801702e3108c699a595f59f519 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/anonymous/middleware.go @@ -0,0 +1,94 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package anonymous + +import ( + "fmt" + "net/http" + "strings" + + "github.com/go-openapi/runtime" + "github.com/weaviate/weaviate/usecases/config" +) + +// Client for anonymous access +type Client struct { + config config.AnonymousAccess + apiKeyEnabled bool + oidcEnabled bool +} + +// New anonymous access client. Client.Middleware can be used as a regular +// golang http-middleware +func New(cfg config.Config) *Client { + return &Client{config: cfg.Authentication.AnonymousAccess, apiKeyEnabled: cfg.Authentication.AnyApiKeyAvailable(), oidcEnabled: cfg.Authentication.OIDC.Enabled} +} + +// Middleware will fail unauthenticated requests if anonymous access is +// disabled. This middleware should run after all previous middlewares. +func (c *Client) Middleware(next http.Handler) http.Handler { + if c.config.Enabled { + // Anonymous Access is allowed, this means we don't have to validate any + // further, let's just return the original middleware stack + + return next + } + + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if hasBearerAuth(r) { + // if an OIDC-Header is present we can be sure that the OIDC + // Authenticator has already validated the token, so we don't have to do + // anything and can call the next handler. 
+ next.ServeHTTP(w, r) + return + } + + w.WriteHeader(401) + var authSchemas []string + if c.apiKeyEnabled { + authSchemas = append(authSchemas, "API-keys") + } + if c.oidcEnabled { + authSchemas = append(authSchemas, "OIDC") + } + + w.Write([]byte( + fmt.Sprintf( + `{"code":401,"message": "anonymous access not enabled. Please authenticate through one of the available methods: [%s]" }`, strings.Join(authSchemas, ", "), + ), + )) + }) +} + +func hasBearerAuth(r *http.Request) bool { + // The following logic to decide whether OIDC information is set is taken + // straight from go-swagger to make sure the decision matches: + // https://github.com/go-openapi/runtime/blob/109737172424d8a656fd1199e28c9f5cc89b0cca/security/authenticator.go#L208-L225 + const prefix = "Bearer " + var token string + hdr := r.Header.Get("Authorization") + if strings.HasPrefix(hdr, prefix) { + token = strings.TrimPrefix(hdr, prefix) + } + if token == "" { + qs := r.URL.Query() + token = qs.Get("access_token") + } + //#nosec + ct, _, _ := runtime.ContentType(r.Header) + if token == "" && (ct == "application/x-www-form-urlencoded" || ct == "multipart/form-data") { + token = r.FormValue("access_token") + } + // End of go-swagger logic + + return token != "" +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/anonymous/middleware_test.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/anonymous/middleware_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9cae1a77540bda31ae3045621e59432e3274dd9c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/anonymous/middleware_test.go @@ -0,0 +1,102 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package anonymous + +import ( + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_AnonymousMiddleware_Enabled(t *testing.T) { + // when anonymous access is enabled, we don't need to do anything and can + // safely call the next next handler + + r := httptest.NewRequest("GET", "/foo", nil) + w := httptest.NewRecorder() + + next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(900) + }) + + cfg := config.Config{ + Authentication: config.Authentication{ + AnonymousAccess: config.AnonymousAccess{ + Enabled: true, + }, + }, + } + + New(cfg).Middleware(next).ServeHTTP(w, r) + response := w.Result() + defer response.Body.Close() + + assert.Equal(t, response.StatusCode, 900) +} + +func Test_AnonymousMiddleware_Disabled(t *testing.T) { + t.Run("when OIDC is enabled, but no token provided", func(t *testing.T) { + r := httptest.NewRequest("GET", "/foo", nil) + w := httptest.NewRecorder() + + next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(900) + }) + + cfg := config.Config{ + Authentication: config.Authentication{ + AnonymousAccess: config.AnonymousAccess{ + Enabled: false, + }, + OIDC: config.OIDC{ + Enabled: true, + }, + }, + } + + New(cfg).Middleware(next).ServeHTTP(w, r) + response := w.Result() + defer response.Body.Close() + + assert.Equal(t, response.StatusCode, 401) + }) + + t.Run("when OIDC is enabled, and a Bearer Header provided", func(t *testing.T) { + r := httptest.NewRequest("GET", "/foo", nil) + r.Header.Add("Authorization", "Bearer foo") + w := httptest.NewRecorder() + + next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(900) + }) + + cfg := config.Config{ + Authentication: config.Authentication{ + AnonymousAccess: config.AnonymousAccess{ + Enabled: false, + }, + OIDC: config.OIDC{ + Enabled: true, + }, + }, + 
} + + New(cfg).Middleware(next).ServeHTTP(w, r) + response := w.Result() + defer response.Body.Close() + + assert.Equal(t, response.StatusCode, 900) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/client.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/client.go new file mode 100644 index 0000000000000000000000000000000000000000..3ac7a232f3f7495b14a0c19140ed16d515069507 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/client.go @@ -0,0 +1,112 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package apikey + +import ( + "crypto/sha256" + "crypto/subtle" + "fmt" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/config" +) + +type StaticApiKey struct { + config config.StaticAPIKey + weakKeyStorage [][sha256.Size]byte +} + +func NewStatic(cfg config.Config) (*StaticApiKey, error) { + c := &StaticApiKey{ + config: cfg.Authentication.APIKey, + } + + if err := c.validateConfig(); err != nil { + return nil, fmt.Errorf("invalid apikey config: %w", err) + } + + c.parseKeys() + + return c, nil +} + +func (c *StaticApiKey) parseKeys() { + c.weakKeyStorage = make([][sha256.Size]byte, len(c.config.AllowedKeys)) + for i, rawKey := range c.config.AllowedKeys { + c.weakKeyStorage[i] = sha256.Sum256([]byte(rawKey)) + } +} + +func (c *StaticApiKey) validateConfig() error { + if !c.config.Enabled { + // don't validate if this scheme isn't used + return nil + } + + if len(c.config.AllowedKeys) < 1 { + return fmt.Errorf("need at least one valid allowed key") + } + + for _, key := range c.config.AllowedKeys { + if len(key) == 0 { + return fmt.Errorf("keys cannot have length 0") + } + } + + 
if len(c.config.Users) < 1 { + return fmt.Errorf("need at least one user") + } + + for _, key := range c.config.Users { + if len(key) == 0 { + return fmt.Errorf("users cannot have length 0") + } + } + + if len(c.config.Users) > 1 && len(c.config.Users) != len(c.config.AllowedKeys) { + return fmt.Errorf("length of users and keys must match, alternatively provide single user for all keys") + } + + return nil +} + +func (c *StaticApiKey) ValidateAndExtract(token string, scopes []string) (*models.Principal, error) { + tokenPos, ok := c.isTokenAllowed(token) + if !ok { + return nil, fmt.Errorf("invalid api key") + } + + return &models.Principal{ + Username: c.getUser(tokenPos), UserType: models.UserTypeInputDb, + }, nil +} + +func (c *StaticApiKey) isTokenAllowed(token string) (int, bool) { + tokenHash := sha256.Sum256([]byte(token)) + + for i, allowed := range c.weakKeyStorage { + if subtle.ConstantTimeCompare(tokenHash[:], allowed[:]) == 1 { + return i, true + } + } + + return -1, false +} + +func (c *StaticApiKey) getUser(pos int) string { + // passed validation guarantees that one of those options will work + if pos >= len(c.config.Users) { + return c.config.Users[0] + } + + return c.config.Users[pos] +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/client_test.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/client_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1ef5da04b0c2e12c211b6515ecf30daab2b052f2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/client_test.go @@ -0,0 +1,186 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package apikey + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_APIKeyClient(t *testing.T) { + type test struct { + name string + config config.StaticAPIKey + expectConfigErr bool + expectConfigErrMsg string + validate func(t *testing.T, c *StaticApiKey) + } + + tests := []test{ + { + name: "not enabled", + config: config.StaticAPIKey{ + Enabled: false, + }, + expectConfigErr: false, + }, + { + name: "key, but no user", + config: config.StaticAPIKey{ + Enabled: true, + AllowedKeys: []string{"secret-key"}, + Users: []string{}, + }, + expectConfigErr: true, + expectConfigErrMsg: "need at least one user", + }, + { + name: "zero length key", + config: config.StaticAPIKey{ + Enabled: true, + AllowedKeys: []string{""}, + Users: []string{"gooduser"}, + }, + expectConfigErr: true, + expectConfigErrMsg: "keys cannot have length 0", + }, + { + name: "user, but no key", + config: config.StaticAPIKey{ + Enabled: true, + AllowedKeys: []string{}, + Users: []string{"johnnyBeAllowed"}, + }, + expectConfigErr: true, + expectConfigErrMsg: "need at least one valid allowed key", + }, + { + name: "zero length user", + config: config.StaticAPIKey{ + Enabled: true, + AllowedKeys: []string{"secret-key"}, + Users: []string{""}, + }, + expectConfigErr: true, + expectConfigErrMsg: "users cannot have length 0", + }, + { + name: "one user, one key", + config: config.StaticAPIKey{ + Enabled: true, + AllowedKeys: []string{"secret-key"}, + Users: []string{"mrRoboto"}, + }, + expectConfigErr: false, + validate: func(t *testing.T, c *StaticApiKey) { + p, err := c.ValidateAndExtract("secret-key", nil) + require.Nil(t, err) + assert.Equal(t, "mrRoboto", p.Username) + + _, err = c.ValidateAndExtract("", nil) + require.NotNil(t, err) + _, err = c.ValidateAndExtract("other-key", nil) + require.NotNil(t, err) + }, + }, + { + // this is allowed, 
this means that all keys point to the same user for + // authZ purposes + name: "one user, multiple keys", + config: config.StaticAPIKey{ + Enabled: true, + AllowedKeys: []string{"secret-key", "another-secret-key", "third-key"}, + Users: []string{"jane"}, + }, + expectConfigErr: false, + validate: func(t *testing.T, c *StaticApiKey) { + p, err := c.ValidateAndExtract("secret-key", nil) + require.Nil(t, err) + assert.Equal(t, "jane", p.Username) + + p, err = c.ValidateAndExtract("another-secret-key", nil) + require.Nil(t, err) + assert.Equal(t, "jane", p.Username) + + p, err = c.ValidateAndExtract("third-key", nil) + require.Nil(t, err) + assert.Equal(t, "jane", p.Username) + + _, err = c.ValidateAndExtract("", nil) + require.NotNil(t, err) + _, err = c.ValidateAndExtract("other-key", nil) + require.NotNil(t, err) + }, + }, + { + // this is allowed, this means that each key at pos i points to user at + // pos i for authZ purposes + name: "multiple user, multiple keys", + config: config.StaticAPIKey{ + Enabled: true, + AllowedKeys: []string{"secret-key", "another-secret-key", "third-key"}, + Users: []string{"jane", "jessica", "jennifer"}, + }, + expectConfigErr: false, + validate: func(t *testing.T, c *StaticApiKey) { + p, err := c.ValidateAndExtract("secret-key", nil) + require.Nil(t, err) + assert.Equal(t, "jane", p.Username) + + p, err = c.ValidateAndExtract("another-secret-key", nil) + require.Nil(t, err) + assert.Equal(t, "jessica", p.Username) + + p, err = c.ValidateAndExtract("third-key", nil) + require.Nil(t, err) + assert.Equal(t, "jennifer", p.Username) + + _, err = c.ValidateAndExtract("", nil) + require.NotNil(t, err) + _, err = c.ValidateAndExtract("other-key", nil) + require.NotNil(t, err) + }, + }, + { + // this is invalid, the keys cannot be mapped to the users + name: "2 users, 3 keys", + config: config.StaticAPIKey{ + Enabled: true, + AllowedKeys: []string{"secret-key", "another-secret-key", "third-key"}, + Users: []string{"jane", "jessica"}, + }, + 
expectConfigErr: true, + expectConfigErrMsg: "length of users and keys must match, alternatively provide single user for all keys", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + c, err := NewStatic(config.Config{ + Authentication: config.Authentication{APIKey: test.config}, + }) + + if test.expectConfigErr { + require.NotNil(t, err) + assert.Contains(t, err.Error(), test.expectConfigErrMsg) + return + } + + if test.validate != nil { + test.validate(t, c) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/db_user_test.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/db_user_test.go new file mode 100644 index 0000000000000000000000000000000000000000..7b8514a3d740d9d7d80c3c4c72e83b003bf44100 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/db_user_test.go @@ -0,0 +1,457 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package apikey + +import ( + "crypto/sha256" + "fmt" + "strconv" + "sync" + "testing" + "time" + + "github.com/weaviate/weaviate/entities/models" + + "github.com/sirupsen/logrus/hooks/test" + + "github.com/weaviate/weaviate/usecases/auth/authentication/apikey/keys" + + "github.com/stretchr/testify/require" +) + +var log, _ = test.NewNullLogger() + +func TestDynUserConcurrency(t *testing.T) { + dynUsers, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, err) + + numUsers := 10 + + wg := sync.WaitGroup{} + wg.Add(numUsers) + + userNames := make([]string, 0, numUsers) + for i := 0; i < numUsers; i++ { + userName := fmt.Sprintf("user%v", i) + go func() { + err := dynUsers.CreateUser(userName, "something", userName, "", time.Now()) + require.NoError(t, err) + wg.Done() + }() + userNames = append(userNames, userName) + } + wg.Wait() + + users, err := dynUsers.GetUsers(userNames...) + require.NoError(t, err) + require.Equal(t, len(userNames), len(users)) +} + +func TestConcurrentValidate(t *testing.T) { + dynUsers, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, err) + userId1 := "id" + userId2 := "id2" + + apiKey, hash, identifier, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + + require.NoError(t, dynUsers.CreateUser(userId1, hash, identifier, "", time.Now())) + + apiKey2, hash2, identifier2, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + + require.NoError(t, dynUsers.CreateUser(userId2, hash2, identifier2, "", time.Now())) + + randomKey, _, err := keys.DecodeApiKey(apiKey) + require.NoError(t, err) + randomKey2, _, err := keys.DecodeApiKey(apiKey2) + require.NoError(t, err) + start := time.Now() + wg := sync.WaitGroup{} + for i := 0; i < 10; i++ { + wg.Add(2) + go func() { + _, err := dynUsers.ValidateAndExtract(randomKey, identifier) + require.NoError(t, err) + wg.Done() + }() + + go func() { + _, err := dynUsers.ValidateAndExtract(randomKey2, identifier2) + 
require.NoError(t, err) + wg.Done() + }() + } + wg.Wait() + + users, err := dynUsers.GetUsers(userId1) + require.NoError(t, err) + user := users[userId1] + require.Less(t, start, user.LastUsedAt) +} + +func TestDynUserTestSlowAfterWeakHash(t *testing.T) { + dynUsers, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, err) + userId := "id" + + apiKey, hash, identifier, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + + require.NoError(t, dynUsers.CreateUser(userId, hash, identifier, "", time.Now())) + + randomKey, _, err := keys.DecodeApiKey(apiKey) + require.NoError(t, err) + + _, ok := dynUsers.memoryOnlyData.weakKeyStorageById[userId] + require.False(t, ok) + + startSlow := time.Now() + _, err = dynUsers.ValidateAndExtract(randomKey, identifier) + require.NoError(t, err) + tookSlow := time.Since(startSlow) + + _, ok = dynUsers.memoryOnlyData.weakKeyStorageById[userId] + require.True(t, ok) + + startFast := time.Now() + _, err = dynUsers.ValidateAndExtract(randomKey, identifier) + require.NoError(t, err) + tookFast := time.Since(startFast) + require.Less(t, tookFast, tookSlow) +} + +func TestUpdateUser(t *testing.T) { + dynUsers, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, err) + userId := "id" + + apiKey, hash, oldIdentifier, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + + require.NoError(t, dynUsers.CreateUser(userId, hash, oldIdentifier, "", time.Now())) + + // login works + randomKeyOld, _, err := keys.DecodeApiKey(apiKey) + require.NoError(t, err) + + principal, err := dynUsers.ValidateAndExtract(randomKeyOld, oldIdentifier) + require.NoError(t, err) + require.NotNil(t, principal) + + // update key and check that original key does not work, but new one does + apiKeyNew, hashNew, newIdentifier, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + require.NoError(t, dynUsers.RotateKey(userId, apiKeyNew[:3], hashNew, oldIdentifier, newIdentifier)) + + randomKeyNew, _, err := 
keys.DecodeApiKey(apiKeyNew) + require.NoError(t, err) + + principal, err = dynUsers.ValidateAndExtract(randomKeyOld, oldIdentifier) + require.Error(t, err) + require.Nil(t, principal) + + // first login with new key is slow again, second is fast + startSlow := time.Now() + principal, err = dynUsers.ValidateAndExtract(randomKeyNew, newIdentifier) + require.NoError(t, err) + require.NotNil(t, principal) + tookSlow := time.Since(startSlow) + + startFast := time.Now() + _, err = dynUsers.ValidateAndExtract(randomKeyNew, newIdentifier) + require.NoError(t, err) + tookFast := time.Since(startFast) + require.Less(t, tookFast, tookSlow) +} + +func TestSnapShotAndRestore(t *testing.T) { + dynUsers, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, err) + + userId1 := "id-1" + userId2 := "id-2" + + apiKey, hash, identifier, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + + require.NoError(t, dynUsers.CreateUser(userId1, hash, identifier, "", time.Now())) + login1, _, err := keys.DecodeApiKey(apiKey) + require.NoError(t, err) + + apiKey2, hash2, identifier2, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + require.NoError(t, dynUsers.CreateUser(userId2, hash2, identifier2, "", time.Now())) + login2, _, err := keys.DecodeApiKey(apiKey2) + require.NoError(t, err) + + // first login is slow, second is fast + startSlow := time.Now() + principal, err := dynUsers.ValidateAndExtract(login1, identifier) + require.NoError(t, err) + require.NotNil(t, principal) + tookSlow := time.Since(startSlow) + + startFast := time.Now() + _, err = dynUsers.ValidateAndExtract(login1, identifier) + require.NoError(t, err) + tookFast := time.Since(startFast) + require.Less(t, tookFast, tookSlow) + + principal2, err := dynUsers.ValidateAndExtract(login2, identifier2) + require.NoError(t, err) + require.NotNil(t, principal2) + + require.NoError(t, dynUsers.DeactivateUser(userId2, true)) + + // create snapshot and restore to an empty new DBUser struct + snapShot, 
err := dynUsers.Snapshot() + require.NoError(t, err) + + dynUsers2, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, err) + require.NoError(t, dynUsers2.Restore(snapShot)) + + // content should be identical: + // - all users and their status present + // - taking a new snapshot should be identical + // - only weak hash is missing => first login should be slow again + snapshot2, err := dynUsers2.Snapshot() + require.NoError(t, err) + require.Equal(t, snapShot, snapshot2) + + startAfterRestoreSlow := time.Now() + _, err = dynUsers2.ValidateAndExtract(login1, identifier) + require.NoError(t, err) + tookAfterRestore := time.Since(startAfterRestoreSlow) + require.Less(t, tookFast, tookAfterRestore) + + _, err = dynUsers2.ValidateAndExtract(login2, identifier2) + require.Error(t, err) + + apiKey3, hash3, identifier3, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + require.NoError(t, dynUsers2.RotateKey(userId2, apiKey3[:3], hash3, identifier2, identifier3)) + + login3, _, err := keys.DecodeApiKey(apiKey3) + require.NoError(t, err) + _, err = dynUsers2.ValidateAndExtract(login3, identifier3) + require.Error(t, err) +} + +func TestSuspendAfterDelete(t *testing.T) { + dynUsers, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, err) + userId := "id" + + _, hash, identifier, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + + require.NoError(t, dynUsers.CreateUser(userId, hash, identifier, "", time.Now())) + + users, err := dynUsers.GetUsers(userId) + require.NoError(t, err) + require.Contains(t, users, userId) + require.Len(t, users, 1) + + require.NoError(t, dynUsers.DeleteUser(userId)) + + require.Error(t, dynUsers.DeactivateUser(userId, false)) + require.Error(t, dynUsers.ActivateUser(userId)) + require.Error(t, dynUsers.RotateKey(userId, "", "", "", "")) + require.Error(t, dynUsers.ActivateUser(userId)) +} + +func TestLastUsedTime(t *testing.T) { + dynUsers, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, 
err) + userId := "user" + + start := time.Now() + + apiKey, hash, identifier, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + + require.NoError(t, dynUsers.CreateUser(userId, hash, identifier, "", time.Now())) + + user, err := dynUsers.GetUsers(userId) + require.NoError(t, err) + require.Less(t, user[userId].LastUsedAt, start) // no usage yet + + login, _, err := keys.DecodeApiKey(apiKey) + require.NoError(t, err) + _, err = dynUsers.ValidateAndExtract(login, identifier) + require.NoError(t, err) + + user, err = dynUsers.GetUsers(userId) + require.NoError(t, err) + require.Less(t, start, user[userId].LastUsedAt) // was just used + require.Less(t, user[userId].LastUsedAt, time.Now()) + lastUsedTime := user[userId].LastUsedAt + + // try to update with older timestamp => no effect + dynUsers.UpdateLastUsedTimestamp(map[string]time.Time{userId: start}) + user, err = dynUsers.GetUsers(userId) + require.NoError(t, err) + + require.Equal(t, user[userId].LastUsedAt, lastUsedTime) + + // update with newer timestamp (that another node has seen) + updateTime := time.Now() + dynUsers.UpdateLastUsedTimestamp(map[string]time.Time{userId: updateTime}) + user, err = dynUsers.GetUsers(userId) + require.NoError(t, err) + + require.Equal(t, user[userId].LastUsedAt, updateTime) +} + +func TestImportingAndSuspendingStaticKeys(t *testing.T) { + dynUsers, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, err) + + createdAt := time.Now() + userId := "user" + importedApiKey := "importedApiKey" + require.NoError(t, dynUsers.CreateUserWithKey(userId, importedApiKey[:3], sha256.Sum256([]byte(importedApiKey)), createdAt)) + + principal, err := dynUsers.ValidateImportedKey(importedApiKey) + require.NoError(t, err) + require.NotNil(t, principal) + require.Equal(t, userId, principal.Username) + + require.NoError(t, dynUsers.DeactivateUser(userId, true)) + + principal, err = dynUsers.ValidateImportedKey(importedApiKey) + require.Error(t, err) + require.Nil(t, principal) + 
+ require.NoError(t, dynUsers.ActivateUser(userId)) + principal, err = dynUsers.ValidateImportedKey(importedApiKey) + require.Error(t, err) + require.Nil(t, principal) + + apiKey, hash, identifier, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + require.NoError(t, dynUsers.RotateKey(userId, apiKey[:3], hash, "imported_"+userId, identifier)) + + login, _, err := keys.DecodeApiKey(apiKey) + require.NoError(t, err) + _, err = dynUsers.ValidateAndExtract(login, identifier) + require.NoError(t, err) + + principal, err = dynUsers.ValidateImportedKey(importedApiKey) + require.NoError(t, err) // error is only returned if key is deactivated + require.Nil(t, principal) +} + +func TestImportingStaticKeys(t *testing.T) { + dynUsers, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, err) + createdAt := time.Now() + for i := 0; i < 10; i++ { + userId := "user" + strconv.Itoa(i) + importedApiKey := "importedApiKey" + strconv.Itoa(i) + require.NoError(t, dynUsers.CreateUserWithKey(userId, importedApiKey[:3], sha256.Sum256([]byte(importedApiKey)), createdAt)) + + principal, err := dynUsers.ValidateImportedKey(importedApiKey) + require.NoError(t, err) + require.NotNil(t, principal) + require.Equal(t, userId, principal.Username) + require.Equal(t, principal.UserType, models.UserTypeInputDb) + + require.True(t, dynUsers.IsBlockedKey(importedApiKey)) + } + + for i := 0; i < 10; i++ { + userId := "user" + strconv.Itoa(i) + importedApiKey := "importedApiKey" + strconv.Itoa(i) + + users, err := dynUsers.GetUsers(userId) + require.NoError(t, err) + require.Len(t, users, 1) + user, ok := users[userId] + require.True(t, ok) + require.Equal(t, user.Id, userId) + require.Equal(t, user.InternalIdentifier, "imported_"+userId) + require.Equal(t, user.CreatedAt, createdAt) + require.True(t, user.ImportedWithKey) + + apiKey, hash, identifier, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + require.NoError(t, dynUsers.RotateKey(userId, apiKey[:3], hash, 
"imported_"+userId, identifier)) + + login, _, err := keys.DecodeApiKey(apiKey) + require.NoError(t, err) + _, err = dynUsers.ValidateAndExtract(login, identifier) + require.NoError(t, err) + + users, err = dynUsers.GetUsers(userId) + require.NoError(t, err) + require.Len(t, users, 1) + user, ok = users[userId] + require.True(t, ok) + require.Equal(t, user.Id, userId) + require.Equal(t, user.InternalIdentifier, identifier) + require.Equal(t, user.CreatedAt, createdAt) + require.False(t, user.ImportedWithKey) + + require.True(t, dynUsers.IsBlockedKey(importedApiKey)) + + } +} + +func TestImportingStaticKeysWithTime(t *testing.T) { + dynUsers, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, err) + createdAt := time.Now().Add(-time.Hour) + + importedApiKey := "importedApiKey" + userId := "user" + require.NoError(t, dynUsers.CreateUserWithKey(userId, importedApiKey[:3], sha256.Sum256([]byte(importedApiKey)), createdAt)) + + users, err := dynUsers.GetUsers(userId) + require.NoError(t, err) + require.Len(t, users, 1) + user, ok := users[userId] + require.True(t, ok) + require.Equal(t, user.CreatedAt, createdAt) +} + +func TestSnapshotRestoreEmpty(t *testing.T) { + dynUsers, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, err) + userId := "user" + + _, hash, identifier, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + + require.NoError(t, dynUsers.CreateUser(userId, hash, identifier, "", time.Now())) + user, err := dynUsers.GetUsers(userId) + require.NoError(t, err) + require.Equal(t, user[userId].Id, userId) + + err = dynUsers.Restore([]byte{}) + require.NoError(t, err) + + // nothing overwritten + user, err = dynUsers.GetUsers(userId) + require.NoError(t, err) + require.Equal(t, user[userId].Id, userId) +} + +func TestRestoreInvalidData(t *testing.T) { + dynUsers, err := NewDBUser(t.TempDir(), true, log) + require.NoError(t, err) + + require.Error(t, dynUsers.Restore([]byte("invalid json"))) +} diff --git 
a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/db_users.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/db_users.go new file mode 100644 index 0000000000000000000000000000000000000000..d565ac4e8246639f53121c9b3f27d0fe435ad84a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/db_users.go @@ -0,0 +1,550 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package apikey + +import ( + "crypto/sha256" + "crypto/subtle" + "encoding/json" + "errors" + "fmt" + "os" + "path/filepath" + "sync" + "time" + + "github.com/sirupsen/logrus" + enterrors "github.com/weaviate/weaviate/entities/errors" + + "github.com/alexedwards/argon2id" + + "github.com/weaviate/weaviate/entities/models" +) + +const ( + SnapshotVersion = 0 + FileName = "users.json" + UserNameMaxLength = 128 + UserNameRegexCore = `[A-Za-z][-_0-9A-Za-z@.]{0,128}` +) + +type DBUsers interface { + CreateUser(userId, secureHash, userIdentifier, apiKeyFirstLetters string, createdAt time.Time) error + CreateUserWithKey(userId, apiKeyFirstLetters string, weakHash [sha256.Size]byte, createdAt time.Time) error + DeleteUser(userId string) error + ActivateUser(userId string) error + DeactivateUser(userId string, revokeKey bool) error + GetUsers(userIds ...string) (map[string]*User, error) + RotateKey(userId, apiKeyFirstLetters, secureHash, oldIdentifier, newIdentifier string) error + CheckUserIdentifierExists(userIdentifier string) (bool, error) +} + +type User struct { + sync.RWMutex + Id string + Active bool + InternalIdentifier string + ApiKeyFirstLetters string + CreatedAt time.Time + LastUsedAt time.Time + ImportedWithKey bool +} + +type DBUser struct { + lock *sync.RWMutex + weakHashLock 
*sync.RWMutex + data dbUserdata + memoryOnlyData memoryOnlyData + path string + enabled bool +} + +type DBUserSnapshot struct { + Data dbUserdata + Version int +} + +type dbUserdata struct { + SecureKeyStorageById map[string]string + IdentifierToId map[string]string + IdToIdentifier map[string]string + Users map[string]*User + UserKeyRevoked map[string]struct{} + ImportedApiKeysWeakHash map[string][sha256.Size]byte +} + +type memoryOnlyData struct { + weakKeyStorageById map[string][sha256.Size]byte + // imported keys from static users should not work after key rotation, eg the following scenario + // - import user with "key" + // - login works when using "key" through dynamic users + // - key rotation "key" => "new-key" + // - login works when using "new-key" through dynamic users + // - login using "key" is blocked and does not reach static user config where the old key is still present + // + // Note that this will NOT be persisted and we expect that the static user configuration does not contain "key" anymore + // on the next restart + importedApiKeysBlocked [][sha256.Size]byte +} + +func NewDBUser(path string, enabled bool, logger logrus.FieldLogger) (*DBUser, error) { + fullpath := fmt.Sprintf("%s/raft/db_users/", path) + err := createStorage(fullpath + FileName) + if err != nil { + return nil, err + } + existingData, err := ReadFile(fullpath + FileName) + if err != nil { + return nil, err + } + snapshot := DBUserSnapshot{} + if len(existingData) > 0 { + if err := json.Unmarshal(existingData, &snapshot); err != nil { + return nil, err + } + } + + if snapshot.Data.SecureKeyStorageById == nil { + snapshot.Data.SecureKeyStorageById = make(map[string]string) + } + if snapshot.Data.IdentifierToId == nil { + snapshot.Data.IdentifierToId = make(map[string]string) + } + if snapshot.Data.IdToIdentifier == nil { + snapshot.Data.IdToIdentifier = make(map[string]string) + } + if snapshot.Data.Users == nil { + snapshot.Data.Users = make(map[string]*User) + } + if 
snapshot.Data.UserKeyRevoked == nil { + snapshot.Data.UserKeyRevoked = make(map[string]struct{}) + } + + if snapshot.Data.ImportedApiKeysWeakHash == nil { + snapshot.Data.ImportedApiKeysWeakHash = make(map[string][sha256.Size]byte) + } + + dbUsers := &DBUser{ + path: fullpath, + lock: &sync.RWMutex{}, + weakHashLock: &sync.RWMutex{}, + data: snapshot.Data, + memoryOnlyData: memoryOnlyData{ + weakKeyStorageById: make(map[string][sha256.Size]byte), + importedApiKeysBlocked: make([][sha256.Size]byte, 0), + }, + enabled: enabled, + } + + // we save every change to file after a request is done, EXCEPT the lastUsedAt time as we do not want to write to a + // file with every request. + // This information is not terribly important (besides WCD UX), so it does not matter much if we very rarely loose + // some information here. This info will also be written on shutdown so the only loss of information occurs with + // OOM or similar. + if enabled { + enterrors.GoWrapper(func() { + ticker := time.NewTicker(1 * time.Minute) + for range ticker.C { + func() { + dbUsers.lock.RLock() + defer dbUsers.lock.RUnlock() + err := dbUsers.storeToFile() + if err != nil { + logger.WithField("action", "db_users_write_to_file"). + WithField("error", err). 
+ Warn("db users file not written") + } + }() + } + }, logger) + } + + return dbUsers, nil +} + +func (c *DBUser) CreateUser(userId, secureHash, userIdentifier, apiKeyFirstLetters string, createdAt time.Time) error { + c.lock.Lock() + defer c.lock.Unlock() + + if len(apiKeyFirstLetters) > 3 { + return errors.New("api key first letters too long") + } + + c.data.SecureKeyStorageById[userId] = secureHash + c.data.IdentifierToId[userIdentifier] = userId + c.data.IdToIdentifier[userId] = userIdentifier + c.data.Users[userId] = &User{Id: userId, Active: true, InternalIdentifier: userIdentifier, CreatedAt: createdAt, ApiKeyFirstLetters: apiKeyFirstLetters} + return c.storeToFile() +} + +func (c *DBUser) CreateUserWithKey(userId, apiKeyFirstLetters string, weakHash [sha256.Size]byte, createdAt time.Time) error { + c.lock.Lock() + defer c.lock.Unlock() + + if len(apiKeyFirstLetters) > 3 { + return errors.New("api key first letters too long") + } + + c.data.ImportedApiKeysWeakHash[userId] = weakHash + c.memoryOnlyData.importedApiKeysBlocked = append(c.memoryOnlyData.importedApiKeysBlocked, weakHash) + c.data.Users[userId] = &User{ + Id: userId, + Active: true, + InternalIdentifier: "imported_" + userId, + CreatedAt: createdAt, + ApiKeyFirstLetters: apiKeyFirstLetters, + ImportedWithKey: true, + } + return c.storeToFile() +} + +func (c *DBUser) RotateKey(userId, apiKeyFirstLetters, secureHash, oldIdentifier, newIdentifier string) error { + if len(apiKeyFirstLetters) > 3 { + return errors.New("api key first letters too long") + } + + c.lock.Lock() + defer c.lock.Unlock() + + if _, ok := c.data.Users[userId]; !ok { + return fmt.Errorf("user %s does not exist", userId) + } + + // replay of old raft commands can have these be "" + if oldIdentifier != "" && newIdentifier != "" { + c.data.IdToIdentifier[userId] = newIdentifier + delete(c.data.IdentifierToId, oldIdentifier) + c.data.IdentifierToId[newIdentifier] = userId + c.data.Users[userId].InternalIdentifier = newIdentifier + } 
+ if c.data.Users[userId].ImportedWithKey { + c.data.Users[userId].ImportedWithKey = false + delete(c.data.ImportedApiKeysWeakHash, userId) + + } + + c.data.Users[userId].ApiKeyFirstLetters = apiKeyFirstLetters + c.data.SecureKeyStorageById[userId] = secureHash + delete(c.memoryOnlyData.weakKeyStorageById, userId) + delete(c.data.UserKeyRevoked, userId) + return c.storeToFile() +} + +func (c *DBUser) DeleteUser(userId string) error { + c.lock.Lock() + defer c.lock.Unlock() + + delete(c.data.SecureKeyStorageById, userId) + delete(c.data.IdentifierToId, c.data.IdToIdentifier[userId]) + delete(c.data.IdToIdentifier, userId) + delete(c.data.Users, userId) + delete(c.memoryOnlyData.weakKeyStorageById, userId) + delete(c.data.UserKeyRevoked, userId) + delete(c.data.ImportedApiKeysWeakHash, userId) + return c.storeToFile() +} + +func (c *DBUser) ActivateUser(userId string) error { + c.lock.Lock() + defer c.lock.Unlock() + + if _, ok := c.data.Users[userId]; !ok { + return fmt.Errorf("user %s does not exist", userId) + } + + c.data.Users[userId].Active = true + return c.storeToFile() +} + +func (c *DBUser) DeactivateUser(userId string, revokeKey bool) error { + c.lock.Lock() + defer c.lock.Unlock() + if _, ok := c.data.Users[userId]; !ok { + return fmt.Errorf("user %s does not exist", userId) + } + if revokeKey { + c.data.UserKeyRevoked[userId] = struct{}{} + } + c.data.Users[userId].Active = false + + return c.storeToFile() +} + +func (c *DBUser) GetUsers(userIds ...string) (map[string]*User, error) { + c.lock.RLock() + defer c.lock.RUnlock() + + if len(userIds) == 0 { + return c.data.Users, nil + } + + users := make(map[string]*User, len(userIds)) + for _, id := range userIds { + user, ok := c.data.Users[id] + if ok { + users[id] = user + } + } + return users, nil +} + +func (c *DBUser) CheckUserIdentifierExists(userIdentifier string) (bool, error) { + c.lock.RLock() + defer c.lock.RUnlock() + + _, ok := c.data.Users[userIdentifier] + return ok, nil +} + +func (c 
*DBUser) UpdateLastUsedTimestamp(users map[string]time.Time) { + // RLock is fine here, we only want to avoid that c.data.Users is being changed. LastUsed has its own + // locking mechanism + c.lock.RLock() + defer c.lock.RUnlock() + + for userID, lastUsed := range users { + if c.data.Users[userID].LastUsedAt.Before(lastUsed) { + c.data.Users[userID].Lock() + c.data.Users[userID].LastUsedAt = lastUsed + c.data.Users[userID].Unlock() + } + } +} + +func (c *DBUser) ValidateImportedKey(token string) (*models.Principal, error) { + c.lock.RLock() + defer c.lock.RUnlock() + + keyHashGiven := sha256.Sum256([]byte(token)) + for userId, keyHashStored := range c.data.ImportedApiKeysWeakHash { + if subtle.ConstantTimeCompare(keyHashGiven[:], keyHashStored[:]) != 1 { + continue + } + if c.data.Users[userId] != nil && !c.data.Users[userId].Active { + return nil, fmt.Errorf("user deactivated") + } + + if _, ok := c.data.UserKeyRevoked[userId]; ok { + return nil, fmt.Errorf("key is revoked") + } + + // Last used time does not have to be exact. 
If we have multiple concurrent requests for the same + // user, only recording one of them is good enough + if c.data.Users[userId].TryLock() { + c.data.Users[userId].LastUsedAt = time.Now() + c.data.Users[userId].Unlock() + } + + return &models.Principal{Username: userId, UserType: models.UserTypeInputDb}, nil + } + + return nil, nil +} + +func (c *DBUser) IsBlockedKey(token string) bool { + keyHashGiven := sha256.Sum256([]byte(token)) + for _, keyHashStored := range c.memoryOnlyData.importedApiKeysBlocked { + if subtle.ConstantTimeCompare(keyHashGiven[:], keyHashStored[:]) == 1 { + return true + } + } + return false +} + +func (c *DBUser) ValidateAndExtract(key, userIdentifier string) (*models.Principal, error) { + c.lock.RLock() + defer c.lock.RUnlock() + + userId, ok := c.data.IdentifierToId[userIdentifier] + if !ok { + return nil, fmt.Errorf("invalid token") + } + + secureHash, ok := c.data.SecureKeyStorageById[userId] + if !ok { + return nil, fmt.Errorf("invalid token") + } + c.weakHashLock.RLock() + weakHash, ok := c.memoryOnlyData.weakKeyStorageById[userId] + c.weakHashLock.RUnlock() + if ok { + // use the secureHash as salt for the computation of the weaker in-memory + if err := c.validateWeakHash([]byte(key+secureHash), weakHash); err != nil { + return nil, err + } + } else { + if err := c.validateStrongHash(key, secureHash, userId); err != nil { + return nil, err + } + } + + if c.data.Users[userId] != nil && !c.data.Users[userId].Active { + return nil, fmt.Errorf("user deactivated") + } + if _, ok := c.data.UserKeyRevoked[userId]; ok { + return nil, fmt.Errorf("key is revoked") + } + + // Last used time does not have to be exact. 
If we have multiple concurrent requests for the same + // user, only recording one of them is good enough + if c.data.Users[userId].TryLock() { + c.data.Users[userId].LastUsedAt = time.Now() + c.data.Users[userId].Unlock() + } + + return &models.Principal{Username: userId, UserType: models.UserTypeInputDb}, nil +} + +func (c *DBUser) validateWeakHash(key []byte, weakHash [32]byte) error { + keyHash := sha256.Sum256(key) + if subtle.ConstantTimeCompare(keyHash[:], weakHash[:]) != 1 { + return fmt.Errorf("invalid token") + } + + return nil +} + +func (c *DBUser) validateStrongHash(key, secureHash, userId string) error { + match, err := argon2id.ComparePasswordAndHash(key, secureHash) + if err != nil { + return err + } + if !match { + return fmt.Errorf("invalid token") + } + token := []byte(key + secureHash) + // avoid concurrent writes to map + weakHash := sha256.Sum256(token) + + c.weakHashLock.Lock() + c.memoryOnlyData.weakKeyStorageById[userId] = weakHash + c.weakHashLock.Unlock() + + return nil +} + +func (c *DBUser) Snapshot() ([]byte, error) { + c.lock.Lock() + defer c.lock.Unlock() + + marshal, err := json.Marshal(DBUserSnapshot{Data: c.data, Version: SnapshotVersion}) + if err != nil { + return nil, err + } + return marshal, nil +} + +func (c *DBUser) Restore(snapshot []byte) error { + c.lock.Lock() + defer c.lock.Unlock() + + // don't overwrite with empty snapshot to avoid overwriting recovery from file + // with a non-existent db user snapshot when coming from old versions + if len(snapshot) == 0 { + return nil + } + + snapshotRestore := DBUserSnapshot{} + err := json.Unmarshal(snapshot, &snapshotRestore) + if err != nil { + return err + } + + if snapshotRestore.Version != SnapshotVersion { + return fmt.Errorf("invalid snapshot version") + } + c.data = snapshotRestore.Data + + return nil +} + +func (c *DBUser) storeToFile() error { + data, err := json.Marshal(DBUserSnapshot{Data: c.data, Version: SnapshotVersion}) + if err != nil { + return err + } + + 
tmpFile, err := os.CreateTemp(c.path, "temp-*.tmp") + if err != nil { + return err + } + tempFilename := tmpFile.Name() + + defer func() { + tmpFile.Close() + os.Remove(tempFilename) // Remove temp file if it still exists + }() + + // Write data to temp file, flush and close + if _, err := tmpFile.Write(data); err != nil { + return err + } + if err := tmpFile.Sync(); err != nil { + return err + } + if err := tmpFile.Close(); err != nil { + return err + } + + // Atomically rename the temp file to the target filename to not leave garbage when it crashes + return os.Rename(tempFilename, c.path+"/"+FileName) +} + +func (c *DBUser) Close() error { + c.lock.Lock() + defer c.lock.Unlock() + return c.storeToFile() +} + +func createStorage(filePath string) error { + if err := os.MkdirAll(filepath.Dir(filePath), os.ModePerm); err != nil { + return fmt.Errorf("failed to create directories: %w", err) + } + + _, err := os.Stat(filePath) + if err == nil { // file exists + return nil + } + + if os.IsNotExist(err) { + file, err := os.Create(filePath) + if err != nil { + return fmt.Errorf("failed to create file: %w", err) + } + defer file.Close() + return nil + } + + return err +} + +func ReadFile(filename string) ([]byte, error) { + file, err := os.Open(filename) + if err != nil { + return nil, err + } + defer file.Close() + + fileInfo, err := file.Stat() + if err != nil { + return nil, err + } + + data := make([]byte, fileInfo.Size()) + + _, err = file.Read(data) + if err != nil { + return nil, err + } + + return data, nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/keys/key_generation.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/keys/key_generation.go new file mode 100644 index 0000000000000000000000000000000000000000..d8765fc7cc555a96635c25077c543c1ab4b31b52 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/keys/key_generation.go @@ -0,0 +1,118 @@ +// _ _ +// 
__ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package keys + +import ( + "crypto/rand" + "encoding/base64" + "fmt" + "strings" + + "github.com/alexedwards/argon2id" +) + +const ( + DynUserIdentifier = "v200" // has to be 4 chars so full key is divisible by 3 and Base64Encoding does not end in "=" + RandomBytesLength = 32 + RandomBytesBase64Length = 44 + UserIdentifierBytesLength = 12 + UserIdentifierBytesBase64Length = 16 +) + +// second recommendation from the RFC: https://www.rfc-editor.org/rfc/rfc9106.html#name-parameter-choice +// Changing ANY of these parameters will change the output and make previously generated keys invalid. +var argonParameters = &argon2id.Params{ + Memory: 64 * 1024, + Parallelism: 2, + Iterations: 3, + SaltLength: 16, + KeyLength: 32, +} + +// CreateApiKeyAndHash creates an api key that has three parts: +// 1) an argon hash of a random key with length 32 bytes (Base64 encoded) +// 2) a random user identifier with length 10 bytes (Base64 encoded) +// - this identifier can be used to fetch the hash later +// +// 3) a version string to identify the type of api key +// +// The different parts have "_" as separator (which is not part of Base64) and the combined string is encoded again as +// Base64 to be returned to the user. The apiKey length is divisible by 3 so the Base64Encoding does not end in "=". 
+// +// To verify that a user provides the correct key the following steps have to be taken: +// - decode the key into the 3 parts mentioned above using DecodeApiKey +// - fetch the saved hash based on the returned user identifier +// - compare the returned randomKey with the fetched hash using argon2id.ComparePasswordAndHash +func CreateApiKeyAndHash() (string, string, string, error) { + randomBytesKey, err := generateRandomBytes(RandomBytesLength) + if err != nil { + return "", "", "", err + } + randomKey := base64.StdEncoding.EncodeToString(randomBytesKey) + + randomBytesIdentifier, err := generateRandomBytes(UserIdentifierBytesLength) + if err != nil { + return "", "", "", err + } + identifier := base64.StdEncoding.EncodeToString(randomBytesIdentifier) + + fullApiKey := generateApiKey(randomKey, identifier) + + hash, err := argon2id.CreateHash(randomKey, argonParameters) + + return fullApiKey, hash, identifier, err +} + +func generateApiKey(randomKey, userIdentifier string) string { + fullString := userIdentifier + "_" + randomKey + "_" + DynUserIdentifier + return base64.StdEncoding.EncodeToString([]byte(fullString)) +} + +func generateRandomBytes(length int) ([]byte, error) { + b := make([]byte, length) + _, err := rand.Read(b) + // Note that err == nil only if we read len(b) bytes. 
+ if err != nil { + return nil, err + } + + return b, nil +} + +func DecodeApiKey(fullApiKey string) (string, string, error) { + decodeString, err := base64.StdEncoding.DecodeString(fullApiKey) + if err != nil { + return "", "", err + } + + parts := strings.Split(string(decodeString), "_") + if len(parts) != 3 { + return "", "", fmt.Errorf("invalid token") + } + + userIdentifier := parts[0] + randomKey := parts[1] + version := parts[2] + if version != DynUserIdentifier { + return "", "", fmt.Errorf("invalid token") + } + + if len(userIdentifier) != UserIdentifierBytesBase64Length { + return "", "", fmt.Errorf("invalid token") + } + + if len(randomKey) != RandomBytesBase64Length { + return "", "", fmt.Errorf("invalid token") + } + + return randomKey, userIdentifier, nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/keys/key_generation_test.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/keys/key_generation_test.go new file mode 100644 index 0000000000000000000000000000000000000000..88421ff0065cfd6a288b985f5f70bf629e84a624 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/keys/key_generation_test.go @@ -0,0 +1,69 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package keys + +import ( + "encoding/base64" + "strings" + "testing" + + "github.com/alexedwards/argon2id" + "github.com/stretchr/testify/require" +) + +func TestKeyGeneration(t *testing.T) { + fullApiKey, hash, userIdentifier, err := CreateApiKeyAndHash() + require.NoError(t, err) + + randomKey, userIdentifierDecoded, err := DecodeApiKey(fullApiKey) + require.NoError(t, err) + require.Equal(t, userIdentifier, userIdentifierDecoded) + + match, err := argon2id.ComparePasswordAndHash(randomKey, hash) + require.NoError(t, err) + require.True(t, match) +} + +func TestInvalidKeys(t *testing.T) { + randomKeyDummy := strings.Repeat("A", RandomBytesBase64Length) + randomIdentifierDummy := strings.Repeat("A", UserIdentifierBytesBase64Length) + + combiner := func(parts ...string) string { + return strings.Join(parts, "_") + } + + tests := []struct { + name string + key string + error bool + }{ + {name: "valid", key: combiner(randomIdentifierDummy, randomKeyDummy, DynUserIdentifier), error: false}, + {name: "invalid base64", key: "i am a string that is not base64", error: true}, + {name: "invalid version", key: combiner(randomIdentifierDummy, randomKeyDummy, "v123"), error: true}, + {name: "missing part", key: combiner(randomIdentifierDummy, randomKeyDummy), error: true}, + {name: "invalid randomKey", key: combiner(randomIdentifierDummy, randomKeyDummy[:5], DynUserIdentifier), error: true}, + {name: "invalid identifier", key: combiner(randomIdentifierDummy[:5], randomKeyDummy, DynUserIdentifier), error: true}, + {name: "all wrong", key: combiner(randomIdentifierDummy[:5], randomKeyDummy[:5], "v123"), error: true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + encodedKey := base64.StdEncoding.EncodeToString([]byte(tt.key)) + _, _, err := DecodeApiKey(encodedKey) + if tt.error { + require.Error(t, err) + } else { + require.NoError(t, err) + } + }) + } +} diff --git 
a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/remote.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/remote.go new file mode 100644 index 0000000000000000000000000000000000000000..54e8da10115206a476a3837a34f9db407da3bf66 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/remote.go @@ -0,0 +1,57 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package apikey + +import ( + "context" + "time" +) + +type RemoteApiKey struct { + apikey *DBUser +} + +func NewRemoteApiKey(apikey *ApiKey) *RemoteApiKey { + return &RemoteApiKey{apikey: apikey.Dynamic} +} + +func (r *RemoteApiKey) GetUserStatus(ctx context.Context, users UserStatusRequest) (*UserStatusResponse, error) { + r.apikey.UpdateLastUsedTimestamp(users.Users) + + if !users.ReturnStatus { + return nil, nil + } + + userIds := make([]string, 0, len(users.Users)) + for userId := range users.Users { + userIds = append(userIds, userId) + } + userReturns, err := r.apikey.GetUsers(userIds...) 
+ if err != nil { + return nil, err + } + + ret := make(map[string]time.Time, len(userReturns)) + for _, userReturn := range userReturns { + ret[userReturn.Id] = userReturn.LastUsedAt + } + return &UserStatusResponse{Users: ret}, nil +} + +type UserStatusResponse struct { + Users map[string]time.Time +} + +type UserStatusRequest struct { + Users map[string]time.Time + ReturnStatus bool +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/wrapper.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/wrapper.go new file mode 100644 index 0000000000000000000000000000000000000000..b38b089a227410eb972ae2bbee45c1d80d99f1f5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/wrapper.go @@ -0,0 +1,77 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package apikey + +import ( + "fmt" + + "github.com/go-openapi/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/auth/authentication/apikey/keys" + "github.com/weaviate/weaviate/usecases/config" +) + +type ApiKey struct { + static *StaticApiKey + Dynamic *DBUser +} + +func New(cfg config.Config, logger logrus.FieldLogger) (*ApiKey, error) { + static, err := NewStatic(cfg) + if err != nil { + return nil, err + } + dynamic, err := NewDBUser(cfg.Persistence.DataPath, cfg.Authentication.DBUsers.Enabled, logger) + if err != nil { + return nil, err + } + + return &ApiKey{ + static: static, + Dynamic: dynamic, + }, nil +} + +func (a *ApiKey) ValidateAndExtract(token string, scopes []string) (*models.Principal, error) { + validate := func(token string, scopes []string) (*models.Principal, error) { + if a.Dynamic.enabled { + if randomKey, userIdentifier, err := keys.DecodeApiKey(token); err == nil { + principal, err := a.Dynamic.ValidateAndExtract(randomKey, userIdentifier) + if err != nil { + return nil, fmt.Errorf("invalid api key: %w", err) + } + return principal, nil + } + principal, err := a.Dynamic.ValidateImportedKey(token) + if err != nil { + return nil, fmt.Errorf("invalid api key: %w", err) + } + if principal != nil { + return principal, nil + } else if a.Dynamic.IsBlockedKey(token) { + // make sure static keys do not work after import and key rotation + return nil, fmt.Errorf("invalid api key") + } + } + if a.static.config.Enabled { + return a.static.ValidateAndExtract(token, scopes) + } + return nil, fmt.Errorf("invalid api key") + } + + principal, err := validate(token, scopes) + if err != nil { + return nil, errors.New(401, "unauthorized: %v", err) + } + return principal, nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/wrapper_test.go 
b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/wrapper_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b9302100d0296e65663acad0d37d1e2f73c41f38 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/apikey/wrapper_test.go @@ -0,0 +1,148 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package apikey + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/usecases/auth/authentication/apikey/keys" + "github.com/weaviate/weaviate/usecases/config" + + "github.com/sirupsen/logrus/hooks/test" +) + +func TestInvalidApiKey(t *testing.T) { + t.Parallel() + + logger, _ := test.NewNullLogger() + + testCases := []struct { + staticEnabled bool + dbEnabled bool + }{ + {staticEnabled: true, dbEnabled: false}, + {staticEnabled: false, dbEnabled: true}, + {staticEnabled: true, dbEnabled: true}, + } + + for _, testCase := range testCases { + t.Run("staticEnabled="+boolToStr(testCase.staticEnabled)+",dbEnabled="+boolToStr(testCase.dbEnabled), func(t *testing.T) { + conf := config.Config{ + Persistence: config.Persistence{DataPath: t.TempDir()}, + Authentication: config.Authentication{ + DBUsers: config.DbUsers{Enabled: testCase.dbEnabled}, + APIKey: config.StaticAPIKey{Enabled: testCase.staticEnabled, AllowedKeys: []string{"valid-key"}, Users: []string{"user1"}}, + }, + } + wrapper, err := New(conf, logger) + require.NoError(t, err) + + _, err = wrapper.ValidateAndExtract("invalid-key", nil) + require.Error(t, err) + require.Contains(t, err.Error(), "unauthorized: invalid api key") + }) + } +} + +func TestValidStaticKey(t *testing.T) { + t.Parallel() + + logger, _ := test.NewNullLogger() + + testCases := 
[]struct { + staticEnabled bool + dbEnabled bool + expectError bool + }{ + {staticEnabled: true, dbEnabled: false, expectError: false}, + {staticEnabled: false, dbEnabled: true, expectError: true}, + {staticEnabled: true, dbEnabled: true, expectError: false}, + } + + for _, testCase := range testCases { + t.Run("staticEnabled="+boolToStr(testCase.staticEnabled)+",dbEnabled="+boolToStr(testCase.dbEnabled), func(t *testing.T) { + conf := config.Config{ + Persistence: config.Persistence{DataPath: t.TempDir()}, + Authentication: config.Authentication{ + DBUsers: config.DbUsers{Enabled: testCase.dbEnabled}, + APIKey: config.StaticAPIKey{Enabled: testCase.staticEnabled, AllowedKeys: []string{"valid-key"}, Users: []string{"user1"}}, + }, + } + wrapper, err := New(conf, logger) + require.NoError(t, err) + + principal, err := wrapper.ValidateAndExtract("valid-key", nil) + if testCase.expectError { + require.Error(t, err) + require.Contains(t, err.Error(), "unauthorized: invalid api key") + } else { + require.NoError(t, err) + require.NotNil(t, principal) + } + }) + } +} + +func TestValidDynamicKey(t *testing.T) { + t.Parallel() + + logger, _ := test.NewNullLogger() + + testCases := []struct { + staticEnabled bool + dbEnabled bool + expectError bool + }{ + {staticEnabled: true, dbEnabled: false, expectError: true}, + {staticEnabled: false, dbEnabled: true, expectError: false}, + {staticEnabled: true, dbEnabled: true, expectError: false}, + } + + for _, testCase := range testCases { + t.Run("staticEnabled="+boolToStr(testCase.staticEnabled)+",dbEnabled="+boolToStr(testCase.dbEnabled), func(t *testing.T) { + conf := config.Config{ + Persistence: config.Persistence{DataPath: t.TempDir()}, + Authentication: config.Authentication{ + DBUsers: config.DbUsers{Enabled: testCase.dbEnabled}, + APIKey: config.StaticAPIKey{Enabled: testCase.staticEnabled, AllowedKeys: []string{"valid-key"}, Users: []string{"user1"}}, + }, + } + wrapper, err := New(conf, logger) + require.NoError(t, err) 
+ + userId := "id" + + apiKey, hash, identifier, err := keys.CreateApiKeyAndHash() + require.NoError(t, err) + + require.NoError(t, wrapper.Dynamic.CreateUser(userId, hash, identifier, "", time.Now())) + + principal, err := wrapper.ValidateAndExtract(apiKey, nil) + if testCase.expectError { + require.Error(t, err) + require.Contains(t, err.Error(), "unauthorized: invalid api key") + } else { + require.NoError(t, err) + require.NotNil(t, principal) + } + }) + } +} + +func boolToStr(enabled bool) string { + if enabled { + return "true" + } + return "false" +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/authentication.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/authentication.go new file mode 100644 index 0000000000000000000000000000000000000000..e466edcb483582c0b7ac72e0f14d904b7d68e01a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/authentication.go @@ -0,0 +1,19 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package authentication + +type AuthType string + +const ( + AuthTypeDb AuthType = "db" + AuthTypeOIDC AuthType = "oidc" +) diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/composer/token_validation.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/composer/token_validation.go new file mode 100644 index 0000000000000000000000000000000000000000..22775a2634e286845305cd7f23e72f761543198d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/composer/token_validation.go @@ -0,0 +1,61 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package composer + +import ( + "github.com/golang-jwt/jwt/v4" + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/config" +) + +type TokenFunc func(token string, scopes []string) (*models.Principal, error) + +// New provides an OpenAPI compatible token validation +// function that validates the token either as OIDC or as an StaticAPIKey token +// depending on which is configured. If both are configured, the scheme is +// figured out at runtime. +func New(config config.Authentication, + apikey authValidator, oidc authValidator, +) TokenFunc { + if config.AnyApiKeyAvailable() && config.OIDC.Enabled { + return pickAuthSchemeDynamically(apikey, oidc) + } + + if config.AnyApiKeyAvailable() { + return apikey.ValidateAndExtract + } + + // default to OIDC, even if no scheme is enabled, then it can deal with this + // scenario itself. This is the backward-compatible scenario. 
+ return oidc.ValidateAndExtract +} + +func pickAuthSchemeDynamically( + apiKey authValidator, oidc authValidator, +) TokenFunc { + return func(token string, scopes []string) (*models.Principal, error) { + _, err := jwt.Parse(token, func(t *jwt.Token) (interface{}, error) { + return nil, nil + }) + + if err != nil && errors.Is(err, jwt.ErrTokenMalformed) { + return apiKey.ValidateAndExtract(token, scopes) + } + + return oidc.ValidateAndExtract(token, scopes) + } +} + +type authValidator interface { + ValidateAndExtract(token string, scopes []string) (*models.Principal, error) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/composer/token_validation_test.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/composer/token_validation_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8a42c8620c3843e15d07c6bc754ea0b52de40722 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/composer/token_validation_test.go @@ -0,0 +1,240 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package composer + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_TokenAuthComposer(t *testing.T) { + type test struct { + name string + token string + config config.Authentication + oidc TokenFunc + apiKey TokenFunc + expectErr bool + expectErrMsg string + } + + tests := []test{ + { + name: "everything disabled - pass to oidc provider (backward compat)", + config: config.Authentication{ + OIDC: config.OIDC{ + Enabled: false, + }, + APIKey: config.StaticAPIKey{ + Enabled: false, + }, + }, + token: "does not matter", + apiKey: func(t string, s []string) (*models.Principal, error) { + panic("i should never be called") + }, + oidc: func(t string, s []string) (*models.Principal, error) { + return nil, nil + }, + expectErr: false, + }, + { + name: "everything disabled - pass to oidc provider fail", + config: config.Authentication{ + OIDC: config.OIDC{ + Enabled: false, + }, + APIKey: config.StaticAPIKey{ + Enabled: false, + }, + }, + token: "does not matter", + apiKey: func(t string, s []string) (*models.Principal, error) { + panic("i should never be called") + }, + oidc: func(t string, s []string) (*models.Principal, error) { + return nil, fmt.Errorf("oidc says nope!") + }, + expectErr: true, + expectErrMsg: "oidc says nope!", + }, + { + name: "only oidc enabled, returns success", + config: config.Authentication{ + OIDC: config.OIDC{ + Enabled: true, + }, + APIKey: config.StaticAPIKey{ + Enabled: false, + }, + }, + token: "does not matter", + apiKey: func(t string, s []string) (*models.Principal, error) { + panic("i should never be called") + }, + oidc: func(t string, s []string) (*models.Principal, error) { + return nil, nil + }, + expectErr: false, + }, + { + name: "only oidc enabled, returns no success", + config: config.Authentication{ + OIDC: 
config.OIDC{ + Enabled: true, + }, + APIKey: config.StaticAPIKey{ + Enabled: false, + }, + }, + token: "does not matter", + apiKey: func(t string, s []string) (*models.Principal, error) { + panic("i should never be called") + }, + oidc: func(t string, s []string) (*models.Principal, error) { + return nil, fmt.Errorf("thou shalt not pass") + }, + expectErr: true, + expectErrMsg: "thou shalt not pass", + }, + { + name: "only apiKey enabled, returns success", + config: config.Authentication{ + OIDC: config.OIDC{ + Enabled: false, + }, + APIKey: config.StaticAPIKey{ + Enabled: true, + }, + }, + token: "does not matter", + apiKey: func(t string, s []string) (*models.Principal, error) { + return nil, nil + }, + oidc: func(t string, s []string) (*models.Principal, error) { + panic("i should never be called") + }, + expectErr: false, + }, + { + name: "only apiKey enabled, returns no success", + config: config.Authentication{ + OIDC: config.OIDC{ + Enabled: false, + }, + APIKey: config.StaticAPIKey{ + Enabled: true, + }, + }, + token: "does not matter", + apiKey: func(t string, s []string) (*models.Principal, error) { + return nil, fmt.Errorf("you think I let anyone through?") + }, + oidc: func(t string, s []string) (*models.Principal, error) { + panic("i should never be called") + }, + expectErr: true, + expectErrMsg: "you think I let anyone through?", + }, + { + name: "both an enabled, with an 'obvious' api key", + config: config.Authentication{ + OIDC: config.OIDC{ + Enabled: true, + }, + APIKey: config.StaticAPIKey{ + Enabled: true, + }, + }, + token: "does not matter", + apiKey: func(t string, s []string) (*models.Principal, error) { + return nil, fmt.Errorf("it's a pretty key, but not good enough") + }, + oidc: func(t string, s []string) (*models.Principal, error) { + panic("i should never be called") + }, + expectErr: true, + expectErrMsg: "it's a pretty key, but not good enough", + }, + { + name: "both an enabled, empty token", + config: config.Authentication{ + 
OIDC: config.OIDC{ + Enabled: true, + }, + APIKey: config.StaticAPIKey{ + Enabled: true, + }, + }, + token: "", + apiKey: func(t string, s []string) (*models.Principal, error) { + return nil, fmt.Errorf("really? an empty one?") + }, + oidc: func(t string, s []string) (*models.Principal, error) { + panic("i should never be called") + }, + expectErr: true, + expectErrMsg: "empty one", + }, + { + name: "both an enabled, jwt token", + config: config.Authentication{ + OIDC: config.OIDC{ + Enabled: true, + }, + APIKey: config.StaticAPIKey{ + Enabled: true, + }, + }, + token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c", + apiKey: func(t string, s []string) (*models.Principal, error) { + panic("i should never be called") + }, + oidc: func(t string, s []string) (*models.Principal, error) { + return nil, fmt.Errorf("john doe ... that sounds like a fake name!") + }, + expectErr: true, + expectErrMsg: "john doe", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + v := New( + test.config, + fakeValidator{v: test.apiKey}, + fakeValidator{v: test.oidc}, + ) + _, err := v(test.token, nil) + if test.expectErr { + require.NotNil(t, err) + assert.Contains(t, err.Error(), test.expectErrMsg) + return + } + + require.Nil(t, err) + }) + } +} + +type fakeValidator struct { + v TokenFunc +} + +func (v fakeValidator) ValidateAndExtract(t string, s []string) (*models.Principal, error) { + return v.v(t, s) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/oidc/middleware.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/oidc/middleware.go new file mode 100644 index 0000000000000000000000000000000000000000..699acf4862013b49ad58d11e0ffd1e9d4b2eb6a2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/oidc/middleware.go @@ -0,0 +1,298 @@ +// _ _ +// __ _____ __ ___ 
___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package oidc + +import ( + "bytes" + "context" + "crypto/tls" + "crypto/x509" + "encoding/pem" + "fmt" + "io" + "net/http" + "os" + "strings" + + "github.com/coreos/go-oidc/v3/oidc" + errors "github.com/go-openapi/errors" + "github.com/minio/minio-go/v7" + "github.com/minio/minio-go/v7/pkg/credentials" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/config" +) + +// Client handles the OIDC setup at startup and provides a middleware to be +// used with the goswagger API +type Client struct { + Config config.OIDC + verifier *oidc.IDTokenVerifier + logger logrus.FieldLogger +} + +// New OIDC Client: It tries to retrieve the JWKs at startup (or fails), it +// provides a middleware which can be used at runtime with a go-swagger style +// API +func New(cfg config.Config, logger logrus.FieldLogger) (*Client, error) { + client := &Client{ + Config: cfg.Authentication.OIDC, + logger: logger.WithField("component", "oidc"), + } + + if !client.Config.Enabled { + // if oidc is not enabled, we are done, no need to setup an actual client. + // The "disabled" client is however still valuable to deny any requests + // coming in with an OAuth token set. 
+ return client, nil + } + + if err := client.Init(); err != nil { + return nil, fmt.Errorf("oidc init: %w", err) + } + + return client, nil +} + +func (c *Client) Init() error { + if err := c.validateConfig(); err != nil { + return fmt.Errorf("invalid config: %w", err) + } + c.logger.WithField("action", "oidc_init").Info("validated OIDC configuration") + + ctx := context.Background() + if c.Config.Certificate.Get() != "" { + client, err := c.useCertificate() + if err != nil { + return fmt.Errorf("could not setup client with custom certificate: %w", err) + } + ctx = oidc.ClientContext(ctx, client) + c.logger.WithField("action", "oidc_init").Info("configured OIDC client with custom certificate") + } + + if c.Config.JWKSUrl.Get() != "" { + keySet := oidc.NewRemoteKeySet(ctx, c.Config.JWKSUrl.Get()) + verifier := oidc.NewVerifier(c.Config.Issuer.Get(), keySet, &oidc.Config{ + ClientID: c.Config.ClientID.Get(), + SkipClientIDCheck: c.Config.SkipClientIDCheck.Get(), + }) + c.verifier = verifier + c.logger.WithField("action", "oidc_init").WithField("jwks_url", c.Config.JWKSUrl.Get()).Info("configured OIDC verifier") + } else { + provider, err := oidc.NewProvider(ctx, c.Config.Issuer.Get()) + if err != nil { + return fmt.Errorf("could not setup provider: %w", err) + } + c.logger.WithField("action", "oidc_init").Info("configured OIDC provider") + + // oauth2 + + verifier := provider.Verifier(&oidc.Config{ + ClientID: c.Config.ClientID.Get(), + SkipClientIDCheck: c.Config.SkipClientIDCheck.Get(), + }) + c.verifier = verifier + c.logger.WithField("action", "oidc_init").Info("configured OIDC verifier") + } + + return nil +} + +func (c *Client) validateConfig() error { + var msgs []string + + if c.Config.Issuer.Get() == "" { + msgs = append(msgs, "missing required field 'issuer'") + } + + if c.Config.UsernameClaim.Get() == "" { + msgs = append(msgs, "missing required field 'username_claim'") + } + + if !c.Config.SkipClientIDCheck.Get() && c.Config.ClientID.Get() == "" { + msgs 
= append(msgs, "missing required field 'client_id': "+ + "either set a client_id or explicitly disable the check with 'skip_client_id_check: true'") + } + + if len(msgs) == 0 { + return nil + } + + return fmt.Errorf("%v", strings.Join(msgs, ", ")) +} + +// ValidateAndExtract can be used as a middleware for go-swagger +func (c *Client) ValidateAndExtract(token string, scopes []string) (*models.Principal, error) { + if !c.Config.Enabled { + return nil, errors.New(401, "oidc auth is not configured, please try another auth scheme or set up weaviate with OIDC configured") + } + + parsed, err := c.verifier.Verify(context.Background(), token) + if err != nil { + return nil, errors.New(401, "unauthorized: %v", err) + } + + claims, err := c.extractClaims(parsed) + if err != nil { + return nil, errors.New(500, "oidc: %v", err) + } + + username, err := c.extractUsername(claims) + if err != nil { + return nil, errors.New(500, "oidc: %v", err) + } + + groups := c.extractGroups(claims) + + return &models.Principal{ + Username: username, + Groups: groups, + UserType: models.UserTypeInputOidc, + }, nil +} + +func (c *Client) extractClaims(token *oidc.IDToken) (map[string]interface{}, error) { + var claims map[string]interface{} + if err := token.Claims(&claims); err != nil { + return nil, fmt.Errorf("could not extract claims from token: %w", err) + } + + return claims, nil +} + +func (c *Client) extractUsername(claims map[string]interface{}) (string, error) { + usernameUntyped, ok := claims[c.Config.UsernameClaim.Get()] + if !ok { + return "", fmt.Errorf("token doesn't contain required claim '%s'", c.Config.UsernameClaim.Get()) + } + + username, ok := usernameUntyped.(string) + if !ok { + return "", fmt.Errorf("claim '%s' is not a string, but %T", c.Config.UsernameClaim.Get(), usernameUntyped) + } + + return username, nil +} + +// extractGroups never errors, if groups can't be parsed an empty set of groups +// is returned. 
This is because groups are not a required standard in the OIDC +// spec, so we can't error if an OIDC provider does not support them. +func (c *Client) extractGroups(claims map[string]interface{}) []string { + var groups []string + + groupsUntyped, ok := claims[c.Config.GroupsClaim.Get()] + if !ok { + return groups + } + + groupsSlice, ok := groupsUntyped.([]interface{}) + if !ok { + groupAsString, ok := groupsUntyped.(string) + if ok { + return []string{groupAsString} + } + return groups + } + + for _, untyped := range groupsSlice { + if group, ok := untyped.(string); ok { + groups = append(groups, group) + } + } + + return groups +} + +func (c *Client) useCertificate() (*http.Client, error) { + var certificate, certificateSource string + if strings.HasPrefix(c.Config.Certificate.Get(), "http") { + resp, err := http.Get(c.Config.Certificate.Get()) + if err != nil { + return nil, fmt.Errorf("failed to get certificate from %s: %w", c.Config.Certificate.Get(), err) + } + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("failed to download certificate from %s: http status: %v", c.Config.Certificate.Get(), resp.StatusCode) + } + certBytes, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read certificate from %s: %w", c.Config.Certificate.Get(), err) + } + certificate = string(certBytes) + certificateSource = c.Config.Certificate.Get() + } else if strings.HasPrefix(c.Config.Certificate.Get(), "s3://") { + parts := strings.TrimPrefix(c.Config.Certificate.Get(), "s3://") + segments := strings.SplitN(parts, "/", 2) + if len(segments) != 2 { + return nil, fmt.Errorf("invalid S3 URI, must contain bucket and key: %s", c.Config.Certificate.Get()) + } + region := os.Getenv("AWS_REGION") + if region == "" { + region = os.Getenv("AWS_DEFAULT_REGION") + } + creds := credentials.NewIAM("") + // check if we are able to get the credentials using AWS IAM + if _, err := creds.GetWithContext(nil); err != nil { + 
// if IAM doesn't work, check environment settings for creds, set anonymous access if none found + creds = credentials.NewEnvAWS() + } + bucketName, objectKey := segments[0], segments[1] + minioClient, err := minio.New("s3.amazonaws.com", &minio.Options{ + Creds: creds, + Secure: true, + Region: region, + }) + if err != nil { + return nil, fmt.Errorf("failed to create S3 client: %w", err) + } + object, err := minioClient.GetObject(context.Background(), bucketName, objectKey, minio.GetObjectOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to get certificate from: %s: %w", c.Config.Certificate.Get(), err) + } + defer object.Close() + var content bytes.Buffer + if _, err := io.Copy(&content, object); err != nil { + return nil, fmt.Errorf("failed to read certificate from %s: %w", c.Config.Certificate.Get(), err) + } + certificate = content.String() + certificateSource = c.Config.Certificate.Get() + } else { + certificate = c.Config.Certificate.Get() + certificateSource = "environment variable" + } + + certBlock, _ := pem.Decode([]byte(certificate)) + if certBlock == nil || len(certBlock.Bytes) == 0 { + return nil, fmt.Errorf("failed to decode certificate") + } + cert, err := x509.ParseCertificate(certBlock.Bytes) + if err != nil { + return nil, fmt.Errorf("failed to parse certificate: %w", err) + } + + c.logger.WithField("action", "oidc_init").WithField("source", certificateSource).Info("custom certificate is valid") + + certPool := x509.NewCertPool() + certPool.AddCert(cert) + + // Create an HTTP client with self signed certificate + client := &http.Client{ + Transport: &http.Transport{ + TLSClientConfig: &tls.Config{ + RootCAs: certPool, + MinVersion: tls.VersionTLS12, + }, + }, + } + + return client, nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/oidc/middleware_test.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/oidc/middleware_test.go new file mode 100644 index 
0000000000000000000000000000000000000000..ba4b7f483d43ddb07147bef7b0d45121d4eeaa60 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/oidc/middleware_test.go @@ -0,0 +1,279 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package oidc + +import ( + "fmt" + "testing" + "time" + + errors "github.com/go-openapi/errors" + "github.com/golang-jwt/jwt/v4" + "github.com/sirupsen/logrus" + logrustest "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/config/runtime" +) + +func Test_Middleware_NotConfigured(t *testing.T) { + cfg := config.Config{ + Authentication: config.Authentication{ + OIDC: config.OIDC{ + Enabled: false, + }, + }, + } + expectedErr := errors.New(401, "oidc auth is not configured, please try another auth scheme or set up weaviate with OIDC configured") + + logger, _ := logrustest.NewNullLogger() + client, err := New(cfg, logger) + require.Nil(t, err) + + principal, err := client.ValidateAndExtract("token-doesnt-matter", []string{}) + assert.Nil(t, principal) + assert.Equal(t, expectedErr, err) +} + +func Test_Middleware_IncompleteConfiguration(t *testing.T) { + cfg := config.Config{ + Authentication: config.Authentication{ + OIDC: config.OIDC{ + Enabled: true, + }, + }, + } + expectedErr := fmt.Errorf("oidc init: invalid config: missing required field 'issuer', " + + "missing required field 'username_claim', missing required field 'client_id': either set a client_id or explicitly disable the check with 'skip_client_id_check: true'") + + logger, _ := logrustest.NewNullLogger() + _, err := New(cfg, logger) + assert.ErrorAs(t, 
err, &expectedErr) +} + +type claims struct { + jwt.StandardClaims + Email string `json:"email"` + Groups []string `json:"groups"` + GroupAsString string `json:"group_as_string"` +} + +func Test_Middleware_WithValidToken(t *testing.T) { + t.Run("without groups set", func(t *testing.T) { + server := newOIDCServer(t) + defer server.Close() + + cfg := config.Config{ + Authentication: config.Authentication{ + OIDC: config.OIDC{ + Enabled: true, + Issuer: runtime.NewDynamicValue(server.URL), + ClientID: runtime.NewDynamicValue("best_client"), + SkipClientIDCheck: runtime.NewDynamicValue(false), + UsernameClaim: runtime.NewDynamicValue("sub"), + }, + }, + } + + token := token(t, "best-user", server.URL, "best_client") + logger, _ := logrustest.NewNullLogger() + client, err := New(cfg, logger) + require.Nil(t, err) + + principal, err := client.ValidateAndExtract(token, []string{}) + require.Nil(t, err) + assert.Equal(t, "best-user", principal.Username) + }) + + t.Run("with a non-standard username claim", func(t *testing.T) { + server := newOIDCServer(t) + defer server.Close() + + cfg := config.Config{ + Authentication: config.Authentication{ + OIDC: config.OIDC{ + Enabled: true, + Issuer: runtime.NewDynamicValue(server.URL), + ClientID: runtime.NewDynamicValue("best_client"), + SkipClientIDCheck: runtime.NewDynamicValue(false), + UsernameClaim: runtime.NewDynamicValue("email"), + GroupsClaim: runtime.NewDynamicValue("groups"), + }, + }, + } + + token := tokenWithEmail(t, "best-user", server.URL, "best_client", "foo@bar.com") + logger, _ := logrustest.NewNullLogger() + client, err := New(cfg, logger) + require.Nil(t, err) + + principal, err := client.ValidateAndExtract(token, []string{}) + require.Nil(t, err) + assert.Equal(t, "foo@bar.com", principal.Username) + }) + + t.Run("with groups claim", func(t *testing.T) { + server := newOIDCServer(t) + defer server.Close() + + cfg := config.Config{ + Authentication: config.Authentication{ + OIDC: config.OIDC{ + Enabled: true, + 
Issuer: runtime.NewDynamicValue(server.URL), + ClientID: runtime.NewDynamicValue("best_client"), + SkipClientIDCheck: runtime.NewDynamicValue(false), + UsernameClaim: runtime.NewDynamicValue("sub"), + GroupsClaim: runtime.NewDynamicValue("groups"), + }, + }, + } + + token := tokenWithGroups(t, "best-user", server.URL, "best_client", []string{"group1", "group2"}) + logger, _ := logrustest.NewNullLogger() + client, err := New(cfg, logger) + require.Nil(t, err) + + principal, err := client.ValidateAndExtract(token, []string{}) + require.Nil(t, err) + assert.Equal(t, "best-user", principal.Username) + assert.Equal(t, []string{"group1", "group2"}, principal.Groups) + }) + + t.Run("with a string groups claim", func(t *testing.T) { + server := newOIDCServer(t) + defer server.Close() + + cfg := config.Config{ + Authentication: config.Authentication{ + OIDC: config.OIDC{ + Enabled: true, + Issuer: runtime.NewDynamicValue(server.URL), + ClientID: runtime.NewDynamicValue("best_client"), + SkipClientIDCheck: runtime.NewDynamicValue(false), + UsernameClaim: runtime.NewDynamicValue("sub"), + GroupsClaim: runtime.NewDynamicValue("group_as_string"), + }, + }, + } + + token := tokenWithStringGroups(t, "best-user", server.URL, "best_client", "group1") + logger, _ := logrustest.NewNullLogger() + client, err := New(cfg, logger) + require.Nil(t, err) + + principal, err := client.ValidateAndExtract(token, []string{}) + require.Nil(t, err) + assert.Equal(t, "best-user", principal.Username) + assert.Equal(t, []string{"group1"}, principal.Groups) + }) +} + +func token(t *testing.T, subject string, issuer string, aud string) string { + return tokenWithEmail(t, subject, issuer, aud, "") +} + +func tokenWithEmail(t *testing.T, subject string, issuer string, aud string, email string) string { + claims := claims{ + Email: email, + } + + return tokenWithClaims(t, subject, issuer, aud, claims) +} + +func tokenWithGroups(t *testing.T, subject string, issuer string, aud string, groups []string) 
string { + claims := claims{ + Groups: groups, + } + + return tokenWithClaims(t, subject, issuer, aud, claims) +} + +func tokenWithStringGroups(t *testing.T, subject string, issuer string, aud string, groups string) string { + claims := claims{ + GroupAsString: groups, + } + + return tokenWithClaims(t, subject, issuer, aud, claims) +} + +func tokenWithClaims(t *testing.T, subject string, issuer string, aud string, claims claims) string { + claims.StandardClaims = jwt.StandardClaims{ + Subject: subject, + Issuer: issuer, + Audience: aud, + ExpiresAt: time.Now().Add(10 * time.Second).Unix(), + } + + token, err := signToken(claims) + require.Nil(t, err, "signing token should not error") + + return token +} + +func Test_Middleware_CertificateDownload(t *testing.T) { + newClientWithCertificate := func(certificate string) (*Client, *logrustest.Hook) { + logger, loggerHook := logrustest.NewNullLogger() + logger.SetLevel(logrus.InfoLevel) + cfg := config.Config{ + Authentication: config.Authentication{ + OIDC: config.OIDC{ + Enabled: true, + Certificate: runtime.NewDynamicValue(certificate), + }, + }, + } + client := &Client{ + Config: cfg.Authentication.OIDC, + logger: logger.WithField("component", "oidc"), + } + return client, loggerHook + } + + verifyLogs := func(t *testing.T, loggerHook *logrustest.Hook, certificateSource string) { + for _, logEntry := range loggerHook.AllEntries() { + assert.Contains(t, logEntry.Message, "custom certificate is valid") + assert.Contains(t, logEntry.Data["source"], certificateSource) + assert.Contains(t, logEntry.Data["action"], "oidc_init") + assert.Contains(t, logEntry.Data["component"], "oidc") + } + } + + t.Run("certificate string", func(t *testing.T) { + client, loggerHook := newClientWithCertificate(testingCertificate) + clientWithCertificate, err := client.useCertificate() + require.NoError(t, err) + require.NotNil(t, clientWithCertificate) + verifyLogs(t, loggerHook, "environment variable") + }) + + t.Run("certificate URL", 
func(t *testing.T) { + certificateServer := newServerWithCertificate() + defer certificateServer.Close() + source := certificateURL(certificateServer) + client, loggerHook := newClientWithCertificate(source) + clientWithCertificate, err := client.useCertificate() + require.NoError(t, err) + require.NotNil(t, clientWithCertificate) + verifyLogs(t, loggerHook, source) + }) + + t.Run("unparseable string", func(t *testing.T) { + client, _ := newClientWithCertificate("unparseable") + clientWithCertificate, err := client.useCertificate() + require.Nil(t, clientWithCertificate) + require.Error(t, err) + assert.ErrorContains(t, err, "failed to decode certificate") + }) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/oidc/oidc_server_for_test.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/oidc/oidc_server_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9a764df10809f98ba72b141a0b61ad370276fcd2 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/oidc/oidc_server_for_test.go @@ -0,0 +1,115 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package oidc + +import ( + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "testing" + + jose "github.com/go-jose/go-jose/v4" + "github.com/golang-jwt/jwt/v4" +) + +func newOIDCServer(t *testing.T) *httptest.Server { + // we need to start up with an empty handler + s := httptest.NewServer(nil) + + // so that we can configure it once we now the url, this is used to match the + // issue field + s.Config.Handler = oidcHandler(t, s.URL) + return s +} + +type oidcDiscovery struct { + Issuer string `json:"issuer"` + JWKSUri string `json:"jwks_uri"` +} + +type jwksResponse struct { + Keys []jose.JSONWebKey `json:"keys"` +} + +func oidcHandler(t *testing.T, url string) http.Handler { + mux := http.NewServeMux() + + publicKey, err := jwt.ParseRSAPublicKeyFromPEM([]byte(testingPublicKey)) + if err != nil { + t.Fatalf("test server: couldn't parse public key: %v", err) + } + + mux.HandleFunc("/.well-known/openid-configuration", func(w http.ResponseWriter, req *http.Request) { + w.Header().Add("Content-Type", "application/json") + d := oidcDiscovery{ + Issuer: url, + JWKSUri: fmt.Sprintf("%v/.well-known/jwks", url), + } + json.NewEncoder(w).Encode(d) + }) + + mux.HandleFunc("/.well-known/jwks", func(w http.ResponseWriter, req *http.Request) { + w.Header().Add("Content-Type", "application/json") + d := jwksResponse{ + Keys: []jose.JSONWebKey{ + { + Key: publicKey, + Use: "sig", + Algorithm: string(jose.RS256), + KeyID: "my-key", + }, + }, + } + if err := json.NewEncoder(w).Encode(d); err != nil { + t.Fatalf("encoding jwks in test server: %v", err) + } + }) + + return mux +} + +// those keys are intended to make it possible to sign our own tokens in tests. +// Never use these keys for anything outside a test scenario! 
+ +var testingPrivateKey = `-----BEGIN RSA PRIVATE KEY----- +MIICXAIBAAKBgQDFRV9sD1ULVV7q1w9OXCXPTFRcrTYAZAVZwg8X9V1QyBd8eyp5 +OMI4YxuL7sk+Las+PTcS6AdrHitdDZNqUjWFYOo5EQLnVBghIlu3ZWlAnM2SCPo5 +e2jFD8IgAVHtkAHbFUliQtP6a6OOLMRq9GMhIv2ZWf79KyXvh5DFuM7zbwIDAQAB +AoGAXptEhghcWtEYcjutZYEfyOjsVH3lNg7B2igNIQpVNFahnNtcpUIpMu2k2lks +Phuc0n59GR4Z4K9ZUIkgN48xhuqDtHevMQLfg6KQaqf0KRwxBw4dIOhUX0aLkvcJ +WTtUPE+3hYbOuAPuXVBDB6hBZAe5mbvLPYDM3yYyRotbN7ECQQD/S3Y+shEHOMg1 +ve1eQ4tjN+5Fdmq8l2JIbOPpvH6ytiEQSV2Q55u8gL+1x5Tb9vh3rAdg2OJ0LFay +VTqmCmkDAkEAxdDgvDqk7JwMbM2jxozVEcECoN07eGrshVWlXtnEpJgU4vBN8wAj +sS94WZCWu4LZRzPHp36dVDiPFS0aqGlCJQJAMGKX/Zf4HDtJzs25YEVC9MIT+bxQ +zH+QlBN3OsSL6skUCScugZkz7g0kyIoUD4CGZQAIwfU5LjV9FP2MSQ3uCwJAZxS0 +t4F7xcx/cQcry+BBe7HvU7JVNifJvqVlumqSXQ7e+28rv3AYKVHKTinZUjcaUE88 +QBzrkSKz9N3/ITlQfQJBAL25aXdmooBdYQUvXmNu+n10wwDAqCKtoGW75cZBJvjX +WnBQsDVlzaBcs32lr08XZIAH318OibfmAs5HKHABoFk= +-----END RSA PRIVATE KEY-----` + +var testingPublicKey = `-----BEGIN PUBLIC KEY----- +MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDFRV9sD1ULVV7q1w9OXCXPTFRc +rTYAZAVZwg8X9V1QyBd8eyp5OMI4YxuL7sk+Las+PTcS6AdrHitdDZNqUjWFYOo5 +EQLnVBghIlu3ZWlAnM2SCPo5e2jFD8IgAVHtkAHbFUliQtP6a6OOLMRq9GMhIv2Z +Wf79KyXvh5DFuM7zbwIDAQAB +-----END PUBLIC KEY-----` + +func signToken(claims jwt.Claims) (string, error) { + token := jwt.NewWithClaims(jwt.SigningMethodRS256, claims) + key, err := jwt.ParseRSAPrivateKeyFromPEM([]byte(testingPrivateKey)) + if err != nil { + return "", err + } + + return token.SignedString(key) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/oidc/oidc_server_with_certificate_for_test.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/oidc/oidc_server_with_certificate_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..5f210f96631587ace32712e4533c7601ed820c36 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authentication/oidc/oidc_server_with_certificate_for_test.go @@ -0,0 +1,79 @@ 
+// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package oidc + +import ( + "bytes" + "fmt" + "net/http" + "net/http/httptest" + "time" +) + +const testCertificateFilename = "certificate.crt" + +func newServerWithCertificate() *httptest.Server { + mux := http.NewServeMux() + mux.HandleFunc(fmt.Sprintf("/%s", testCertificateFilename), downloadHandler) + s := httptest.NewServer(mux) + return s +} + +func certificateURL(s *httptest.Server) string { + return fmt.Sprintf("%s/%s", s.URL, testCertificateFilename) +} + +func downloadHandler(w http.ResponseWriter, r *http.Request) { + fileName := testCertificateFilename + // Create in-memory buffer + buffer := bytes.NewBufferString(testingCertificate) + + // Set headers + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s", fileName)) + w.Header().Set("Content-Type", "text/plain") + w.Header().Set("Content-Length", fmt.Sprintf("%d", buffer.Len())) + + // Serve file from memory + http.ServeContent(w, r, fileName, time.Now(), bytes.NewReader(buffer.Bytes())) +} + +// This certificate is only intended for tests so that certificate parsing step won't fail +// Never use these certificate for anything outside a test scenario! 
+var testingCertificate = `-----BEGIN CERTIFICATE----- +MIIE8zCCAtugAwIBAgIUXiJ3NER2OjKPD4f4QehH+NRsujIwDQYJKoZIhvcNAQEL +BQAwFDESMBAGA1UEAwwJbW9jay1vaWRjMB4XDTI1MDYxMzE3MjEyNVoXDTI2MDYx +MzE3MjEyNVowFDESMBAGA1UEAwwJbW9jay1vaWRjMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEAvJ8n2h7h0ZKNO20MCQuUOn9eyA+VKtkkkoXfZFpsZ/vv +fvdOe9hNz+X9igJVugfwi9MgKhfPUnGOoWXJB8uwJl4yeHV8lzd5dXYkp3Pi6oCg +PkGsOwOL7KPQWkCcJDNY8Nj2IsaP+VsdQY2ydCaryztyUBZ8nKs6HWOFsMEPVQUt +01SDcWT8lBet6GZ+4QxY+JTnBCZCDoQUk9VAOWucstGfJ9lrC8PBNRNjIXtq88dR +d4e78RRdTrErwZstZgsjXvfoudNuHeAq5RdCBBuc95sCzHvcbsP7qn4JMDCtk1Xn +y7Jlh2gTAdLEjqj7yZHgmcRuDTv4AGY3jHXgB2dSYu9PXGBJR0x75O9ILfF/Rvgh +xqsaqTDmC7lFp1Tc7W0LC0joG0qsmD5Co1xhHpeivsydhI0FKcjZGp0tcKLdE8Nr +O+za2Tl8IDHkMUBNn1Spf0+MXCX5gVvWkqsvegin6+zz++0ncEqSZLJ1vhUywiJO +NnNRw8N3iYNfpuFKY3SWGOECVfw57Uzl3YD7p5HNBUSRgjIV5oPtJpluWPb9/v1X +1uYoBmerRQ48GOW/S+OeMHoTNHpGT+yFUYlooStsKUzlVg6qgDgGzciXllvSXlNP +aIk1el5LRWCQ+7NdxyIDZ7z018d5/Nvq4TGtYMxPJ1q2UslsuT8ma8kRdZDLAmcC +AwEAAaM9MDswGgYDVR0RBBMwEYIJbW9jay1vaWRjhwR/AAABMB0GA1UdDgQWBBTV +UEwK/9twnd+iP0B7eZFjR9eB3DANBgkqhkiG9w0BAQsFAAOCAgEAeAImntOcqGnN +30Y3I36v1tyYC3U109AatLarHvMglhkwmCPjStel0GuyOyxxpnzpbe17vlxSF4Bg +u/fXoM1PrPn0As/5/MQdbxZs5mDMc1D5ykpo0T+CYlgJNzxBDjCz+lODGf4vH9Ud +4ZKLWss0QCYCuF7psyOvwM223mWa63SvWgIBI7vBXcHwk5y4nVAb6jGZVTx0D9Xw +OTDsxeevpfndTuXQdSHQ09hZlmify7alAPC88Ef3oaudli8QjkNasQswAVKp+0V6 +jKYEHe0XUWcdKX3kT0I1jynh7EKcR/eiHePJv9tR2Trn8bAdgnqvmUwgKPx8mOTp +tXpkz/72v3Rbbp7/mmg3U9XVIe9i7jroa05sRrrnoqW0j1EGft8MEqYw8gspnbEw +OI0tw+zwMZ0ikkZzxCn4kn+TRuzGEvUQMz32T++v7YiQwZMgRLOSTuCov0G3Co8F +LIZf11PYTnOSAmwBLDiONbBOP5OJLnvscZc9bYeEBjSUvFnDeuEb7JduvYuDbche +zGO07I7oRupj0lzXVtFJJ8D/lTjl9usQ6pFXoFEI8WaOKcUA+kFEHUfKQ9QPrJsP +MtlI4pJn5rg9W195YH0eMj06QXuy4qi/pPZQHLfx4V7UpVydWemWXeYxVpwy0j2j +9CEbs0CkTGfHn/7DvYbVqIy/QMVxsMU= +-----END CERTIFICATE-----` diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/adminlist/authorizer.go 
b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/adminlist/authorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..5039b2b8dde04b692b14711550ca01218f57cb7f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/adminlist/authorizer.go @@ -0,0 +1,133 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package adminlist + +import ( + "context" + "fmt" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/auth/authorization/errors" +) + +const AnonymousPrincipalUsername = "anonymous" + +// Authorizer provides either full (admin) or no access +type Authorizer struct { + adminUsers map[string]int + readOnlyUsers map[string]int + adminGroups map[string]int + readOnlyGroups map[string]int +} + +// New Authorizer using the AdminList method +func New(cfg Config) *Authorizer { + a := &Authorizer{} + a.addAdminUserList(cfg.Users) + a.addReadOnlyUserList(cfg.ReadOnlyUsers) + a.addAdminGroupList(cfg.Groups) + a.addReadOnlyGroupList(cfg.ReadOnlyGroups) + return a +} + +// Authorize will give full access (to any resource!) 
if the user is part of +// the admin list or no access at all if they are not +func (a *Authorizer) Authorize(ctx context.Context, principal *models.Principal, verb string, resources ...string) error { + if principal == nil { + principal = newAnonymousPrincipal() + } + + if _, ok := a.adminUsers[principal.Username]; ok { + return nil + } + + for _, group := range principal.Groups { + if _, ok := a.adminGroups[group]; ok { + return nil + } + } + + if verb == "R" { + if _, ok := a.readOnlyUsers[principal.Username]; ok { + return nil + } + for _, group := range principal.Groups { + if _, ok := a.readOnlyGroups[group]; ok { + return nil + } + } + } + + return fmt.Errorf("adminlist: %w", errors.NewForbidden(principal, verb, resources...)) +} + +func (a *Authorizer) AuthorizeSilent(ctx context.Context, principal *models.Principal, verb string, resources ...string) error { + return a.Authorize(ctx, principal, verb, resources...) +} + +func (a *Authorizer) FilterAuthorizedResources(ctx context.Context, principal *models.Principal, verb string, resources ...string) ([]string, error) { + if err := a.Authorize(ctx, principal, verb, resources...); err != nil { + return nil, err + } + + return resources, nil +} + +func (a *Authorizer) addAdminUserList(users []string) { + // build a map for more efficient lookup on long lists + if a.adminUsers == nil { + a.adminUsers = map[string]int{} + } + + for _, user := range users { + a.adminUsers[user] = 1 + } +} + +func (a *Authorizer) addReadOnlyUserList(users []string) { + // build a map for more efficient lookup on long lists + if a.readOnlyUsers == nil { + a.readOnlyUsers = map[string]int{} + } + + for _, user := range users { + a.readOnlyUsers[user] = 1 + } +} + +func (a *Authorizer) addAdminGroupList(groups []string) { + // build a map for more efficient lookup on long lists + if a.adminGroups == nil { + a.adminGroups = map[string]int{} + } + + for _, group := range groups { + a.adminGroups[group] = 1 + } +} + +func (a *Authorizer) 
addReadOnlyGroupList(groups []string) { + // build a map for more efficient lookup on long lists + if a.readOnlyGroups == nil { + a.readOnlyGroups = map[string]int{} + } + + for _, group := range groups { + a.readOnlyGroups[group] = 1 + } +} + +func newAnonymousPrincipal() *models.Principal { + return &models.Principal{ + Username: AnonymousPrincipalUsername, + } +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/adminlist/authorizer_test.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/adminlist/authorizer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9e4b7dc95c74def0edc3a4090facec82c8fe3a7c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/adminlist/authorizer_test.go @@ -0,0 +1,449 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package adminlist + +import ( + "context" + "errors" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" + authZErrors "github.com/weaviate/weaviate/usecases/auth/authorization/errors" +) + +func Test_AdminList_Authorizer(t *testing.T) { + t.Run("with read requests", func(t *testing.T) { + t.Run("with no users configured at all", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{}, + } + + principal := &models.Principal{ + Username: "johndoe", + } + + err := New(cfg).Authorize(context.Background(), principal, "R", "things") + + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + assert.Contains(t, err.Error(), "johndoe") + }) + + t.Run("with a nil principal", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{}, + } + + principal := (*models.Principal)(nil) + err := New(cfg).Authorize(context.Background(), principal, "R", "things") + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + assert.Contains(t, err.Error(), "anonymous") + }) + + t.Run("with a non-configured user, it denies the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + } + + err := New(cfg).Authorize(context.Background(), principal, "R", "things") + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + assert.Contains(t, err.Error(), "johndoe") + }) + + t.Run("with a configured admin user, it allows the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + "johndoe", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + } + + err := New(cfg).Authorize(context.Background(), principal, "R", "things") + assert.Nil(t, err) + }) + + 
t.Run("with a configured read-only user, it allows the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + ReadOnlyUsers: []string{ + "alice", + "johndoe", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + } + + err := New(cfg).Authorize(context.Background(), principal, "R", "things") + assert.Nil(t, err) + }) + + t.Run("with anonymous as read-only user and no principal, it allows the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + ReadOnlyUsers: []string{ + "anonymous", + }, + } + + principal := (*models.Principal)(nil) + err := New(cfg).Authorize(context.Background(), principal, "R", "things") + assert.Nil(t, err) + }) + }) + + t.Run("with a non-configured group, it denies the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Groups: []string{ + "band", + }, + } + + principal := &models.Principal{ + Username: "alice", + Groups: []string{ + "posse", + }, + } + err := New(cfg).Authorize(context.Background(), principal, "R", "things") + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + assert.Contains(t, err.Error(), "posse") + assert.Contains(t, err.Error(), "alice") + }) + + t.Run("with a configured admin group, it allows the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Groups: []string{ + "band", + "posse", + }, + } + + principal := &models.Principal{ + Username: "alice", + Groups: []string{ + "posse", + }, + } + err := New(cfg).Authorize(context.Background(), principal, "R", "things") + assert.Nil(t, err) + }) + + t.Run("with a configured read-only group, it allows the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + ReadOnlyGroups: []string{ + "band", + "posse", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + Groups: []string{ + "posse", + }, + } + + err := New(cfg).Authorize(context.Background(), principal, "R", "things") + assert.Nil(t, err) + }) + + t.Run("with a 
configured admin user and non-configured group, it allows the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + "johndoe", + }, + Groups: []string{ + "band", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + Groups: []string{ + "posse", + }, + } + + err := New(cfg).Authorize(context.Background(), principal, "R", "things") + assert.Nil(t, err) + }) + + t.Run("with a configured read-only user and non-configured read-only group, it allows the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + ReadOnlyUsers: []string{ + "alice", + "johndoe", + }, + ReadOnlyGroups: []string{ + "band", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + Groups: []string{ + "posse", + }, + } + + err := New(cfg).Authorize(context.Background(), principal, "R", "things") + assert.Nil(t, err) + }) + + t.Run("with write/delete requests", func(t *testing.T) { + t.Run("with a nil principal", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{}, + } + + principal := (*models.Principal)(nil) + err := New(cfg).Authorize(context.Background(), principal, "create", "things") + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + assert.Contains(t, err.Error(), "anonymous") + }) + + t.Run("with no users configured at all", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{}, + } + + principal := &models.Principal{ + Username: "johndoe", + } + + err := New(cfg).Authorize(context.Background(), principal, "create", "things") + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + assert.Contains(t, err.Error(), "johndoe") + }) + + t.Run("with a non-configured user, it denies the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + } + + err := 
New(cfg).Authorize(context.Background(), principal, "create", "things") + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + assert.Contains(t, err.Error(), "johndoe") + }) + + t.Run("with an empty user, it denies the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + }, + } + + principal := &models.Principal{ + Username: "", + } + + err := New(cfg).Authorize(context.Background(), principal, "create", "things") + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + }) + + t.Run("with a configured admin user, it allows the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + "johndoe", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + } + + err := New(cfg).Authorize(context.Background(), principal, "create", "things") + assert.Nil(t, err) + }) + + t.Run("with a configured read-only user, it denies the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + ReadOnlyUsers: []string{ + "alice", + "johndoe", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + } + + err := New(cfg).Authorize(context.Background(), principal, "create", "things") + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + assert.Contains(t, err.Error(), "johndoe") + }) + + t.Run("with anonymous on the read-only list and a nil principal", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{}, + ReadOnlyUsers: []string{ + "anonymous", + }, + } + + principal := (*models.Principal)(nil) + err := New(cfg).Authorize(context.Background(), principal, "create", "things") + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + assert.Contains(t, err.Error(), "anonymous") + }) + + t.Run("with a non-configured group, it denies the request", 
func(t *testing.T) { + cfg := Config{ + Enabled: true, + Groups: []string{ + "band", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + Groups: []string{ + "posse", + }, + } + err := New(cfg).Authorize(context.Background(), principal, "create", "things") + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + assert.Contains(t, err.Error(), "posse") + assert.Contains(t, err.Error(), "johndoe") + }) + + t.Run("with an empty group, it denies the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Groups: []string{ + "band", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + Groups: []string{}, + } + err := New(cfg).Authorize(context.Background(), principal, "create", "things") + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + assert.Contains(t, err.Error(), "johndoe") + }) + + t.Run("with a configured admin group, it allows the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Groups: []string{ + "band", + "posse", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + Groups: []string{ + "band", + }, + } + err := New(cfg).Authorize(context.Background(), principal, "create", "things") + assert.Nil(t, err) + }) + + t.Run("with a configured read-only group, it denies the request", func(t *testing.T) { + cfg := Config{ + Enabled: true, + ReadOnlyGroups: []string{ + "band", + "posse", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + Groups: []string{ + "posse", + }, + } + + err := New(cfg).Authorize(context.Background(), principal, "create", "things") + assert.True(t, errors.As(err, &authZErrors.Forbidden{})) + assert.Contains(t, err.Error(), "forbidden") + assert.Contains(t, err.Error(), "johndoe") + assert.Contains(t, err.Error(), "posse") + }) + + t.Run("with a configured admin user and non-configured group, it allows the request", func(t *testing.T) { + 
cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + "johndoe", + }, + Groups: []string{ + "band", + }, + } + + principal := &models.Principal{ + Username: "johndoe", + Groups: []string{ + "posse", + }, + } + + err := New(cfg).Authorize(context.Background(), principal, "create", "things") + assert.Nil(t, err) + }) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/adminlist/config.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/adminlist/config.go new file mode 100644 index 0000000000000000000000000000000000000000..605be2c4e226f6fabe3f5716e3963ae7268789c1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/adminlist/config.go @@ -0,0 +1,53 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package adminlist + +import "fmt" + +// Config makes every subject on the list an admin, whereas everyone else +// has no rights whatsoever +type Config struct { + Enabled bool `json:"enabled" yaml:"enabled"` + Users []string `json:"users" yaml:"users"` + ReadOnlyUsers []string `json:"read_only_users" yaml:"read_only_users"` + Groups []string `json:"groups" yaml:"groups"` + ReadOnlyGroups []string `json:"read_only_groups" yaml:"read_only_groups"` +} + +// Validate admin list config for viability, can be called from the central +// config package +func (c Config) Validate() error { + return c.validateOverlap() +} + +// we are expecting both lists to always contain few subjects and know that +// this comparison is only done once (at startup). We are therefore fine with +// the O(n^2) complexity of this very primitive overlap search in favor of very +// simple code. 
+func (c Config) validateOverlap() error { + for _, a := range c.Users { + for _, b := range c.ReadOnlyUsers { + if a == b { + return fmt.Errorf("admin list: subject '%s' is present on both admin and read-only list", a) + } + } + } + for _, a := range c.Groups { + for _, b := range c.ReadOnlyGroups { + if a == b { + return fmt.Errorf("admin list: subject '%s' is present on both admin and read-only list", a) + } + } + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/adminlist/config_test.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/adminlist/config_test.go new file mode 100644 index 0000000000000000000000000000000000000000..7ccbe3f798d9e7bd53c2d4afd130cae4281049cc --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/adminlist/config_test.go @@ -0,0 +1,224 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package adminlist + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Validation(t *testing.T) { + t.Run("with only an admin user list set", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + "johndoe", + }, + } + + err := cfg.Validate() + assert.Nil(t, err) + }) + + t.Run("with only a read only user list set", func(t *testing.T) { + cfg := Config{ + Enabled: true, + ReadOnlyUsers: []string{ + "alice", + "johndoe", + }, + } + + err := cfg.Validate() + assert.Nil(t, err) + }) + + t.Run("with both user lists present, but no overlap", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + }, + ReadOnlyUsers: []string{ + "johndoe", + }, + } + + err := cfg.Validate() + assert.Nil(t, err) + }) + + t.Run("with one subject part of both user lists", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + "johndoe", + }, + ReadOnlyUsers: []string{ + "johndoe", + }, + } + + err := cfg.Validate() + assert.Equal(t, err, fmt.Errorf("admin list: subject 'johndoe' is present on both admin and read-only list")) + }) + + t.Run("with only an admin group list set", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Groups: []string{ + "band", + "posse", + }, + } + + err := cfg.Validate() + assert.Nil(t, err) + }) + + t.Run("with only a read only group list set", func(t *testing.T) { + cfg := Config{ + Enabled: true, + ReadOnlyGroups: []string{ + "band", + "posse", + }, + } + + err := cfg.Validate() + assert.Nil(t, err) + }) + + t.Run("with both group lists present, but no overlap", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Groups: []string{ + "band", + }, + ReadOnlyGroups: []string{ + "posse", + }, + } + + err := cfg.Validate() + assert.Nil(t, err) + }) + + t.Run("with one subject part of both group lists", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Groups: []string{ + 
"band", + "posse", + }, + ReadOnlyGroups: []string{ + "posse", + }, + } + + err := cfg.Validate() + assert.Equal(t, err, fmt.Errorf("admin list: subject 'posse' is present on both admin and read-only list")) + }) + + t.Run("with both admin user and groups present", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + "johndoe", + }, + Groups: []string{ + "band", + "posse", + }, + } + + err := cfg.Validate() + assert.Nil(t, err) + }) + + t.Run("with an admin user and read only group set", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + "johndoe", + }, + ReadOnlyGroups: []string{ + "band", + "posse", + }, + } + + err := cfg.Validate() + assert.Nil(t, err) + }) + + t.Run("with both read only user and groups present", func(t *testing.T) { + cfg := Config{ + Enabled: true, + ReadOnlyUsers: []string{ + "alice", + "johndoe", + }, + ReadOnlyGroups: []string{ + "band", + "posse", + }, + } + + err := cfg.Validate() + assert.Nil(t, err) + }) + + t.Run("with a read only user and admin group set", func(t *testing.T) { + cfg := Config{ + Enabled: true, + ReadOnlyUsers: []string{ + "alice", + "johndoe", + }, + Groups: []string{ + "band", + "posse", + }, + } + + err := cfg.Validate() + assert.Nil(t, err) + }) + + t.Run("all user and group attributes present", func(t *testing.T) { + cfg := Config{ + Enabled: true, + Users: []string{ + "alice", + }, + ReadOnlyUsers: []string{ + "johndoe", + }, + Groups: []string{ + "band", + }, + ReadOnlyGroups: []string{ + "posse", + }, + } + + err := cfg.Validate() + assert.Nil(t, err) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/authorizer.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/authorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..70030adb99512a092f7f3f5decbd10ee3f1e708a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/authorizer.go @@ -0,0 
//                           _       _
// __      _____  __ ___   ___  __ _| |_ ___
// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
//  \ V  V /  __/ (_| |\ V /| | (_| | || __/
//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
//
// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io
//

package authorization

import (
	"context"

	"github.com/weaviate/weaviate/entities/models"
)

// Authorizer always makes a yes/no decision on a specific resource. Which
// authorization technique is used in the background (e.g. RBAC, adminlist,
// ...) is hidden through this interface
type Authorizer interface {
	// Authorize returns nil when the principal may perform verb on all of
	// the given resources, and a non-nil error otherwise.
	Authorize(ctx context.Context, principal *models.Principal, verb string, resources ...string) error
	// AuthorizeSilent Silent authorization without audit logs
	AuthorizeSilent(ctx context.Context, principal *models.Principal, verb string, resources ...string) error
	// FilterAuthorizedResources authorize the passed resources with best effort approach, it will return
	// list of allowed resources, if none, it will return an empty slice
	FilterAuthorizedResources(ctx context.Context, principal *models.Principal, verb string, resources ...string) ([]string, error)
}

// DummyAuthorizer is a pluggable Authorizer which can be used if no specific
// authorizer is configured. It will allow every auth decision, i.e. it is
// effectively the same as "no authorization at all"
type DummyAuthorizer struct{}

// Authorize on the DummyAuthorizer will allow any subject access to any
// resource
func (d *DummyAuthorizer) Authorize(ctx context.Context, principal *models.Principal, verb string, resources ...string) error {
	return nil
}

// AuthorizeSilent on the DummyAuthorizer likewise allows everything.
func (d *DummyAuthorizer) AuthorizeSilent(ctx context.Context, principal *models.Principal, verb string, resources ...string) error {
	return nil
}

// FilterAuthorizedResources on the DummyAuthorizer returns all requested
// resources unchanged, since every access is permitted.
func (d *DummyAuthorizer) FilterAuthorizedResources(ctx context.Context, principal *models.Principal, verb string, resources ...string) ([]string, error) {
	return resources, nil
}

//                           _       _
// __      _____  __ ___   ___  __ _| |_ ___
// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
//  \ V  V /  __/ (_| |\ V /| | (_| | || __/
//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
//
// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+// +// CONTACT: hello@weaviate.io +// + +package authorization + +import ( + "github.com/weaviate/weaviate/usecases/auth/authentication" +) + +type Controller interface { + UpdateRolesPermissions(roles map[string][]Policy) error + CreateRolesPermissions(roles map[string][]Policy) error + GetRoles(names ...string) (map[string][]Policy, error) + DeleteRoles(roles ...string) error + AddRolesForUser(user string, roles []string) error + GetRolesForUserOrGroup(user string, authMethod authentication.AuthType, isGroup bool) (map[string][]Policy, error) + GetUsersOrGroupForRole(role string, authMethod authentication.AuthType, IsGroup bool) ([]string, error) + RevokeRolesForUser(user string, roles ...string) error + RemovePermissions(role string, permissions []*Policy) error + HasPermission(role string, permission *Policy) (bool, error) + GetUsersOrGroupsWithRoles(isGroup bool, authMethod authentication.AuthType) ([]string, error) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/conv/casbin_converter.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/conv/casbin_converter.go new file mode 100644 index 0000000000000000000000000000000000000000..71185d0c7f0f6c40e799ab7f18f26fd1132fef47 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/conv/casbin_converter.go @@ -0,0 +1,107 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package conv + +import ( + "fmt" + "slices" + "strings" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func RolesToPolicies(roles ...*models.Role) (map[string][]authorization.Policy, error) { + rolesmap := make(map[string][]authorization.Policy) + for idx := range roles { + rolesmap[*roles[idx].Name] = []authorization.Policy{} + for _, permission := range roles[idx].Permissions { + policy, err := policy(permission) + if err != nil { + return rolesmap, fmt.Errorf("policy: %w", err) + } + rolesmap[*roles[idx].Name] = append(rolesmap[*roles[idx].Name], *policy) + } + } + + return rolesmap, nil +} + +func PermissionToPolicies(permissions ...*models.Permission) ([]*authorization.Policy, error) { + policies := []*authorization.Policy{} + for idx := range permissions { + policy, err := policy(permissions[idx]) + if err != nil { + return nil, fmt.Errorf("policy: %w", err) + } + policies = append(policies, policy) + } + + return policies, nil +} + +func PathToPermission(verb, path string) (*models.Permission, error) { + parts := strings.Split(path, "/") + if len(parts) < 1 { + return nil, fmt.Errorf("invalid path") + } + + return permission([]string{"", path, verb, parts[0]}, false) +} + +func PoliciesToPermission(policies ...authorization.Policy) ([]*models.Permission, error) { + permissions := []*models.Permission{} + for idx := range policies { + // 1st empty string to replace casbin pattern of having policy name as 1st place + // e.g. 
tester, roles/.*, (C)|(R)|(U)|(D), roles + // see newPolicy() + perm, err := permission([]string{"", policies[idx].Resource, policies[idx].Verb, policies[idx].Domain}, true) + if err != nil { + return nil, err + } + if perm.Action == nil { + continue + } + permissions = append(permissions, perm) + } + return permissions, nil +} + +func CasbinPolicies(casbinPolicies ...[][]string) (map[string][]authorization.Policy, error) { + rolesPermissions := make(map[string][]authorization.Policy) + for _, p := range casbinPolicies { + for _, policyParts := range p { + name := TrimRoleNamePrefix(policyParts[0]) + if slices.Contains(authorization.BuiltInRoles, name) { + perms := authorization.BuiltInPermissions[name] + for _, p := range perms { + perm, err := policy(p) + if err != nil { + return nil, fmt.Errorf("policy: %w", err) + } + rolesPermissions[name] = append(rolesPermissions[name], *perm) + } + } else { + perm, err := permission(policyParts, true) + if err != nil { + return nil, fmt.Errorf("permission: %w", err) + } + weaviatePerm, err := policy(perm) + if err != nil { + return nil, fmt.Errorf("policy: %w", err) + } + rolesPermissions[name] = append(rolesPermissions[name], *weaviatePerm) + } + } + } + return rolesPermissions, nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/mock_authorizer.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/mock_authorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..4e61c53450c90a6574380e1517e9d9c0fe461c7a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/mock_authorizer.go @@ -0,0 +1,249 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +// Code generated by mockery v2.53.2. 
// DO NOT EDIT.
// NOTE(review): mockery-generated mock for the Authorizer interface — do not
// hand-edit; regenerate with mockery if the interface changes.

package authorization

import (
	context "context"

	mock "github.com/stretchr/testify/mock"
	models "github.com/weaviate/weaviate/entities/models"
)

// MockAuthorizer is an autogenerated mock type for the Authorizer type
type MockAuthorizer struct {
	mock.Mock
}

type MockAuthorizer_Expecter struct {
	mock *mock.Mock
}

func (_m *MockAuthorizer) EXPECT() *MockAuthorizer_Expecter {
	return &MockAuthorizer_Expecter{mock: &_m.Mock}
}

// Authorize provides a mock function with given fields: ctx, principal, verb, resources
func (_m *MockAuthorizer) Authorize(ctx context.Context, principal *models.Principal, verb string, resources ...string) error {
	// flatten the variadic resources into the flat argument list mock records
	_va := make([]interface{}, len(resources))
	for _i := range resources {
		_va[_i] = resources[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, ctx, principal, verb)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)

	if len(ret) == 0 {
		panic("no return value specified for Authorize")
	}

	var r0 error
	if rf, ok := ret.Get(0).(func(context.Context, *models.Principal, string, ...string) error); ok {
		r0 = rf(ctx, principal, verb, resources...)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}

// MockAuthorizer_Authorize_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Authorize'
type MockAuthorizer_Authorize_Call struct {
	*mock.Call
}

// Authorize is a helper method to define mock.On call
//   - ctx context.Context
//   - principal *models.Principal
//   - verb string
//   - resources ...string
func (_e *MockAuthorizer_Expecter) Authorize(ctx interface{}, principal interface{}, verb interface{}, resources ...interface{}) *MockAuthorizer_Authorize_Call {
	return &MockAuthorizer_Authorize_Call{Call: _e.mock.On("Authorize",
		append([]interface{}{ctx, principal, verb}, resources...)...)}
}

func (_c *MockAuthorizer_Authorize_Call) Run(run func(ctx context.Context, principal *models.Principal, verb string, resources ...string)) *MockAuthorizer_Authorize_Call {
	_c.Call.Run(func(args mock.Arguments) {
		variadicArgs := make([]string, len(args)-3)
		for i, a := range args[3:] {
			if a != nil {
				variadicArgs[i] = a.(string)
			}
		}
		run(args[0].(context.Context), args[1].(*models.Principal), args[2].(string), variadicArgs...)
	})
	return _c
}

func (_c *MockAuthorizer_Authorize_Call) Return(_a0 error) *MockAuthorizer_Authorize_Call {
	_c.Call.Return(_a0)
	return _c
}

func (_c *MockAuthorizer_Authorize_Call) RunAndReturn(run func(context.Context, *models.Principal, string, ...string) error) *MockAuthorizer_Authorize_Call {
	_c.Call.Return(run)
	return _c
}

// AuthorizeSilent provides a mock function with given fields: ctx, principal, verb, resources
func (_m *MockAuthorizer) AuthorizeSilent(ctx context.Context, principal *models.Principal, verb string, resources ...string) error {
	_va := make([]interface{}, len(resources))
	for _i := range resources {
		_va[_i] = resources[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, ctx, principal, verb)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)

	if len(ret) == 0 {
		panic("no return value specified for AuthorizeSilent")
	}

	var r0 error
	if rf, ok := ret.Get(0).(func(context.Context, *models.Principal, string, ...string) error); ok {
		r0 = rf(ctx, principal, verb, resources...)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}

// MockAuthorizer_AuthorizeSilent_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AuthorizeSilent'
type MockAuthorizer_AuthorizeSilent_Call struct {
	*mock.Call
}

// AuthorizeSilent is a helper method to define mock.On call
//   - ctx context.Context
//   - principal *models.Principal
//   - verb string
//   - resources ...string
func (_e *MockAuthorizer_Expecter) AuthorizeSilent(ctx interface{}, principal interface{}, verb interface{}, resources ...interface{}) *MockAuthorizer_AuthorizeSilent_Call {
	return &MockAuthorizer_AuthorizeSilent_Call{Call: _e.mock.On("AuthorizeSilent",
		append([]interface{}{ctx, principal, verb}, resources...)...)}
}

func (_c *MockAuthorizer_AuthorizeSilent_Call) Run(run func(ctx context.Context, principal *models.Principal, verb string, resources ...string)) *MockAuthorizer_AuthorizeSilent_Call {
	_c.Call.Run(func(args mock.Arguments) {
		variadicArgs := make([]string, len(args)-3)
		for i, a := range args[3:] {
			if a != nil {
				variadicArgs[i] = a.(string)
			}
		}
		run(args[0].(context.Context), args[1].(*models.Principal), args[2].(string), variadicArgs...)
	})
	return _c
}

func (_c *MockAuthorizer_AuthorizeSilent_Call) Return(_a0 error) *MockAuthorizer_AuthorizeSilent_Call {
	_c.Call.Return(_a0)
	return _c
}

func (_c *MockAuthorizer_AuthorizeSilent_Call) RunAndReturn(run func(context.Context, *models.Principal, string, ...string) error) *MockAuthorizer_AuthorizeSilent_Call {
	_c.Call.Return(run)
	return _c
}

// FilterAuthorizedResources provides a mock function with given fields: ctx, principal, verb, resources
func (_m *MockAuthorizer) FilterAuthorizedResources(ctx context.Context, principal *models.Principal, verb string, resources ...string) ([]string, error) {
	_va := make([]interface{}, len(resources))
	for _i := range resources {
		_va[_i] = resources[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, ctx, principal, verb)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)

	if len(ret) == 0 {
		panic("no return value specified for FilterAuthorizedResources")
	}

	var r0 []string
	var r1 error
	if rf, ok := ret.Get(0).(func(context.Context, *models.Principal, string, ...string) ([]string, error)); ok {
		return rf(ctx, principal, verb, resources...)
	}
	if rf, ok := ret.Get(0).(func(context.Context, *models.Principal, string, ...string) []string); ok {
		r0 = rf(ctx, principal, verb, resources...)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]string)
		}
	}

	if rf, ok := ret.Get(1).(func(context.Context, *models.Principal, string, ...string) error); ok {
		r1 = rf(ctx, principal, verb, resources...)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}

// MockAuthorizer_FilterAuthorizedResources_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'FilterAuthorizedResources'
type MockAuthorizer_FilterAuthorizedResources_Call struct {
	*mock.Call
}

// FilterAuthorizedResources is a helper method to define mock.On call
//   - ctx context.Context
//   - principal *models.Principal
//   - verb string
//   - resources ...string
func (_e *MockAuthorizer_Expecter) FilterAuthorizedResources(ctx interface{}, principal interface{}, verb interface{}, resources ...interface{}) *MockAuthorizer_FilterAuthorizedResources_Call {
	return &MockAuthorizer_FilterAuthorizedResources_Call{Call: _e.mock.On("FilterAuthorizedResources",
		append([]interface{}{ctx, principal, verb}, resources...)...)}
}

func (_c *MockAuthorizer_FilterAuthorizedResources_Call) Run(run func(ctx context.Context, principal *models.Principal, verb string, resources ...string)) *MockAuthorizer_FilterAuthorizedResources_Call {
	_c.Call.Run(func(args mock.Arguments) {
		variadicArgs := make([]string, len(args)-3)
		for i, a := range args[3:] {
			if a != nil {
				variadicArgs[i] = a.(string)
			}
		}
		run(args[0].(context.Context), args[1].(*models.Principal), args[2].(string), variadicArgs...)
	})
	return _c
}

func (_c *MockAuthorizer_FilterAuthorizedResources_Call) Return(_a0 []string, _a1 error) *MockAuthorizer_FilterAuthorizedResources_Call {
	_c.Call.Return(_a0, _a1)
	return _c
}

func (_c *MockAuthorizer_FilterAuthorizedResources_Call) RunAndReturn(run func(context.Context, *models.Principal, string, ...string) ([]string, error)) *MockAuthorizer_FilterAuthorizedResources_Call {
	_c.Call.Return(run)
	return _c
}

// NewMockAuthorizer creates a new instance of MockAuthorizer. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
+func NewMockAuthorizer(t interface { + mock.TestingT + Cleanup(func()) +}) *MockAuthorizer { + mock := &MockAuthorizer{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/mock_controller.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/mock_controller.go new file mode 100644 index 0000000000000000000000000000000000000000..5f1dcacfd9522b90d35830894f3a36309777a463 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/mock_controller.go @@ -0,0 +1,659 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +// Code generated by mockery v2.53.2. DO NOT EDIT. + +package authorization + +import ( + mock "github.com/stretchr/testify/mock" + authentication "github.com/weaviate/weaviate/usecases/auth/authentication" +) + +// MockController is an autogenerated mock type for the Controller type +type MockController struct { + mock.Mock +} + +type MockController_Expecter struct { + mock *mock.Mock +} + +func (_m *MockController) EXPECT() *MockController_Expecter { + return &MockController_Expecter{mock: &_m.Mock} +} + +// AddRolesForUser provides a mock function with given fields: user, roles +func (_m *MockController) AddRolesForUser(user string, roles []string) error { + ret := _m.Called(user, roles) + + if len(ret) == 0 { + panic("no return value specified for AddRolesForUser") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, []string) error); ok { + r0 = rf(user, roles) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// MockController_AddRolesForUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 
'AddRolesForUser' +type MockController_AddRolesForUser_Call struct { + *mock.Call +} + +// AddRolesForUser is a helper method to define mock.On call +// - user string +// - roles []string +func (_e *MockController_Expecter) AddRolesForUser(user interface{}, roles interface{}) *MockController_AddRolesForUser_Call { + return &MockController_AddRolesForUser_Call{Call: _e.mock.On("AddRolesForUser", user, roles)} +} + +func (_c *MockController_AddRolesForUser_Call) Run(run func(user string, roles []string)) *MockController_AddRolesForUser_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].([]string)) + }) + return _c +} + +func (_c *MockController_AddRolesForUser_Call) Return(_a0 error) *MockController_AddRolesForUser_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockController_AddRolesForUser_Call) RunAndReturn(run func(string, []string) error) *MockController_AddRolesForUser_Call { + _c.Call.Return(run) + return _c +} + +// CreateRolesPermissions provides a mock function with given fields: roles +func (_m *MockController) CreateRolesPermissions(roles map[string][]Policy) error { + ret := _m.Called(roles) + + if len(ret) == 0 { + panic("no return value specified for CreateRolesPermissions") + } + + var r0 error + if rf, ok := ret.Get(0).(func(map[string][]Policy) error); ok { + r0 = rf(roles) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// MockController_CreateRolesPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateRolesPermissions' +type MockController_CreateRolesPermissions_Call struct { + *mock.Call +} + +// CreateRolesPermissions is a helper method to define mock.On call +// - roles map[string][]Policy +func (_e *MockController_Expecter) CreateRolesPermissions(roles interface{}) *MockController_CreateRolesPermissions_Call { + return &MockController_CreateRolesPermissions_Call{Call: _e.mock.On("CreateRolesPermissions", roles)} +} + +func (_c 
*MockController_CreateRolesPermissions_Call) Run(run func(roles map[string][]Policy)) *MockController_CreateRolesPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(map[string][]Policy)) + }) + return _c +} + +func (_c *MockController_CreateRolesPermissions_Call) Return(_a0 error) *MockController_CreateRolesPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockController_CreateRolesPermissions_Call) RunAndReturn(run func(map[string][]Policy) error) *MockController_CreateRolesPermissions_Call { + _c.Call.Return(run) + return _c +} + +// DeleteRoles provides a mock function with given fields: roles +func (_m *MockController) DeleteRoles(roles ...string) error { + _va := make([]interface{}, len(roles)) + for _i := range roles { + _va[_i] = roles[_i] + } + var _ca []interface{} + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for DeleteRoles") + } + + var r0 error + if rf, ok := ret.Get(0).(func(...string) error); ok { + r0 = rf(roles...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// MockController_DeleteRoles_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteRoles' +type MockController_DeleteRoles_Call struct { + *mock.Call +} + +// DeleteRoles is a helper method to define mock.On call +// - roles ...string +func (_e *MockController_Expecter) DeleteRoles(roles ...interface{}) *MockController_DeleteRoles_Call { + return &MockController_DeleteRoles_Call{Call: _e.mock.On("DeleteRoles", + append([]interface{}{}, roles...)...)} +} + +func (_c *MockController_DeleteRoles_Call) Run(run func(roles ...string)) *MockController_DeleteRoles_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]string, len(args)-0) + for i, a := range args[0:] { + if a != nil { + variadicArgs[i] = a.(string) + } + } + run(variadicArgs...) 
+ }) + return _c +} + +func (_c *MockController_DeleteRoles_Call) Return(_a0 error) *MockController_DeleteRoles_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockController_DeleteRoles_Call) RunAndReturn(run func(...string) error) *MockController_DeleteRoles_Call { + _c.Call.Return(run) + return _c +} + +// GetRoles provides a mock function with given fields: names +func (_m *MockController) GetRoles(names ...string) (map[string][]Policy, error) { + _va := make([]interface{}, len(names)) + for _i := range names { + _va[_i] = names[_i] + } + var _ca []interface{} + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for GetRoles") + } + + var r0 map[string][]Policy + var r1 error + if rf, ok := ret.Get(0).(func(...string) (map[string][]Policy, error)); ok { + return rf(names...) + } + if rf, ok := ret.Get(0).(func(...string) map[string][]Policy); ok { + r0 = rf(names...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string][]Policy) + } + } + + if rf, ok := ret.Get(1).(func(...string) error); ok { + r1 = rf(names...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockController_GetRoles_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRoles' +type MockController_GetRoles_Call struct { + *mock.Call +} + +// GetRoles is a helper method to define mock.On call +// - names ...string +func (_e *MockController_Expecter) GetRoles(names ...interface{}) *MockController_GetRoles_Call { + return &MockController_GetRoles_Call{Call: _e.mock.On("GetRoles", + append([]interface{}{}, names...)...)} +} + +func (_c *MockController_GetRoles_Call) Run(run func(names ...string)) *MockController_GetRoles_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]string, len(args)-0) + for i, a := range args[0:] { + if a != nil { + variadicArgs[i] = a.(string) + } + } + run(variadicArgs...) 
+ }) + return _c +} + +func (_c *MockController_GetRoles_Call) Return(_a0 map[string][]Policy, _a1 error) *MockController_GetRoles_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockController_GetRoles_Call) RunAndReturn(run func(...string) (map[string][]Policy, error)) *MockController_GetRoles_Call { + _c.Call.Return(run) + return _c +} + +// GetRolesForUserOrGroup provides a mock function with given fields: user, authMethod, isGroup +func (_m *MockController) GetRolesForUserOrGroup(user string, authMethod authentication.AuthType, isGroup bool) (map[string][]Policy, error) { + ret := _m.Called(user, authMethod, isGroup) + + if len(ret) == 0 { + panic("no return value specified for GetRolesForUserOrGroup") + } + + var r0 map[string][]Policy + var r1 error + if rf, ok := ret.Get(0).(func(string, authentication.AuthType, bool) (map[string][]Policy, error)); ok { + return rf(user, authMethod, isGroup) + } + if rf, ok := ret.Get(0).(func(string, authentication.AuthType, bool) map[string][]Policy); ok { + r0 = rf(user, authMethod, isGroup) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string][]Policy) + } + } + + if rf, ok := ret.Get(1).(func(string, authentication.AuthType, bool) error); ok { + r1 = rf(user, authMethod, isGroup) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockController_GetRolesForUserOrGroup_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRolesForUserOrGroup' +type MockController_GetRolesForUserOrGroup_Call struct { + *mock.Call +} + +// GetRolesForUserOrGroup is a helper method to define mock.On call +// - user string +// - authMethod authentication.AuthType +// - isGroup bool +func (_e *MockController_Expecter) GetRolesForUserOrGroup(user interface{}, authMethod interface{}, isGroup interface{}) *MockController_GetRolesForUserOrGroup_Call { + return &MockController_GetRolesForUserOrGroup_Call{Call: _e.mock.On("GetRolesForUserOrGroup", user, authMethod, 
isGroup)} +} + +func (_c *MockController_GetRolesForUserOrGroup_Call) Run(run func(user string, authMethod authentication.AuthType, isGroup bool)) *MockController_GetRolesForUserOrGroup_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(authentication.AuthType), args[2].(bool)) + }) + return _c +} + +func (_c *MockController_GetRolesForUserOrGroup_Call) Return(_a0 map[string][]Policy, _a1 error) *MockController_GetRolesForUserOrGroup_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockController_GetRolesForUserOrGroup_Call) RunAndReturn(run func(string, authentication.AuthType, bool) (map[string][]Policy, error)) *MockController_GetRolesForUserOrGroup_Call { + _c.Call.Return(run) + return _c +} + +// GetUsersOrGroupForRole provides a mock function with given fields: role, authMethod, IsGroup +func (_m *MockController) GetUsersOrGroupForRole(role string, authMethod authentication.AuthType, IsGroup bool) ([]string, error) { + ret := _m.Called(role, authMethod, IsGroup) + + if len(ret) == 0 { + panic("no return value specified for GetUsersOrGroupForRole") + } + + var r0 []string + var r1 error + if rf, ok := ret.Get(0).(func(string, authentication.AuthType, bool) ([]string, error)); ok { + return rf(role, authMethod, IsGroup) + } + if rf, ok := ret.Get(0).(func(string, authentication.AuthType, bool) []string); ok { + r0 = rf(role, authMethod, IsGroup) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + if rf, ok := ret.Get(1).(func(string, authentication.AuthType, bool) error); ok { + r1 = rf(role, authMethod, IsGroup) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockController_GetUsersOrGroupForRole_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetUsersOrGroupForRole' +type MockController_GetUsersOrGroupForRole_Call struct { + *mock.Call +} + +// GetUsersOrGroupForRole is a helper method to define mock.On call +// - role string +// - 
authMethod authentication.AuthType +// - IsGroup bool +func (_e *MockController_Expecter) GetUsersOrGroupForRole(role interface{}, authMethod interface{}, IsGroup interface{}) *MockController_GetUsersOrGroupForRole_Call { + return &MockController_GetUsersOrGroupForRole_Call{Call: _e.mock.On("GetUsersOrGroupForRole", role, authMethod, IsGroup)} +} + +func (_c *MockController_GetUsersOrGroupForRole_Call) Run(run func(role string, authMethod authentication.AuthType, IsGroup bool)) *MockController_GetUsersOrGroupForRole_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(authentication.AuthType), args[2].(bool)) + }) + return _c +} + +func (_c *MockController_GetUsersOrGroupForRole_Call) Return(_a0 []string, _a1 error) *MockController_GetUsersOrGroupForRole_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockController_GetUsersOrGroupForRole_Call) RunAndReturn(run func(string, authentication.AuthType, bool) ([]string, error)) *MockController_GetUsersOrGroupForRole_Call { + _c.Call.Return(run) + return _c +} + +// GetUsersOrGroupsWithRoles provides a mock function with given fields: isGroup, authMethod +func (_m *MockController) GetUsersOrGroupsWithRoles(isGroup bool, authMethod authentication.AuthType) ([]string, error) { + ret := _m.Called(isGroup, authMethod) + + if len(ret) == 0 { + panic("no return value specified for GetUsersOrGroupsWithRoles") + } + + var r0 []string + var r1 error + if rf, ok := ret.Get(0).(func(bool, authentication.AuthType) ([]string, error)); ok { + return rf(isGroup, authMethod) + } + if rf, ok := ret.Get(0).(func(bool, authentication.AuthType) []string); ok { + r0 = rf(isGroup, authMethod) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + if rf, ok := ret.Get(1).(func(bool, authentication.AuthType) error); ok { + r1 = rf(isGroup, authMethod) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockController_GetUsersOrGroupsWithRoles_Call is a *mock.Call that 
shadows Run/Return methods with type explicit version for method 'GetUsersOrGroupsWithRoles' +type MockController_GetUsersOrGroupsWithRoles_Call struct { + *mock.Call +} + +// GetUsersOrGroupsWithRoles is a helper method to define mock.On call +// - isGroup bool +// - authMethod authentication.AuthType +func (_e *MockController_Expecter) GetUsersOrGroupsWithRoles(isGroup interface{}, authMethod interface{}) *MockController_GetUsersOrGroupsWithRoles_Call { + return &MockController_GetUsersOrGroupsWithRoles_Call{Call: _e.mock.On("GetUsersOrGroupsWithRoles", isGroup, authMethod)} +} + +func (_c *MockController_GetUsersOrGroupsWithRoles_Call) Run(run func(isGroup bool, authMethod authentication.AuthType)) *MockController_GetUsersOrGroupsWithRoles_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(bool), args[1].(authentication.AuthType)) + }) + return _c +} + +func (_c *MockController_GetUsersOrGroupsWithRoles_Call) Return(_a0 []string, _a1 error) *MockController_GetUsersOrGroupsWithRoles_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockController_GetUsersOrGroupsWithRoles_Call) RunAndReturn(run func(bool, authentication.AuthType) ([]string, error)) *MockController_GetUsersOrGroupsWithRoles_Call { + _c.Call.Return(run) + return _c +} + +// HasPermission provides a mock function with given fields: role, permission +func (_m *MockController) HasPermission(role string, permission *Policy) (bool, error) { + ret := _m.Called(role, permission) + + if len(ret) == 0 { + panic("no return value specified for HasPermission") + } + + var r0 bool + var r1 error + if rf, ok := ret.Get(0).(func(string, *Policy) (bool, error)); ok { + return rf(role, permission) + } + if rf, ok := ret.Get(0).(func(string, *Policy) bool); ok { + r0 = rf(role, permission) + } else { + r0 = ret.Get(0).(bool) + } + + if rf, ok := ret.Get(1).(func(string, *Policy) error); ok { + r1 = rf(role, permission) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// 
MockController_HasPermission_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HasPermission' +type MockController_HasPermission_Call struct { + *mock.Call +} + +// HasPermission is a helper method to define mock.On call +// - role string +// - permission *Policy +func (_e *MockController_Expecter) HasPermission(role interface{}, permission interface{}) *MockController_HasPermission_Call { + return &MockController_HasPermission_Call{Call: _e.mock.On("HasPermission", role, permission)} +} + +func (_c *MockController_HasPermission_Call) Run(run func(role string, permission *Policy)) *MockController_HasPermission_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(*Policy)) + }) + return _c +} + +func (_c *MockController_HasPermission_Call) Return(_a0 bool, _a1 error) *MockController_HasPermission_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockController_HasPermission_Call) RunAndReturn(run func(string, *Policy) (bool, error)) *MockController_HasPermission_Call { + _c.Call.Return(run) + return _c +} + +// RemovePermissions provides a mock function with given fields: role, permissions +func (_m *MockController) RemovePermissions(role string, permissions []*Policy) error { + ret := _m.Called(role, permissions) + + if len(ret) == 0 { + panic("no return value specified for RemovePermissions") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, []*Policy) error); ok { + r0 = rf(role, permissions) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// MockController_RemovePermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RemovePermissions' +type MockController_RemovePermissions_Call struct { + *mock.Call +} + +// RemovePermissions is a helper method to define mock.On call +// - role string +// - permissions []*Policy +func (_e *MockController_Expecter) RemovePermissions(role interface{}, permissions interface{}) 
*MockController_RemovePermissions_Call { + return &MockController_RemovePermissions_Call{Call: _e.mock.On("RemovePermissions", role, permissions)} +} + +func (_c *MockController_RemovePermissions_Call) Run(run func(role string, permissions []*Policy)) *MockController_RemovePermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].([]*Policy)) + }) + return _c +} + +func (_c *MockController_RemovePermissions_Call) Return(_a0 error) *MockController_RemovePermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockController_RemovePermissions_Call) RunAndReturn(run func(string, []*Policy) error) *MockController_RemovePermissions_Call { + _c.Call.Return(run) + return _c +} + +// RevokeRolesForUser provides a mock function with given fields: user, roles +func (_m *MockController) RevokeRolesForUser(user string, roles ...string) error { + _va := make([]interface{}, len(roles)) + for _i := range roles { + _va[_i] = roles[_i] + } + var _ca []interface{} + _ca = append(_ca, user) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for RevokeRolesForUser") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, ...string) error); ok { + r0 = rf(user, roles...) 
+ } else { + r0 = ret.Error(0) + } + + return r0 +} + +// MockController_RevokeRolesForUser_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RevokeRolesForUser' +type MockController_RevokeRolesForUser_Call struct { + *mock.Call +} + +// RevokeRolesForUser is a helper method to define mock.On call +// - user string +// - roles ...string +func (_e *MockController_Expecter) RevokeRolesForUser(user interface{}, roles ...interface{}) *MockController_RevokeRolesForUser_Call { + return &MockController_RevokeRolesForUser_Call{Call: _e.mock.On("RevokeRolesForUser", + append([]interface{}{user}, roles...)...)} +} + +func (_c *MockController_RevokeRolesForUser_Call) Run(run func(user string, roles ...string)) *MockController_RevokeRolesForUser_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]string, len(args)-1) + for i, a := range args[1:] { + if a != nil { + variadicArgs[i] = a.(string) + } + } + run(args[0].(string), variadicArgs...) 
+ }) + return _c +} + +func (_c *MockController_RevokeRolesForUser_Call) Return(_a0 error) *MockController_RevokeRolesForUser_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockController_RevokeRolesForUser_Call) RunAndReturn(run func(string, ...string) error) *MockController_RevokeRolesForUser_Call { + _c.Call.Return(run) + return _c +} + +// UpdateRolesPermissions provides a mock function with given fields: roles +func (_m *MockController) UpdateRolesPermissions(roles map[string][]Policy) error { + ret := _m.Called(roles) + + if len(ret) == 0 { + panic("no return value specified for UpdateRolesPermissions") + } + + var r0 error + if rf, ok := ret.Get(0).(func(map[string][]Policy) error); ok { + r0 = rf(roles) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// MockController_UpdateRolesPermissions_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateRolesPermissions' +type MockController_UpdateRolesPermissions_Call struct { + *mock.Call +} + +// UpdateRolesPermissions is a helper method to define mock.On call +// - roles map[string][]Policy +func (_e *MockController_Expecter) UpdateRolesPermissions(roles interface{}) *MockController_UpdateRolesPermissions_Call { + return &MockController_UpdateRolesPermissions_Call{Call: _e.mock.On("UpdateRolesPermissions", roles)} +} + +func (_c *MockController_UpdateRolesPermissions_Call) Run(run func(roles map[string][]Policy)) *MockController_UpdateRolesPermissions_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(map[string][]Policy)) + }) + return _c +} + +func (_c *MockController_UpdateRolesPermissions_Call) Return(_a0 error) *MockController_UpdateRolesPermissions_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockController_UpdateRolesPermissions_Call) RunAndReturn(run func(map[string][]Policy) error) *MockController_UpdateRolesPermissions_Call { + _c.Call.Return(run) + return _c +} + +// NewMockController creates a new instance of 
MockController. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewMockController(t interface { + mock.TestingT + Cleanup(func()) +}) *MockController { + mock := &MockController{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/types.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/types.go new file mode 100644 index 0000000000000000000000000000000000000000..19cbcef921a2862f9864c2ff31453a2a0c550841 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/types.go @@ -0,0 +1,606 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authorization + +import ( + "fmt" + "strings" + + "github.com/weaviate/weaviate/usecases/auth/authentication" + + "github.com/go-openapi/strfmt" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/verbosity" +) + +const ( + // CREATE Represents the action to create a new resource. + CREATE = "C" + // READ Represents the action to retrieve a resource. + READ = "R" + // UPDATE Represents the action to update an existing resource. + UPDATE = "U" + // DELETE Represents the action to delete a resource. 
+ DELETE = "D" + + ROLE_SCOPE_ALL = "ALL" + ROLE_SCOPE_MATCH = "MATCH" + + USER_AND_GROUP_ASSIGN_AND_REVOKE = "A" +) + +const ( + GroupsDomain = "groups" + UsersDomain = "users" + RolesDomain = "roles" + ClusterDomain = "cluster" + NodesDomain = "nodes" + BackupsDomain = "backups" + SchemaDomain = "schema" + CollectionsDomain = "collections" + TenantsDomain = "tenants" + DataDomain = "data" + ReplicateDomain = "replicate" + AliasesDomain = "aliases" +) + +var ( + All = String("*") + + AllBackups = &models.PermissionBackups{ + Collection: All, + } + AllData = &models.PermissionData{ + Collection: All, + Tenant: All, + Object: All, + } + AllTenants = &models.PermissionTenants{ + Collection: All, + Tenant: All, + } + AllNodes = &models.PermissionNodes{ + Verbosity: String(verbosity.OutputVerbose), + Collection: All, + } + AllOIDCGroups = &models.PermissionGroups{ + Group: All, + GroupType: models.GroupTypeOidc, + } + AllRoles = &models.PermissionRoles{ + Role: All, + Scope: String(models.PermissionRolesScopeAll), + } + AllUsers = &models.PermissionUsers{ + Users: All, + } + AllCollections = &models.PermissionCollections{ + Collection: All, + } + AllReplicate = &models.PermissionReplicate{ + Collection: All, + Shard: All, + } + AllAliases = &models.PermissionAliases{ + Collection: All, + Alias: All, + } + + ComponentName = "RBAC" + + // Note: if a new action added, don't forget to add it to availableWeaviateActions + // to be added to built in roles + // any action has to contain of `{verb}_{domain}` verb: CREATE, READ, UPDATE, DELETE domain: roles, users, cluster, collections, data + ReadRoles = "read_roles" + CreateRoles = "create_roles" + UpdateRoles = "update_roles" + DeleteRoles = "delete_roles" + + ReadCluster = "read_cluster" + ReadNodes = "read_nodes" + + AssignAndRevokeGroups = "assign_and_revoke_groups" + ReadGroups = "read_groups" + + AssignAndRevokeUsers = "assign_and_revoke_users" + CreateUsers = "create_users" + ReadUsers = "read_users" + UpdateUsers = 
"update_users" + DeleteUsers = "delete_users" + + ManageBackups = "manage_backups" + + CreateCollections = "create_collections" + ReadCollections = "read_collections" + UpdateCollections = "update_collections" + DeleteCollections = "delete_collections" + + CreateData = "create_data" + ReadData = "read_data" + UpdateData = "update_data" + DeleteData = "delete_data" + + CreateTenants = "create_tenants" + ReadTenants = "read_tenants" + UpdateTenants = "update_tenants" + DeleteTenants = "delete_tenants" + + CreateReplicate = "create_replicate" + ReadReplicate = "read_replicate" + UpdateReplicate = "update_replicate" + DeleteReplicate = "delete_replicate" + + CreateAliases = "create_aliases" + ReadAliases = "read_aliases" + UpdateAliases = "update_aliases" + DeleteAliases = "delete_aliases" + + availableWeaviateActions = []string{ + // Roles domain + CreateRoles, + ReadRoles, + UpdateRoles, + DeleteRoles, + + // Backups domain + ManageBackups, + + // Users domain + AssignAndRevokeUsers, + CreateUsers, + ReadUsers, + UpdateUsers, + DeleteUsers, + + // Cluster domain + ReadCluster, + + // Groups domain + AssignAndRevokeGroups, + ReadGroups, + + // Nodes domain + ReadNodes, + + // Collections domain + CreateCollections, + ReadCollections, + UpdateCollections, + DeleteCollections, + + // Data domain + CreateData, + ReadData, + UpdateData, + DeleteData, + + // Tenant domain + CreateTenants, + ReadTenants, + UpdateTenants, + DeleteTenants, + + // Replicate domain + CreateReplicate, + ReadReplicate, + UpdateReplicate, + DeleteReplicate, + + // Aliases domain + CreateAliases, + ReadAliases, + UpdateAliases, + DeleteAliases, + } +) + +var ( + // build-in roles that can be assigned via API + Viewer = "viewer" + Admin = "admin" + // build-in roles that can be assigned via env vars and cannot be changed via APIS + Root = "root" + ReadOnly = "read-only" + BuiltInRoles = []string{Viewer, Admin, Root, ReadOnly} + + // viewer : can view everything , roles, users, schema, data + // 
editor : can create/read/update everything , roles, users, schema, data + // Admin : aka basically super Admin or root + BuiltInPermissions = map[string][]*models.Permission{ + Viewer: viewerPermissions(), + Admin: adminPermissions(), + Root: adminPermissions(), + ReadOnly: viewerPermissions(), + } + EnvVarRoles = []string{ReadOnly, Root} +) + +type Policy struct { + Resource string + Verb string + Domain string +} + +// Cluster returns a string representing the cluster authorization scope. +// The returned string is "cluster/*", which can be used to specify that +// the authorization applies to all resources within the cluster. +func Cluster() string { + return fmt.Sprintf("%s/*", ClusterDomain) +} + +func nodes(verbosity, class string) string { + if verbosity == "" { + verbosity = "minimal" + } + if verbosity == "minimal" { + return fmt.Sprintf("%s/verbosity/%s", NodesDomain, verbosity) + } + return fmt.Sprintf("%s/verbosity/%s/collections/%s", NodesDomain, verbosity, class) +} + +func Nodes(verbosity string, classes ...string) []string { + classes = schema.UppercaseClassesNames(classes...) + + if len(classes) == 0 || (len(classes) == 1 && (classes[0] == "" || classes[0] == "*")) { + return []string{nodes(verbosity, "*")} + } + + resources := make([]string, len(classes)) + for idx := range classes { + if classes[idx] == "" { + resources[idx] = nodes(verbosity, "*") + } else { + resources[idx] = nodes(verbosity, classes[idx]) + } + } + + return resources +} + +// Groups generates a list of user resource strings based on the provided group names. +// If no group names are provided, it returns a default user resource string "groups/*". +// +// Parameters: +// +// groups - A variadic parameter representing the group names. +// +// Returns: +// +// A slice of strings where each string is a formatted user resource string. 
+func Groups(groupType authentication.AuthType, groups ...string) []string { + if len(groups) == 0 || (len(groups) == 1 && (groups[0] == "" || groups[0] == "*")) { + return []string{ + fmt.Sprintf("%s/%s/*", GroupsDomain, groupType), + } + } + + resources := make([]string, len(groups)) + for idx := range groups { + resources[idx] = fmt.Sprintf("%s/%s/%s", GroupsDomain, groupType, groups[idx]) + } + + return resources +} + +// Users generates a list of user resource strings based on the provided user names. +// If no user names are provided, it returns a default user resource string "users/*". +// +// Parameters: +// +// users - A variadic parameter representing the user names. +// +// Returns: +// +// A slice of strings where each string is a formatted user resource string. +func Users(users ...string) []string { + if len(users) == 0 || (len(users) == 1 && (users[0] == "" || users[0] == "*")) { + return []string{ + fmt.Sprintf("%s/*", UsersDomain), + } + } + + resources := make([]string, len(users)) + for idx := range users { + resources[idx] = fmt.Sprintf("%s/%s", UsersDomain, users[idx]) + } + + return resources +} + +// Roles generates a list of role resource strings based on the provided role names. +// If no role names are provided, it returns a default role resource string "roles/*". +// +// Parameters: +// +// roles - A variadic parameter representing the role names. +// +// Returns: +// +// A slice of strings where each string is a formatted role resource string. +func Roles(roles ...string) []string { + if len(roles) == 0 || (len(roles) == 1 && (roles[0] == "" || roles[0] == "*")) { + return []string{ + fmt.Sprintf("%s/*", RolesDomain), + } + } + + resources := make([]string, len(roles)) + for idx := range roles { + resources[idx] = fmt.Sprintf("%s/%s", RolesDomain, roles[idx]) + } + + return resources +} + +// CollectionsMetadata generates a list of resource strings for the given classes. 
+// If no classes are provided, it returns a default resource string "collections/*". +// Each class is formatted as "collection/{class}". +// +// Parameters: +// +// classes - a variadic parameter representing the class names. +// +// Returns: +// +// A slice of strings representing the resource paths. +func CollectionsMetadata(classes ...string) []string { + classes = schema.UppercaseClassesNames(classes...) + + if len(classes) == 0 || (len(classes) == 1 && (classes[0] == "" || classes[0] == "*")) { + return []string{fmt.Sprintf("%s/collections/*/shards/#", SchemaDomain)} + } + + resources := make([]string, len(classes)) + for idx := range classes { + if classes[idx] == "" { + resources[idx] = fmt.Sprintf("%s/collections/*/shards/#", SchemaDomain) + } else { + resources[idx] = fmt.Sprintf("%s/collections/%s/shards/#", SchemaDomain, classes[idx]) + } + } + + return resources +} + +func Aliases(class string, aliases ...string) []string { + class = schema.UppercaseClassName(class) + aliases = schema.UppercaseClassesNames(aliases...) + + if class == "" { + class = "*" + } + + if len(aliases) == 0 || (len(aliases) == 1 && (aliases[0] == "" || aliases[0] == "*")) { + return []string{fmt.Sprintf("%s/collections/%s/aliases/*", AliasesDomain, class)} + } + + resources := make([]string, len(aliases)) + for idx := range aliases { + if aliases[idx] == "" { + resources[idx] = fmt.Sprintf("%s/collections/%s/aliases/*", AliasesDomain, class) + } else { + resources[idx] = fmt.Sprintf("%s/collections/%s/aliases/%s", AliasesDomain, class, aliases[idx]) + } + } + + return resources +} + +func CollectionsData(classes ...string) []string { + classes = schema.UppercaseClassesNames(classes...) 
+ + if len(classes) == 0 || (len(classes) == 1 && (classes[0] == "" || classes[0] == "*")) { + return []string{Objects("*", "*", "*")} + } + + var paths []string + for _, class := range classes { + paths = append(paths, Objects(class, "*", "*")) + } + return paths +} + +func Collections(classes ...string) []string { + classes = schema.UppercaseClassesNames(classes...) + return append(CollectionsData(classes...), CollectionsMetadata(classes...)...) +} + +// ShardsMetadata generates a list of shard resource strings for a given class and shards. +// If the class is an empty string, it defaults to "*". If no shards are provided, +// it returns a single resource string with a wildcard for shards. If shards are +// provided, it returns a list of resource strings for each shard. +// +// Parameters: +// - class: The class name for the resource. If empty, defaults to "*". +// - shards: A variadic list of shard names. If empty, it will replace it with '#' to mark it as collection only check +// +// Returns: +// +// A slice of strings representing the resource paths for the given class and shards. 
+func ShardsMetadata(class string, shards ...string) []string { + class = schema.UppercaseClassesNames(class)[0] + if class == "" { + class = "*" + } + + if len(shards) == 0 || (len(shards) == 1 && (shards[0] == "" || shards[0] == "*")) { + return []string{fmt.Sprintf("%s/collections/%s/shards/*", SchemaDomain, class)} + } + + resources := make([]string, len(shards)) + for idx := range shards { + if shards[idx] == "" { + resources[idx] = fmt.Sprintf("%s/collections/%s/shards/*", SchemaDomain, class) + } else { + resources[idx] = fmt.Sprintf("%s/collections/%s/shards/%s", SchemaDomain, class, shards[idx]) + } + } + + return resources +} + +func ShardsData(class string, shards ...string) []string { + class = schema.UppercaseClassesNames(class)[0] + var paths []string + for _, shard := range shards { + paths = append(paths, Objects(class, shard, "*")) + } + return paths +} + +// Objects generates a string representing a path to objects within a collection and shard. +// The path format varies based on the provided class, shard, and id parameters. +// +// Parameters: +// - class: the class of the collection (string) +// - shard: the shard identifier (string) +// - id: the unique identifier of the object (strfmt.UUID) +// +// Returns: +// - A string representing the path to the objects, with wildcards (*) used for any empty parameters. +// +// Example outputs: +// - "collections/*/shards/*/objects/*" if all parameters are empty +// - "collections/*/shards/*/objects/{id}" if only id is provided +// - "collections/{class}/shards/{shard}/objects/{id}" if all parameters are provided +func Objects(class, shard string, id strfmt.UUID) string { + class = schema.UppercaseClassesNames(class)[0] + if class == "" { + class = "*" + } + if shard == "" { + shard = "*" + } + if id == "" { + id = "*" + } + return fmt.Sprintf("%s/collections/%s/shards/%s/objects/%s", DataDomain, class, shard, id) +} + +// Backups generates a resource string for the given classes. 
+// If no classes are provided, or a single empty/"*" class is given, it defaults to all collections. + +// Parameters: +// - classes: the class names (variadic strings) + +// Returns: +// - A slice of strings representing the resource paths for the given classes. + +// Example outputs: +// - "backups/collections/*" if no classes (or a single empty/"*" class) are provided +// - "backups/collections/{Class}" for each provided class +func Backups(classes ...string) []string { + classes = schema.UppercaseClassesNames(classes...) + if len(classes) == 0 || (len(classes) == 1 && (classes[0] == "" || classes[0] == "*")) { + return []string{fmt.Sprintf("%s/collections/*", BackupsDomain)} + } + + resources := make([]string, len(classes)) + for idx := range classes { + if classes[idx] == "" { + resources[idx] = fmt.Sprintf("%s/collections/*", BackupsDomain) + } else { + resources[idx] = fmt.Sprintf("%s/collections/%s", BackupsDomain, classes[idx]) + } + } + + return resources +} + +// Replications generates a replication resource string for a given class and shard. +// +// Parameters: +// - class: The class name for the resource. If empty, defaults to "*". +// - shard: The shard name for the resource. If empty, defaults to "*". +// +// Returns: +// +// A string representing the resource path for the given class and shard. +func Replications(class, shard string) string { + class = schema.UppercaseClassName(class) + if class == "" { + class = "*" + } + if shard == "" { + shard = "*" + } + return fmt.Sprintf("%s/collections/%s/shards/%s", ReplicateDomain, class, shard) +} + +// WildcardPath returns the appropriate wildcard path based on the domain and original resource path. +// The domain is expected to be the first part of the resource path. 
+func WildcardPath(resource string) string { + parts := strings.Split(resource, "/") + parts[len(parts)-1] = "*" + return strings.Join(parts, "/") +} + +func String(s string) *string { + return &s +} + +// viewer : can view everything , roles, users, schema, data +func viewerPermissions() []*models.Permission { + perms := []*models.Permission{} + for _, action := range availableWeaviateActions { + if strings.ToUpper(action)[0] != READ[0] { + continue + } + + perms = append(perms, &models.Permission{ + Action: &action, + Backups: AllBackups, + Data: AllData, + Nodes: AllNodes, + Roles: AllRoles, + Collections: AllCollections, + Tenants: AllTenants, + Users: AllUsers, + Aliases: AllAliases, + Groups: AllOIDCGroups, + }) + } + + return perms +} + +// Admin : aka basically super Admin or root +func adminPermissions() []*models.Permission { + // TODO ignore CRUD if there is manage + perms := []*models.Permission{} + for _, action := range availableWeaviateActions { + perms = append(perms, &models.Permission{ + Action: &action, + Backups: AllBackups, + Data: AllData, + Nodes: AllNodes, + Roles: AllRoles, + Collections: AllCollections, + Tenants: AllTenants, + Users: AllUsers, + Aliases: AllAliases, + Groups: AllOIDCGroups, + }) + } + + return perms +} + +func VerbWithScope(verb, scope string) string { + if strings.Contains(verb, "_") { + return verb + } + + return fmt.Sprintf("%s_%s", verb, scope) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/types_test.go b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/types_test.go new file mode 100644 index 0000000000000000000000000000000000000000..4fc2590f2cb793ca41088418d69a2b0c336b2e74 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/auth/authorization/types_test.go @@ -0,0 +1,287 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| 
+// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package authorization + +import ( + "fmt" + "testing" + + "github.com/weaviate/weaviate/usecases/auth/authentication" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" +) + +func TestUsers(t *testing.T) { + tests := []struct { + name string + users []string + expected []string + }{ + {"No users", []string{}, []string{fmt.Sprintf("%s/*", UsersDomain)}}, + {"Single user", []string{"user1"}, []string{fmt.Sprintf("%s/user1", UsersDomain)}}, + {"Multiple users", []string{"user1", "user2"}, []string{fmt.Sprintf("%s/user1", UsersDomain), fmt.Sprintf("%s/user2", UsersDomain)}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := Users(tt.users...) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestGroups(t *testing.T) { + tests := []struct { + name string + groups []string + expected []string + }{ + {"No groups", []string{}, []string{fmt.Sprintf("%s/%s/*", GroupsDomain, authentication.AuthTypeOIDC)}}, + {"Single group", []string{"group1"}, []string{fmt.Sprintf("%s/%s/group1", GroupsDomain, authentication.AuthTypeOIDC)}}, + {"Multiple groups", []string{"group1", "group2"}, []string{fmt.Sprintf("%s/%s/group1", GroupsDomain, authentication.AuthTypeOIDC), fmt.Sprintf("%s/%s/group2", GroupsDomain, authentication.AuthTypeOIDC)}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := Groups(authentication.AuthTypeOIDC, tt.groups...) 
+ assert.Equal(t, tt.expected, result) + }) + } +} + +func TestRoles(t *testing.T) { + tests := []struct { + name string + roles []string + expected []string + }{ + {"No roles", []string{}, []string{fmt.Sprintf("%s/*", RolesDomain)}}, + {"Single role", []string{"admin"}, []string{fmt.Sprintf("%s/admin", RolesDomain)}}, + {"Multiple roles", []string{"admin", "user"}, []string{fmt.Sprintf("%s/admin", RolesDomain), fmt.Sprintf("%s/user", RolesDomain)}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := Roles(tt.roles...) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestCluster(t *testing.T) { + expected := "cluster/*" + result := Cluster() + assert.Equal(t, expected, result) +} + +func TestNodes(t *testing.T) { + tests := []struct { + name string + verbosity string + classes []string + expected []string + }{ + {"Empty verbosity", "", []string{}, []string{fmt.Sprintf("%s/verbosity/minimal", NodesDomain)}}, + {"Minimal verbosity", "minimal", []string{}, []string{fmt.Sprintf("%s/verbosity/minimal", NodesDomain)}}, + {"Minimal verbosity with classes", "minimal", []string{"class1"}, []string{fmt.Sprintf("%s/verbosity/minimal", NodesDomain)}}, + {"Verbose verbosity with no classes", "verbose", []string{}, []string{fmt.Sprintf("%s/verbosity/verbose/collections/*", NodesDomain)}}, + {"Verbose verbosity with classes", "verbose", []string{"class1", "class2"}, []string{fmt.Sprintf("%s/verbosity/verbose/collections/Class1", NodesDomain), fmt.Sprintf("%s/verbosity/verbose/collections/Class2", NodesDomain)}}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := Nodes(tt.verbosity, tt.classes...) 
+ assert.Equal(t, tt.expected, result) + }) + } +} + +func TestBackups(t *testing.T) { + tests := []struct { + name string + backend string + expected []string + }{ + {"No collection", "", []string{fmt.Sprintf("%s/collections/*", BackupsDomain)}}, + {"Collection", "class1", []string{fmt.Sprintf("%s/collections/Class1", BackupsDomain)}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := Backups(tt.backend) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestCollections(t *testing.T) { + tests := []struct { + name string + classes []string + expected []string + }{ + {"No classes", []string{}, []string{fmt.Sprintf("%s/collections/*/shards/#", SchemaDomain)}}, + {"Single empty class", []string{""}, []string{fmt.Sprintf("%s/collections/*/shards/#", SchemaDomain)}}, + {"Single class", []string{"class1"}, []string{fmt.Sprintf("%s/collections/Class1/shards/#", SchemaDomain)}}, + {"Multiple classes", []string{"class1", "class2"}, []string{fmt.Sprintf("%s/collections/Class1/shards/#", SchemaDomain), fmt.Sprintf("%s/collections/Class2/shards/#", SchemaDomain)}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := CollectionsMetadata(tt.classes...) 
+ assert.Equal(t, tt.expected, result) + }) + } +} + +func TestShards(t *testing.T) { + tests := []struct { + name string + class string + shards []string + expected []string + }{ + {"No class, no shards", "", []string{}, []string{fmt.Sprintf("%s/collections/*/shards/*", SchemaDomain)}}, + {"Class, no shards", "class1", []string{}, []string{fmt.Sprintf("%s/collections/Class1/shards/*", SchemaDomain)}}, + {"No class, single shard", "", []string{"shard1"}, []string{fmt.Sprintf("%s/collections/*/shards/shard1", SchemaDomain)}}, + {"Class, single shard", "class1", []string{"shard1"}, []string{fmt.Sprintf("%s/collections/Class1/shards/shard1", SchemaDomain)}}, + {"Class, multiple shards", "class1", []string{"shard1", "shard2"}, []string{fmt.Sprintf("%s/collections/Class1/shards/shard1", SchemaDomain), fmt.Sprintf("%s/collections/Class1/shards/shard2", SchemaDomain)}}, + {"Class, empty shard", "class1", []string{"shard1", ""}, []string{fmt.Sprintf("%s/collections/Class1/shards/shard1", SchemaDomain), fmt.Sprintf("%s/collections/Class1/shards/*", SchemaDomain)}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := ShardsMetadata(tt.class, tt.shards...) 
+ assert.Equal(t, tt.expected, result) + }) + } +} + +func TestObjects(t *testing.T) { + tests := []struct { + name string + class string + shard string + id strfmt.UUID + expected string + }{ + {"No class, no shard, no id", "", "", "", fmt.Sprintf("%s/collections/*/shards/*/objects/*", DataDomain)}, + {"Class, no shard, no id", "class1", "", "", fmt.Sprintf("%s/collections/Class1/shards/*/objects/*", DataDomain)}, + {"No class, shard, no id", "", "shard1", "", fmt.Sprintf("%s/collections/*/shards/shard1/objects/*", DataDomain)}, + {"No class, no shard, id", "", "", "id1", fmt.Sprintf("%s/collections/*/shards/*/objects/id1", DataDomain)}, + {"Class, shard, no id", "class1", "shard1", "", fmt.Sprintf("%s/collections/Class1/shards/shard1/objects/*", DataDomain)}, + {"Class, no shard, id", "class1", "", "id1", fmt.Sprintf("%s/collections/Class1/shards/*/objects/id1", DataDomain)}, + {"No class, shard, id", "", "shard1", "id1", fmt.Sprintf("%s/collections/*/shards/shard1/objects/id1", DataDomain)}, + {"Class, shard, id", "class1", "shard1", "id1", fmt.Sprintf("%s/collections/Class1/shards/shard1/objects/id1", DataDomain)}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := Objects(tt.class, tt.shard, tt.id) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestTenants(t *testing.T) { + tests := []struct { + name string + class string + shards []string + expected []string + }{ + {"No class, no tenant", "", []string{}, []string{fmt.Sprintf("%s/collections/*/shards/*", SchemaDomain)}}, + {"Class, no tenant", "class1", []string{}, []string{fmt.Sprintf("%s/collections/Class1/shards/*", SchemaDomain)}}, + {"No class, single tenant", "", []string{"tenant1"}, []string{fmt.Sprintf("%s/collections/*/shards/tenant1", SchemaDomain)}}, + {"Class, single tenants", "class1", []string{"tenant1"}, []string{fmt.Sprintf("%s/collections/Class1/shards/tenant1", SchemaDomain)}}, + {"Class, multiple tenants", "class1", []string{"tenant1", 
"tenant2"}, []string{fmt.Sprintf("%s/collections/Class1/shards/tenant1", SchemaDomain), fmt.Sprintf("%s/collections/Class1/shards/tenant2", SchemaDomain)}}, + {"Class, empty tenant", "class1", []string{"tenant1", ""}, []string{fmt.Sprintf("%s/collections/Class1/shards/tenant1", SchemaDomain), fmt.Sprintf("%s/collections/Class1/shards/*", SchemaDomain)}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := ShardsMetadata(tt.class, tt.shards...) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestGetWildcardPath(t *testing.T) { + tests := []struct { + name string + resource string + expected string + }{ + // Data domain tests + { + name: "data domain full path", + resource: "data/collections/Class1/shards/Tenant1/objects/123", + expected: "data/collections/Class1/shards/Tenant1/objects/*", + }, + { + name: "data domain incomplete path", + resource: "data/collections/Class1/shards/Tenant1", + expected: "data/collections/Class1/shards/*", + }, + + // Schema domain tests + { + name: "schema domain full path", + resource: "schema/collections/Class1/shards/Tenant1", + expected: "schema/collections/Class1/shards/*", + }, + { + name: "schema domain full path", + resource: "schema/collections/Class1/shards/Tenant1", + expected: "schema/collections/Class1/shards/*", + }, + { + name: "schema domain incomplete path", + resource: "schema/collections/Class1", + expected: "schema/collections/*", + }, + + // Backups domain tests + { + name: "backups domain full path", + resource: "backups/collections/Class1", + expected: "backups/collections/*", + }, + { + name: "backups domain incomplete path", + resource: "backups/collections", + expected: "backups/*", + }, + + // Users domain tests + { + name: "users domain", + resource: "users/user1", + expected: "users/*", + }, + + // Roles domain tests + { + name: "roles domain", + resource: "roles/role1", + expected: "roles/*", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t 
 *testing.T) { + result := WildcardPath(tt.resource) + assert.Equal(t, tt.expected, result, "WildcardPath(%q) = %q, want %q", + tt.resource, result, tt.expected) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/replica/transport.go b/platform/dbops/binaries/weaviate-src/usecases/replica/transport.go new file mode 100644 index 0000000000000000000000000000000000000000..02fa6ca0192b9b4c6b6397e0678d3f8bbd6bd5d6 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/replica/transport.go @@ -0,0 +1,293 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package replica + +import ( + "context" + "fmt" + "time" + + "github.com/go-openapi/strfmt" + "github.com/weaviate/weaviate/cluster/router/types" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/storobj" + "github.com/weaviate/weaviate/usecases/objects" + "github.com/weaviate/weaviate/usecases/replica/hashtree" +) + +const ( + // RequestKey is used for marshalling request IDs + RequestKey = "request_id" + SchemaVersionKey = "schema_version" +) + +// Client is used to read and write objects on replicas +type Client interface { + RClient + WClient +} + +// StatusCode communicates the cause of failure during replication +type StatusCode int + +const ( + StatusOK = 0 + StatusClassNotFound = iota + 200 + StatusShardNotFound + StatusNotFound + StatusAlreadyExisted + StatusNotReady + StatusConflict = iota + 300 + StatusPreconditionFailed + StatusReadOnly + StatusObjectNotFound +) + +// Error reports an error happening during replication +type Error struct { + Code StatusCode `json:"code"` + Msg string 
`json:"msg,omitempty"` + Err error `json:"-"` +} + +// Empty checks whether e is an empty error which equivalent to e == nil +func (e *Error) Empty() bool { + return e.Code == StatusOK && e.Msg == "" && e.Err == nil +} + +// NewError create new replication error +func NewError(code StatusCode, msg string) *Error { + return &Error{code, msg, nil} +} + +func (e *Error) Clone() *Error { + return &Error{Code: e.Code, Msg: e.Msg, Err: e.Err} +} + +// Unwrap underlying error +func (e *Error) Unwrap() error { return e.Err } + +func (e *Error) Error() string { + return fmt.Sprintf("%s %q: %v", StatusText(e.Code), e.Msg, e.Err) +} + +func (e *Error) IsStatusCode(sc StatusCode) bool { + return e.Code == sc +} + +// StatusText returns a text for the status code. It returns the empty +// string if the code is unknown. +func StatusText(code StatusCode) string { + switch code { + case StatusOK: + return "ok" + case StatusNotFound: + return "not found" + case StatusClassNotFound: + return "class not found" + case StatusShardNotFound: + return "shard not found" + case StatusConflict: + return "conflict" + case StatusPreconditionFailed: + return "precondition failed" + case StatusAlreadyExisted: + return "already existed" + case StatusNotReady: + return "local index not ready" + case StatusReadOnly: + return "read only" + case StatusObjectNotFound: + return "object not found" + default: + return "" + } +} + +func (e *Error) Timeout() bool { + t, ok := e.Err.(interface { + Timeout() bool + }) + return ok && t.Timeout() +} + +type SimpleResponse struct { + Errors []Error `json:"errors,omitempty"` +} + +func (r *SimpleResponse) FirstError() error { + for i, err := range r.Errors { + if !err.Empty() { + return &r.Errors[i] + } + } + return nil +} + +// DeleteBatchResponse represents the response returned by DeleteObjects +type DeleteBatchResponse struct { + Batch []UUID2Error `json:"batch,omitempty"` +} + +type UUID2Error struct { + UUID string `json:"uuid,omitempty"` + Error Error 
`json:"error,omitempty"` +} + +// FirstError returns the first found error +func (r *DeleteBatchResponse) FirstError() error { + for i, ue := range r.Batch { + if !ue.Error.Empty() { + return &r.Batch[i].Error + } + } + return nil +} + +func fromReplicas(xs []Replica) []*storobj.Object { + rs := make([]*storobj.Object, len(xs)) + for i := range xs { + rs[i] = xs[i].Object + } + return rs +} + +type DigestObjectsInRangeReq struct { + InitialUUID strfmt.UUID `json:"initialUUID,omitempty"` + FinalUUID strfmt.UUID `json:"finalUUID,omitempty"` + Limit int `json:"limit,omitempty"` +} + +type DigestObjectsInRangeResp struct { + Digests []types.RepairResponse `json:"digests,omitempty"` +} + +// WClient is the client used to write to replicas +type WClient interface { + PutObject(ctx context.Context, host, index, shard, requestID string, + obj *storobj.Object, schemaVersion uint64) (SimpleResponse, error) + DeleteObject(ctx context.Context, host, index, shard, requestID string, + id strfmt.UUID, deletionTime time.Time, schemaVersion uint64) (SimpleResponse, error) + PutObjects(ctx context.Context, host, index, shard, requestID string, + objs []*storobj.Object, schemaVersion uint64) (SimpleResponse, error) + MergeObject(ctx context.Context, host, index, shard, requestID string, + mergeDoc *objects.MergeDocument, schemaVersion uint64) (SimpleResponse, error) + DeleteObjects(ctx context.Context, host, index, shard, requestID string, + uuids []strfmt.UUID, deletionTime time.Time, dryRun bool, schemaVersion uint64) (SimpleResponse, error) + AddReferences(ctx context.Context, host, index, shard, requestID string, + refs []objects.BatchReference, schemaVersion uint64) (SimpleResponse, error) + Commit(ctx context.Context, host, index, shard, requestID string, resp interface{}) error + Abort(ctx context.Context, host, index, shard, requestID string) (SimpleResponse, error) +} + +// RClient is the client used to read from remote replicas +type RClient interface { + // FetchObject 
fetches one object + FetchObject(_ context.Context, host, index, shard string, + id strfmt.UUID, props search.SelectProperties, + additional additional.Properties, numRetries int) (Replica, error) + + // FetchObjects fetches objects specified in ids list. + FetchObjects(_ context.Context, host, index, shard string, + ids []strfmt.UUID) ([]Replica, error) + + // OverwriteObjects conditionally updates existing objects. + OverwriteObjects(_ context.Context, host, index, shard string, + _ []*objects.VObject) ([]types.RepairResponse, error) + + // DigestObjects finds a list of objects and returns a compact representation + // of a list of the objects. This is used by the replicator to optimize the + // number of bytes transferred over the network when fetching a replicated + // object + DigestObjects(ctx context.Context, host, index, shard string, + ids []strfmt.UUID, numRetries int) ([]types.RepairResponse, error) + + FindUUIDs(ctx context.Context, host, index, shard string, + filters *filters.LocalFilter) ([]strfmt.UUID, error) + + DigestObjectsInRange(ctx context.Context, host, index, shard string, + initialUUID, finalUUID strfmt.UUID, limit int) ([]types.RepairResponse, error) + + HashTreeLevel(ctx context.Context, host, index, shard string, level int, + discriminant *hashtree.Bitset) (digests []hashtree.Digest, err error) +} + +// FinderClient extends RClient with consistency checks +type FinderClient struct { + cl RClient +} + +// FullRead reads full object +func (fc FinderClient) FullRead(ctx context.Context, + host, index, shard string, + id strfmt.UUID, + props search.SelectProperties, + additional additional.Properties, + numRetries int, +) (Replica, error) { + return fc.cl.FetchObject(ctx, host, index, shard, id, props, additional, numRetries) +} + +func (fc FinderClient) HashTreeLevel(ctx context.Context, + host, index, shard string, level int, discriminant *hashtree.Bitset, +) (digests []hashtree.Digest, err error) { + return fc.cl.HashTreeLevel(ctx, host, 
index, shard, level, discriminant) +} + +// DigestReads reads digests of all specified objects +func (fc FinderClient) DigestReads(ctx context.Context, + host, index, shard string, + ids []strfmt.UUID, numRetries int, +) ([]types.RepairResponse, error) { + n := len(ids) + rs, err := fc.cl.DigestObjects(ctx, host, index, shard, ids, numRetries) + if err == nil && len(rs) != n { + err = fmt.Errorf("malformed digest read response: length expected %d got %d", n, len(rs)) + } + return rs, err +} + +func (fc FinderClient) DigestObjectsInRange(ctx context.Context, + host, index, shard string, + initialUUID, finalUUID strfmt.UUID, limit int, +) ([]types.RepairResponse, error) { + return fc.cl.DigestObjectsInRange(ctx, host, index, shard, initialUUID, finalUUID, limit) +} + +// FullReads read full objects +func (fc FinderClient) FullReads(ctx context.Context, + host, index, shard string, + ids []strfmt.UUID, +) ([]Replica, error) { + n := len(ids) + rs, err := fc.cl.FetchObjects(ctx, host, index, shard, ids) + if m := len(rs); err == nil && n != m { + err = fmt.Errorf("malformed full read response: length expected %d got %d", n, m) + } + return rs, err +} + +// Overwrite specified object with most recent contents +func (fc FinderClient) Overwrite(ctx context.Context, + host, index, shard string, + xs []*objects.VObject, +) ([]types.RepairResponse, error) { + return fc.cl.OverwriteObjects(ctx, host, index, shard, xs) +} + +func (fc FinderClient) FindUUIDs(ctx context.Context, + host, class, shard string, filters *filters.LocalFilter, +) ([]strfmt.UUID, error) { + return fc.cl.FindUUIDs(ctx, host, class, shard, filters) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/replica/transport_test.go b/platform/dbops/binaries/weaviate-src/usecases/replica/transport_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e84cf081cd144e51f8ae1630a7b768ed50b41f70 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/usecases/replica/transport_test.go @@ -0,0 +1,67 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package replica_test + +import ( + "encoding/json" + "testing" + "time" + + "github.com/weaviate/weaviate/usecases/replica" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "golang.org/x/net/context" +) + +func TestReplicationErrorTimeout(t *testing.T) { + ctx := context.Background() + ctx, cancel := context.WithDeadline(ctx, time.Now()) + defer cancel() + err := &replica.Error{Err: ctx.Err()} + assert.True(t, err.Timeout()) + err = err.Clone() + assert.ErrorIs(t, err, context.DeadlineExceeded) +} + +func TestReplicationErrorMarshal(t *testing.T) { + rawErr := replica.Error{Code: replica.StatusClassNotFound, Msg: "Article", Err: errors.New("error cannot be marshalled")} + bytes, err := json.Marshal(&rawErr) + assert.Nil(t, err) + got := replica.NewError(0, "") + assert.Nil(t, json.Unmarshal(bytes, got)) + want := &replica.Error{Code: replica.StatusClassNotFound, Msg: "Article"} + assert.Equal(t, want, got) +} + +func TestReplicationErrorStatus(t *testing.T) { + tests := []struct { + code replica.StatusCode + desc string + }{ + {-1, ""}, + {replica.StatusOK, "ok"}, + {replica.StatusClassNotFound, "class not found"}, + {replica.StatusShardNotFound, "shard not found"}, + {replica.StatusNotFound, "not found"}, + {replica.StatusAlreadyExisted, "already existed"}, + {replica.StatusConflict, "conflict"}, + {replica.StatusPreconditionFailed, "precondition failed"}, + {replica.StatusReadOnly, "read only"}, + } + for _, test := range tests { + got := replica.StatusText(test.code) + if got != test.desc { + t.Errorf("StatusText(%d) want %v got %v", test.code, test.desc, got) + } + } 
+} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/alias.go b/platform/dbops/binaries/weaviate-src/usecases/schema/alias.go new file mode 100644 index 0000000000000000000000000000000000000000..9c96258ad9719849b14beb0659552a13e5cc69b7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/alias.go @@ -0,0 +1,144 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "context" + "errors" + "fmt" + + cschema "github.com/weaviate/weaviate/cluster/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/auth/authorization" + "github.com/weaviate/weaviate/usecases/auth/authorization/filter" +) + +func (h *Handler) GetAliases(ctx context.Context, principal *models.Principal, alias, className string) ([]*models.Alias, error) { + var class *models.Class + if className != "" { + name := schema.UppercaseClassName(className) + class = h.schemaReader.ReadOnlyClass(name) + } + aliases, err := h.schemaManager.GetAliases(ctx, alias, class) + if err != nil { + return nil, err + } + + filteredAliases := filter.New[*models.Alias](h.Authorizer, h.config.Authorization.Rbac).Filter( + ctx, + h.logger, + principal, + aliases, + authorization.READ, + func(alias *models.Alias) string { + return authorization.Aliases(className, alias.Alias)[0] + }, + ) + + return filteredAliases, nil +} + +func (h *Handler) GetAlias(ctx context.Context, principal *models.Principal, alias string) (*models.Alias, error) { + alias = schema.UppercaseClassName(alias) + // NOTE: We pass empty class, because this endpoint doesn't know what collection the alias belongs to + // hence if RBAC is enabled, the user has to have read 
permission for all the collection for api to go discover + // right collection for the alias. + if err := h.Authorizer.Authorize(ctx, principal, authorization.READ, authorization.Aliases("", alias)...); err != nil { + return nil, err + } + + a, err := h.schemaManager.GetAlias(ctx, alias) + if err != nil { + if errors.Is(err, cschema.ErrAliasNotFound) { + return nil, fmt.Errorf("alias %s not found: %w", alias, ErrNotFound) + } + return nil, err + } + return a, nil +} + +func (h *Handler) AddAlias(ctx context.Context, principal *models.Principal, + alias *models.Alias, +) (*models.Alias, uint64, error) { + alias.Class = schema.UppercaseClassName(alias.Class) + alias.Alias = schema.UppercaseClassName(alias.Alias) + + err := h.Authorizer.Authorize(ctx, principal, authorization.CREATE, authorization.Aliases(alias.Class, alias.Alias)...) + if err != nil { + return nil, 0, err + } + + // alias should have same validation as collection. + al, err := schema.ValidateAliasName(alias.Alias) + if err != nil { + return nil, 0, err + } + alias.Alias = al + + class := h.schemaReader.ReadOnlyClass(alias.Class) + version, err := h.schemaManager.CreateAlias(ctx, alias.Alias, class) + if err != nil { + return nil, 0, err + } + return &models.Alias{Alias: alias.Alias, Class: class.Class}, version, nil +} + +func (h *Handler) UpdateAlias(ctx context.Context, principal *models.Principal, + aliasName, targetClassName string, +) (*models.Alias, error) { + targetClassName = schema.UppercaseClassName(targetClassName) + aliasName = schema.UppercaseClassName(aliasName) + err := h.Authorizer.Authorize(ctx, principal, authorization.UPDATE, authorization.Aliases(targetClassName, aliasName)...) 
+ if err != nil { + return nil, err + } + aliases, err := h.schemaManager.GetAliases(ctx, aliasName, nil) + if err != nil { + return nil, err + } + + if len(aliases) != 1 { + return nil, fmt.Errorf("%w, no alias found with name: %s", ErrNotFound, aliasName) + } + + alias := aliases[0] + targetClass := h.schemaReader.ReadOnlyClass(targetClassName) + + _, err = h.schemaManager.ReplaceAlias(ctx, alias, targetClass) + if err != nil { + return nil, err + } + + return &models.Alias{Alias: alias.Alias, Class: targetClass.Class}, nil +} + +func (h *Handler) DeleteAlias(ctx context.Context, principal *models.Principal, aliasName string) error { + aliasName = schema.UppercaseClassName(aliasName) + err := h.Authorizer.Authorize(ctx, principal, authorization.DELETE, authorization.Aliases("", aliasName)...) + if err != nil { + return err + } + + aliases, err := h.schemaManager.GetAliases(ctx, aliasName, nil) + if err != nil { + return err + } + if len(aliases) == 0 { + return fmt.Errorf("alias not found: %w", ErrNotFound) + } + + if _, err = h.schemaManager.DeleteAlias(ctx, aliasName); err != nil { + return err + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/authorization_test.go b/platform/dbops/binaries/weaviate-src/usecases/schema/authorization_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d08412cefd4b180270b14589855e11dd96319b64 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/authorization_test.go @@ -0,0 +1,241 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "context" + "errors" + "fmt" + "reflect" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/auth/authorization" + "github.com/weaviate/weaviate/usecases/auth/authorization/mocks" +) + +// A component-test like test suite that makes sure that every available UC is +// potentially protected with the Authorization plugin +func Test_Schema_Authorization(t *testing.T) { + type testCase struct { + methodName string + additionalArgs []interface{} + expectedVerb string + expectedResources []string + } + + tests := []testCase{ + { + methodName: "GetClass", + additionalArgs: []interface{}{"classname"}, + expectedVerb: authorization.READ, + expectedResources: authorization.CollectionsMetadata("classname"), + }, + { + methodName: "GetConsistentClass", + additionalArgs: []interface{}{"classname", false}, + expectedVerb: authorization.READ, + expectedResources: authorization.CollectionsMetadata("classname"), + }, + { + methodName: "GetCachedClass", + additionalArgs: []interface{}{"classname"}, + expectedVerb: authorization.READ, + expectedResources: authorization.CollectionsMetadata("classname"), + }, + { + methodName: "AddClass", + additionalArgs: []interface{}{&models.Class{Class: "classname"}}, + expectedVerb: authorization.CREATE, + expectedResources: authorization.CollectionsMetadata("Classname"), + }, + { + methodName: "UpdateClass", + additionalArgs: []interface{}{"class", &models.Class{Class: "class"}}, + expectedVerb: authorization.UPDATE, + expectedResources: authorization.CollectionsMetadata("class"), + }, + { + methodName: "DeleteClass", + additionalArgs: []interface{}{"somename"}, + expectedVerb: authorization.DELETE, + expectedResources: authorization.CollectionsMetadata("somename"), + }, + { + methodName: "AddClassProperty", + 
additionalArgs: []interface{}{&models.Class{Class: "classname"}, "classname", false, &models.Property{}}, + expectedVerb: authorization.UPDATE, + expectedResources: authorization.CollectionsMetadata("classname"), + }, + { + methodName: "DeleteClassProperty", + additionalArgs: []interface{}{"somename", "someprop"}, + expectedVerb: authorization.UPDATE, + expectedResources: authorization.CollectionsMetadata("somename"), + }, + { + methodName: "UpdateShardStatus", + additionalArgs: []interface{}{"className", "shardName", "targetStatus"}, + expectedVerb: authorization.UPDATE, + expectedResources: authorization.ShardsMetadata("className", "shardName"), + }, + { + methodName: "ShardsStatus", + additionalArgs: []interface{}{"className", "tenant"}, + expectedVerb: authorization.READ, + expectedResources: authorization.ShardsMetadata("className", "tenant"), + }, + { + methodName: "AddTenants", + additionalArgs: []interface{}{"className", []*models.Tenant{{Name: "P1"}}}, + expectedVerb: authorization.CREATE, + expectedResources: authorization.ShardsMetadata("className", "P1"), + }, + { + methodName: "UpdateTenants", + additionalArgs: []interface{}{"className", []*models.Tenant{ + {Name: "P1", ActivityStatus: models.TenantActivityStatusHOT}, + }}, + expectedVerb: authorization.UPDATE, + expectedResources: authorization.ShardsMetadata("className", "P1"), + }, + { + methodName: "DeleteTenants", + additionalArgs: []interface{}{"className", []string{"P1"}}, + expectedVerb: authorization.DELETE, + expectedResources: authorization.ShardsMetadata("className", "P1"), + }, + { + methodName: "ConsistentTenantExists", + additionalArgs: []interface{}{"className", false, "P1"}, + expectedVerb: authorization.READ, + expectedResources: authorization.ShardsMetadata("className", "P1"), + }, + { + methodName: "AddAlias", + additionalArgs: []interface{}{&models.Alias{Class: "classname", Alias: "aliasName"}}, + expectedVerb: authorization.CREATE, + expectedResources: 
authorization.Aliases("Classname", "AliasName"), + }, + { + methodName: "UpdateAlias", + additionalArgs: []interface{}{"aliasName", "class"}, + expectedVerb: authorization.UPDATE, + expectedResources: authorization.Aliases("class", "aliasName"), + }, + { + methodName: "DeleteAlias", + additionalArgs: []interface{}{"aliasName"}, + expectedVerb: authorization.DELETE, + expectedResources: authorization.Aliases("", "aliasName"), + }, + { + methodName: "GetAlias", + additionalArgs: []interface{}{"aliasName"}, + expectedVerb: authorization.READ, + expectedResources: authorization.Aliases("", "aliasName"), + }, + } + + t.Run("verify that a test for every public method exists", func(t *testing.T) { + testedMethods := make([]string, len(tests)) + for i, test := range tests { + testedMethods[i] = test.methodName + } + + for _, method := range allExportedMethods(&Handler{classGetter: nil}) { + switch method { + case "RegisterSchemaUpdateCallback", + // introduced by sync.Mutex in go 1.18 + "UpdateMeta", "GetSchemaSkipAuth", "IndexedInverted", "RLock", "RUnlock", "Lock", "Unlock", + "TryLock", "RLocker", "TryRLock", "TxManager", "RestoreClass", + "ShardOwner", "TenantShard", "ShardFromUUID", "LockGuard", "RLockGuard", "ShardReplicas", + "GetCachedClassNoAuth", + // internal methods to indicate readiness state + "StartServing", "Shutdown", "Statistics", + // Cluster/nodes related endpoint + "JoinNode", "RemoveNode", "Nodes", "NodeName", "ClusterHealthScore", "ClusterStatus", "ResolveParentNodes", + // revert to schema v0 (non raft), + "GetConsistentSchema", "GetConsistentTenants", "GetConsistentTenant", "GetAliases", + // ignored because it will check if schema has collections otherwise returns nothing + "StoreSchemaV1": + // don't require auth on methods which are exported because other + // packages need to call them for maintenance and other regular jobs, + // but aren't user facing + continue + } + assert.Contains(t, testedMethods, method) + } + }) + + t.Run("verify the 
tested methods require correct permissions from the Authorizer", func(t *testing.T) { + principal := &models.Principal{} + for _, test := range tests { + t.Run(test.methodName, func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + authorizer.SetErr(errors.New("just a test fake")) + handler, fakeSchemaManager := newTestHandlerWithCustomAuthorizer(t, &fakeDB{}, authorizer) + fakeSchemaManager.On("ReadOnlySchema").Return(models.Schema{}) + fakeSchemaManager.On("ReadOnlyClass", mock.Anything).Return(models.Class{}) + fakeSchemaManager.On("GetAliases", mock.Anything, mock.Anything, mock.Anything).Return([]*models.Alias{{}}, nil) + fakeSchemaManager.On("GetAlias", mock.Anything, mock.Anything).Return(&models.Alias{}, nil) + + var args []interface{} + if test.methodName == "GetSchema" || test.methodName == "GetConsistentSchema" { + // no context on this method + args = append([]interface{}{principal}, test.additionalArgs...) + } else { + args = append([]interface{}{context.Background(), principal}, test.additionalArgs...) + } + out, _ := callFuncByName(handler, test.methodName, args...) 
+ + require.Len(t, authorizer.Calls(), 1, "Authorizer must be called") + assert.Equal(t, errors.New("just a test fake"), out[len(out)-1].Interface(), + "execution must abort with Authorizer error") + assert.Equal(t, mocks.AuthZReq{Principal: principal, Verb: test.expectedVerb, Resources: test.expectedResources}, + authorizer.Calls()[0], "correct parameters must have been used on Authorizer") + }) + } + }) +} + +// inspired by https://stackoverflow.com/a/33008200 +func callFuncByName(manager interface{}, funcName string, params ...interface{}) (out []reflect.Value, err error) { + managerValue := reflect.ValueOf(manager) + m := managerValue.MethodByName(funcName) + if !m.IsValid() { + return make([]reflect.Value, 0), fmt.Errorf("Method not found \"%s\"", funcName) + } + in := make([]reflect.Value, len(params)) + for i, param := range params { + in[i] = reflect.ValueOf(param) + } + out = m.Call(in) + return +} + +func allExportedMethods(subject interface{}) []string { + var methods []string + subjectType := reflect.TypeOf(subject) + for i := 0; i < subjectType.NumMethod(); i++ { + name := subjectType.Method(i).Name + if name[0] >= 'A' && name[0] <= 'Z' { + methods = append(methods, name) + } + } + + return methods +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/class.go b/platform/dbops/binaries/weaviate-src/usecases/schema/class.go new file mode 100644 index 0000000000000000000000000000000000000000..70de15f21a7791ba97c2da293ba3ee8c45b9911f --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/class.go @@ -0,0 +1,1009 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "context" + "encoding/json" + "fmt" + "os" + "reflect" + "strings" + + "github.com/pkg/errors" + "github.com/prometheus/client_golang/prometheus" + "github.com/weaviate/weaviate/entities/modelsext" + schemaConfig "github.com/weaviate/weaviate/entities/schema/config" + "github.com/weaviate/weaviate/entities/vectorindex/hnsw" + + "github.com/weaviate/weaviate/adapters/repos/db/inverted/stopwords" + cschema "github.com/weaviate/weaviate/cluster/schema" + "github.com/weaviate/weaviate/entities/backup" + "github.com/weaviate/weaviate/entities/classcache" + entcfg "github.com/weaviate/weaviate/entities/config" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/replication" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/vectorindex" + "github.com/weaviate/weaviate/entities/versioned" + "github.com/weaviate/weaviate/usecases/auth/authorization" + "github.com/weaviate/weaviate/usecases/config" + configRuntime "github.com/weaviate/weaviate/usecases/config/runtime" + "github.com/weaviate/weaviate/usecases/monitoring" + "github.com/weaviate/weaviate/usecases/replica" + "github.com/weaviate/weaviate/usecases/sharding" + shardingcfg "github.com/weaviate/weaviate/usecases/sharding/config" +) + +func (h *Handler) GetClass(ctx context.Context, principal *models.Principal, name string) (*models.Class, error) { + if err := h.Authorizer.Authorize(ctx, principal, authorization.READ, authorization.CollectionsMetadata(name)...); err != nil { + return nil, err + } + name = schema.UppercaseClassName(name) + + cl := h.schemaReader.ReadOnlyClass(name) + return cl, nil +} + +func (h *Handler) GetConsistentClass(ctx context.Context, principal *models.Principal, + name string, consistency bool, +) (*models.Class, uint64, error) { + if err := h.Authorizer.Authorize(ctx, principal, authorization.READ, authorization.CollectionsMetadata(name)...); 
err != nil { + return nil, 0, err + } + + name = schema.UppercaseClassName(name) + + if consistency { + vclasses, err := h.schemaManager.QueryReadOnlyClasses(name) + return vclasses[name].Class, vclasses[name].Version, err + } + class, err := h.schemaReader.ReadOnlyClassWithVersion(ctx, name, 0) + return class, 0, err +} + +// GetCachedClass will return the class from the cache if it exists. Note that the context cache +// will likely be at the "request" or "operation" level and not be shared between requests. +// Uses the Handler's getClassMethod to determine how to get the class data. +func (h *Handler) GetCachedClass(ctxWithClassCache context.Context, + principal *models.Principal, names ...string, +) (map[string]versioned.Class, error) { + if err := h.Authorizer.Authorize(ctxWithClassCache, principal, authorization.READ, authorization.CollectionsMetadata(names...)...); err != nil { + return nil, err + } + + return classcache.ClassesFromContext(ctxWithClassCache, func(names ...string) (map[string]versioned.Class, error) { + return h.classGetter.getClasses(names) + }, names...) +} + +// GetCachedClassNoAuth will return the class from the cache if it exists. Note that the context cache +// will likely be at the "request" or "operation" level and not be shared between requests. +// Uses the Handler's getClassMethod to determine how to get the class data. +func (h *Handler) GetCachedClassNoAuth(ctxWithClassCache context.Context, names ...string) (map[string]versioned.Class, error) { + return classcache.ClassesFromContext(ctxWithClassCache, func(names ...string) (map[string]versioned.Class, error) { + return h.classGetter.getClasses(names) + }, names...) 
+} + +// AddClass to the schema +func (h *Handler) AddClass(ctx context.Context, principal *models.Principal, + cls *models.Class, +) (*models.Class, uint64, error) { + cls.Class = schema.UppercaseClassName(cls.Class) + cls.Properties = schema.LowercaseAllPropertyNames(cls.Properties) + + err := h.Authorizer.Authorize(ctx, principal, authorization.CREATE, authorization.CollectionsMetadata(cls.Class)...) + if err != nil { + return nil, 0, err + } + + classGetterWithAuth := func(name string) (*models.Class, error) { + if err := h.Authorizer.Authorize(ctx, principal, authorization.READ, authorization.CollectionsMetadata(name)...); err != nil { + return nil, err + } + return h.schemaReader.ReadOnlyClass(name), nil + } + + if err := h.setNewClassDefaults(cls, h.config.Replication); err != nil { + return nil, 0, err + } + + if err := h.validateCanAddClass(ctx, cls, classGetterWithAuth, false); err != nil { + return nil, 0, err + } + // migrate only after validation in completed + h.migrateClassSettings(cls) + if err := h.parser.ParseClass(cls); err != nil { + return nil, 0, err + } + + err = h.invertedConfigValidator(cls.InvertedIndexConfig) + if err != nil { + return nil, 0, err + } + + existingCollectionsCount, err := h.schemaManager.QueryCollectionsCount() + if err != nil { + h.logger.WithField("error", err).Error("could not query the collections count") + } + + limit := h.schemaConfig.MaximumAllowedCollectionsCount.Get() + + if limit != config.DefaultMaximumAllowedCollectionsCount && existingCollectionsCount >= limit { + return nil, 0, fmt.Errorf( + "cannot create collection: maximum number of collections (%d) reached - "+ + "please consider switching to multi-tenancy or increasing the collection count limit - "+ + "see https://weaviate.io/collections-count-limit to learn about available options and best practices "+ + "when working with multiple collections and tenants", + limit) + } + + shardState, err := sharding.InitState(cls.Class, + 
cls.ShardingConfig.(shardingcfg.Config), + h.clusterState.LocalName(), h.schemaManager.StorageCandidates(), cls.ReplicationConfig.Factor, + schema.MultiTenancyEnabled(cls)) + if err != nil { + return nil, 0, errors.Wrap(err, "init sharding state") + } + + defaultQuantization := h.config.DefaultQuantization + h.enableQuantization(cls, defaultQuantization) + + version, err := h.schemaManager.AddClass(ctx, cls, shardState) + if err != nil { + return nil, 0, err + } + return cls, version, err +} + +func (h *Handler) enableQuantization(class *models.Class, defaultQuantization *configRuntime.DynamicValue[string]) { + compression := defaultQuantization.Get() + if !hasTargetVectors(class) || class.VectorIndexType != "" { + class.VectorIndexConfig = setDefaultQuantization(class.VectorIndexType, class.VectorIndexConfig.(schemaConfig.VectorIndexConfig), compression) + } + + for k, vectorConfig := range class.VectorConfig { + vectorConfig.VectorIndexConfig = setDefaultQuantization(class.VectorIndexType, vectorConfig.VectorIndexConfig.(schemaConfig.VectorIndexConfig), compression) + class.VectorConfig[k] = vectorConfig + } +} + +func setDefaultQuantization(vectorIndexType string, vectorIndexConfig schemaConfig.VectorIndexConfig, compression string) schemaConfig.VectorIndexConfig { + if len(vectorIndexType) == 0 { + vectorIndexType = vectorindex.DefaultVectorIndexType + } + if vectorIndexType == vectorindex.VectorIndexTypeHNSW && vectorIndexConfig.IndexType() == vectorindex.VectorIndexTypeHNSW { + hnswConfig := vectorIndexConfig.(hnsw.UserConfig) + pqEnabled := hnswConfig.PQ.Enabled + sqEnabled := hnswConfig.SQ.Enabled + rqEnabled := hnswConfig.RQ.Enabled + bqEnabled := hnswConfig.BQ.Enabled + skipDefaultQuantization := hnswConfig.SkipDefaultQuantization + hnswConfig.TrackDefaultQuantization = false + if pqEnabled || sqEnabled || rqEnabled || bqEnabled { + return hnswConfig + } + if skipDefaultQuantization { + return hnswConfig + } + switch compression { + case "pq": + 
hnswConfig.PQ.Enabled = true + case "sq": + hnswConfig.SQ.Enabled = true + case "rq-1": + hnswConfig.RQ.Enabled = true + hnswConfig.RQ.Bits = 1 + hnswConfig.RQ.RescoreLimit = hnsw.DefaultBRQRescoreLimit + case "rq-8": + hnswConfig.RQ.Enabled = true + hnswConfig.RQ.Bits = 8 + hnswConfig.RQ.RescoreLimit = hnsw.DefaultRQRescoreLimit + case "bq": + hnswConfig.BQ.Enabled = true + default: + return hnswConfig + } + hnswConfig.TrackDefaultQuantization = true + return hnswConfig + } + return vectorIndexConfig +} + +func (h *Handler) RestoreClass(ctx context.Context, d *backup.ClassDescriptor, m map[string]string, overwriteAlias bool) error { + // get schema and sharding state + class := &models.Class{} + if err := json.Unmarshal(d.Schema, &class); err != nil { + return fmt.Errorf("unmarshal class schema: %w", err) + } + var shardingState sharding.State + if d.ShardingState != nil { + err := json.Unmarshal(d.ShardingState, &shardingState) + if err != nil { + return fmt.Errorf("unmarshal sharding state: %w", err) + } + } + + aliases := make([]*models.Alias, 0) + if d.AliasesIncluded { + if err := json.Unmarshal(d.Aliases, &aliases); err != nil { + return fmt.Errorf("unmarshal aliases: %w", err) + } + } + + metric, err := monitoring.GetMetrics().BackupRestoreClassDurations.GetMetricWithLabelValues(class.Class) + if err == nil { + timer := prometheus.NewTimer(metric) + defer timer.ObserveDuration() + } + + class.Class = schema.UppercaseClassName(class.Class) + class.Properties = schema.LowercaseAllPropertyNames(class.Properties) + + if err := h.setClassDefaults(class, h.config.Replication); err != nil { + return err + } + + // no validation of reference for restore + classGetterWrapper := func(name string) (*models.Class, error) { + return h.schemaReader.ReadOnlyClass(name), nil + } + + err = h.validateClassInvariants(ctx, class, classGetterWrapper, true) + if err != nil { + return err + } + // migrate only after validation in completed + h.migrateClassSettings(class) + + if 
err := h.parser.ParseClass(class); err != nil { + return err + } + + err = h.invertedConfigValidator(class.InvertedIndexConfig) + if err != nil { + return err + } + + shardingState.MigrateFromOldFormat() + err = shardingState.MigrateShardingStateReplicationFactor() + if err != nil { + return fmt.Errorf("error while migrating replication factor: %w", err) + } + shardingState.ApplyNodeMapping(m) + _, err = h.schemaManager.RestoreClass(ctx, class, &shardingState) + if err != nil { + return fmt.Errorf("error when trying to restore class: %w", err) + } + + for _, alias := range aliases { + var err error + _, err = h.schemaManager.CreateAlias(ctx, alias.Alias, class) + if errors.Is(err, cschema.ErrAliasExists) { + // Overwrite if user asks to during restore + if overwriteAlias { + _, err = h.schemaManager.DeleteAlias(ctx, alias.Alias) + if err != nil { + return fmt.Errorf("failed to restore alias for class: delete alias failed: %w", err) + } + // retry again + _, err = h.schemaManager.CreateAlias(ctx, alias.Alias, class) + if err != nil { + return fmt.Errorf("failed to restore alias for class: create alias failed: %w", err) + } + return nil + } + // Schema returned alias already exists error. So let user know + // that there is a "flag overwrite" if she want's to overwrite alias. + return fmt.Errorf("failed to restore alias for class: alias already exists. You can overwrite using `overwrite_alias` param when restoring") + } + + if err != nil { + return fmt.Errorf("failed to restore alias for class: %w", err) + } + } + + return nil +} + +// DeleteClass from the schema +func (h *Handler) DeleteClass(ctx context.Context, principal *models.Principal, class string) error { + err := h.Authorizer.Authorize(ctx, principal, authorization.DELETE, authorization.CollectionsMetadata(class)...) 
+ if err != nil { + return err + } + + class = schema.UppercaseClassName(class) + + if _, err = h.schemaManager.DeleteClass(ctx, class); err != nil { + return err + } + + return nil +} + +func (h *Handler) UpdateClass(ctx context.Context, principal *models.Principal, + className string, updated *models.Class, +) error { + err := h.Authorizer.Authorize(ctx, principal, authorization.UPDATE, authorization.CollectionsMetadata(className)...) + if err != nil || updated == nil { + return err + } + + return UpdateClassInternal(h, ctx, className, updated) +} + +// bypass the auth check for internal class update requests +func UpdateClassInternal(h *Handler, ctx context.Context, className string, updated *models.Class, +) error { + // make sure unset optionals on 'updated' don't lead to an error, as all + // optionals would have been set with defaults on the initial already + if err := h.setClassDefaults(updated, h.config.Replication); err != nil { + return err + } + + if err := h.parser.ParseClass(updated); err != nil { + return err + } + + // ideally, these calls would be encapsulated in ParseClass but ParseClass is + // used in many different areas of the codebase that may cause BC issues with the + // new validation logic. Issue ref: gh-5860 + // As our testing becomes more comprehensive, we can move these calls into ParseClass + if err := h.parser.parseModuleConfig(updated); err != nil { + return fmt.Errorf("parse module config: %w", err) + } + + if err := h.parser.parseVectorConfig(updated); err != nil { + return fmt.Errorf("parse vector config: %w", err) + } + + if err := h.validateVectorSettings(updated); err != nil { + return err + } + + if initial := h.schemaReader.ReadOnlyClass(className); initial != nil { + if err := validateImmutableFields(initial, updated); err != nil { + return err + } + } + // A nil sharding state means that the sharding state will not be updated. 
+ _, err := h.schemaManager.UpdateClass(ctx, updated, nil) + return err +} + +func (m *Handler) setNewClassDefaults(class *models.Class, globalCfg replication.GlobalConfig) error { + if class.ShardingConfig != nil && schema.MultiTenancyEnabled(class) { + return fmt.Errorf("cannot have both shardingConfig and multiTenancyConfig") + } else if class.MultiTenancyConfig == nil { + class.MultiTenancyConfig = &models.MultiTenancyConfig{} + } else if class.MultiTenancyConfig.Enabled { + class.ShardingConfig = shardingcfg.Config{DesiredCount: 0} // tenant shards will be created dynamically + } + + if err := m.setClassDefaults(class, globalCfg); err != nil { + return err + } + + if class.ReplicationConfig == nil { + class.ReplicationConfig = &models.ReplicationConfig{ + Factor: int64(m.config.Replication.MinimumFactor), + DeletionStrategy: models.ReplicationConfigDeletionStrategyNoAutomatedResolution, + } + return nil + } + + if class.ReplicationConfig.DeletionStrategy == "" { + class.ReplicationConfig.DeletionStrategy = models.ReplicationConfigDeletionStrategyNoAutomatedResolution + } + return nil +} + +func (h *Handler) setClassDefaults(class *models.Class, globalCfg replication.GlobalConfig) error { + // set legacy vector index defaults only when: + // - no target vectors are configured + // - OR, there are target vectors configured AND there is a legacy vector configured + if !hasTargetVectors(class) || modelsext.ClassHasLegacyVectorIndex(class) { + if class.Vectorizer == "" { + class.Vectorizer = h.config.DefaultVectorizerModule + } + + if class.VectorIndexType == "" { + class.VectorIndexType = vectorindex.DefaultVectorIndexType + } + + if h.config.DefaultVectorDistanceMetric != "" { + if class.VectorIndexConfig == nil { + class.VectorIndexConfig = map[string]interface{}{"distance": h.config.DefaultVectorDistanceMetric} + } else if vIdxCfgMap, ok := class.VectorIndexConfig.(map[string]interface{}); ok && vIdxCfgMap["distance"] == nil { + 
class.VectorIndexConfig.(map[string]interface{})["distance"] = h.config.DefaultVectorDistanceMetric + } + } + } + + setInvertedConfigDefaults(class) + for _, prop := range class.Properties { + setPropertyDefaults(prop) + } + + if class.ReplicationConfig == nil { + class.ReplicationConfig = &models.ReplicationConfig{Factor: int64(globalCfg.MinimumFactor)} + } + + if class.ReplicationConfig.Factor > 0 && class.ReplicationConfig.Factor < int64(globalCfg.MinimumFactor) { + return fmt.Errorf("invalid replication factor: setup requires a minimum replication factor of %d: got %d", + globalCfg.MinimumFactor, class.ReplicationConfig.Factor) + } + + if class.ReplicationConfig.Factor < 1 { + class.ReplicationConfig.Factor = int64(globalCfg.MinimumFactor) + } + + h.moduleConfig.SetClassDefaults(class) + return nil +} + +func setPropertyDefaults(props ...*models.Property) { + setPropertyDefaultTokenization(props...) + setPropertyDefaultIndexing(props...) + for _, prop := range props { + setNestedPropertiesDefaults(prop.NestedProperties) + } +} + +func setPropertyDefaultTokenization(props ...*models.Property) { + for _, prop := range props { + switch dataType, _ := schema.AsPrimitive(prop.DataType); dataType { + case schema.DataTypeString, schema.DataTypeStringArray: + // deprecated as of v1.19, default tokenization was word + // which will be migrated to text+whitespace + if prop.Tokenization == "" { + prop.Tokenization = models.PropertyTokenizationWord + } + case schema.DataTypeText, schema.DataTypeTextArray: + if prop.Tokenization == "" { + if os.Getenv("DEFAULT_TOKENIZATION") != "" { + prop.Tokenization = os.Getenv("DEFAULT_TOKENIZATION") + } else { + prop.Tokenization = models.PropertyTokenizationWord + } + } + default: + // tokenization not supported for other data types + } + } +} + +func setPropertyDefaultIndexing(props ...*models.Property) { + for _, prop := range props { + // if IndexInverted is set but IndexFilterable and IndexSearchable are not + // migrate 
IndexInverted later. + if prop.IndexInverted != nil && + prop.IndexFilterable == nil && + prop.IndexSearchable == nil && + prop.IndexRangeFilters == nil { + continue + } + + vTrue := true + vFalse := false + if prop.IndexFilterable == nil { + prop.IndexFilterable = &vTrue + } + if prop.IndexSearchable == nil { + switch dataType, _ := schema.AsPrimitive(prop.DataType); dataType { + case schema.DataTypeString, schema.DataTypeStringArray: + // string/string[] are migrated to text/text[] later, + // at this point they are still valid data types, therefore should be handled here + prop.IndexSearchable = &vTrue + case schema.DataTypeText, schema.DataTypeTextArray: + prop.IndexSearchable = &vTrue + default: + prop.IndexSearchable = &vFalse + } + } + if prop.IndexRangeFilters == nil { + prop.IndexRangeFilters = &vFalse + } + } +} + +func setNestedPropertiesDefaults(properties []*models.NestedProperty) { + for _, property := range properties { + primitiveDataType, isPrimitive := schema.AsPrimitive(property.DataType) + nestedDataType, isNested := schema.AsNested(property.DataType) + + setNestedPropertyDefaultTokenization(property, primitiveDataType, nestedDataType, isPrimitive, isNested) + setNestedPropertyDefaultIndexing(property, primitiveDataType, nestedDataType, isPrimitive, isNested) + + if isNested { + setNestedPropertiesDefaults(property.NestedProperties) + } + } +} + +func setNestedPropertyDefaultTokenization(property *models.NestedProperty, + primitiveDataType, nestedDataType schema.DataType, + isPrimitive, isNested bool, +) { + if property.Tokenization == "" && isPrimitive { + switch primitiveDataType { + case schema.DataTypeText, schema.DataTypeTextArray: + property.Tokenization = models.NestedPropertyTokenizationWord + default: + // do nothing + } + } +} + +func setNestedPropertyDefaultIndexing(property *models.NestedProperty, + primitiveDataType, nestedDataType schema.DataType, + isPrimitive, isNested bool, +) { + vTrue := true + vFalse := false + + if 
property.IndexFilterable == nil { + property.IndexFilterable = &vTrue + + if isPrimitive && primitiveDataType == schema.DataTypeBlob { + property.IndexFilterable = &vFalse + } + } + + if property.IndexSearchable == nil { + property.IndexSearchable = &vFalse + + if isPrimitive { + switch primitiveDataType { + case schema.DataTypeText, schema.DataTypeTextArray: + property.IndexSearchable = &vTrue + default: + // do nothing + } + } + } + + if property.IndexRangeFilters == nil { + property.IndexRangeFilters = &vFalse + } +} + +func (h *Handler) migrateClassSettings(class *models.Class) { + for _, prop := range class.Properties { + migratePropertySettings(prop) + } +} + +func migratePropertySettings(props ...*models.Property) { + migratePropertyDataTypeAndTokenization(props...) + migratePropertyIndexInverted(props...) +} + +// as of v1.19 DataTypeString and DataTypeStringArray are deprecated +// here both are changed to Text/TextArray +// and proper, backward compatible tokenization +func migratePropertyDataTypeAndTokenization(props ...*models.Property) { + for _, prop := range props { + switch dataType, _ := schema.AsPrimitive(prop.DataType); dataType { + case schema.DataTypeString: + prop.DataType = schema.DataTypeText.PropString() + case schema.DataTypeStringArray: + prop.DataType = schema.DataTypeTextArray.PropString() + default: + // other types need no migration and do not support tokenization + continue + } + + switch prop.Tokenization { + case models.PropertyTokenizationWord: + prop.Tokenization = models.PropertyTokenizationWhitespace + case models.PropertyTokenizationField: + // stays field + } + } +} + +// as of v1.19 IndexInverted is deprecated and replaced with +// IndexFilterable (set inverted index) +// and IndexSearchable (map inverted index with term frequencies; +// therefore applicable only to text/text[] data types) +func migratePropertyIndexInverted(props ...*models.Property) { + vFalse := false + + for _, prop := range props { + // if none of new 
options is set, use inverted settings + if prop.IndexInverted != nil && + prop.IndexFilterable == nil && + prop.IndexSearchable == nil && + prop.IndexRangeFilters == nil { + prop.IndexFilterable = prop.IndexInverted + switch dataType, _ := schema.AsPrimitive(prop.DataType); dataType { + // string/string[] are already migrated into text/text[], can be skipped here + case schema.DataTypeText, schema.DataTypeTextArray: + prop.IndexSearchable = prop.IndexInverted + default: + prop.IndexSearchable = &vFalse + } + prop.IndexRangeFilters = &vFalse + } + // new options have precedence so inverted can be reset + prop.IndexInverted = nil + } +} + +func (h *Handler) validateProperty( + class *models.Class, existingPropertyNames map[string]bool, + relaxCrossRefValidation bool, classGetterWithAuth func(string) (*models.Class, error), props ...*models.Property, +) error { + for _, property := range props { + if _, err := schema.ValidatePropertyName(property.Name); err != nil { + return err + } + + if err := schema.ValidateReservedPropertyName(property.Name); err != nil { + return err + } + + if existingPropertyNames[strings.ToLower(property.Name)] { + return fmt.Errorf("class %q: conflict for property %q: already in use or provided multiple times", class.Class, property.Name) + } + + // Validate data type of property. 
+ propertyDataType, err := schema.FindPropertyDataTypeWithRefsAndAuth(classGetterWithAuth, property.DataType, + relaxCrossRefValidation, schema.ClassName(class.Class)) + if err != nil { + return fmt.Errorf("property '%s': invalid dataType: %v: %w", property.Name, property.DataType, err) + } + + if propertyDataType.IsNested() { + if err := validateNestedProperties(property.NestedProperties, property.Name); err != nil { + return err + } + } else { + if len(property.NestedProperties) > 0 { + return fmt.Errorf("property '%s': nestedProperties not allowed for data types other than object/object[]", + property.Name) + } + } + + if err := h.validatePropertyTokenization(property.Tokenization, propertyDataType); err != nil { + return err + } + + if err := h.validatePropertyIndexing(property); err != nil { + return err + } + + if err := h.validatePropModuleConfig(class, property); err != nil { + return err + } + } + + return nil +} + +func setInvertedConfigDefaults(class *models.Class) { + if class.InvertedIndexConfig == nil { + class.InvertedIndexConfig = &models.InvertedIndexConfig{ + UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND, + } + } + + if class.InvertedIndexConfig.CleanupIntervalSeconds == 0 { + class.InvertedIndexConfig.CleanupIntervalSeconds = config.DefaultCleanupIntervalSeconds + } + + if class.InvertedIndexConfig.Bm25 == nil { + class.InvertedIndexConfig.Bm25 = &models.BM25Config{ + K1: config.DefaultBM25k1, + B: config.DefaultBM25b, + } + } + + if class.InvertedIndexConfig.Stopwords == nil { + class.InvertedIndexConfig.Stopwords = &models.StopwordConfig{ + Preset: stopwords.EnglishPreset, + } + } +} + +func (h *Handler) validateCanAddClass(ctx context.Context, class *models.Class, classGetterWithAuth func(string) (*models.Class, error), + relaxCrossRefValidation bool, +) error { + if modelsext.ClassHasLegacyVectorIndex(class) && len(class.VectorConfig) > 0 { + return fmt.Errorf("creating a class with both a class level vector index and named vectors is 
forbidden") + } + + return h.validateClassInvariants(ctx, class, classGetterWithAuth, relaxCrossRefValidation) +} + +func (h *Handler) validateClassInvariants( + ctx context.Context, class *models.Class, classGetterWithAuth func(string) (*models.Class, error), + relaxCrossRefValidation bool, +) error { + if _, err := schema.ValidateClassName(class.Class); err != nil { + return err + } + + existingPropertyNames := map[string]bool{} + for _, property := range class.Properties { + if err := h.validateProperty(class, existingPropertyNames, relaxCrossRefValidation, classGetterWithAuth, property); err != nil { + return err + } + existingPropertyNames[strings.ToLower(property.Name)] = true + } + + if err := h.validateVectorSettings(class); err != nil { + return err + } + + if err := h.moduleConfig.ValidateClass(ctx, class); err != nil { + return err + } + + if err := validateMT(class); err != nil { + return err + } + + if err := replica.ValidateConfig(class, h.config.Replication); err != nil { + return err + } + + // all is fine! 
+ return nil +} + +func (h *Handler) validatePropertyTokenization(tokenization string, propertyDataType schema.PropertyDataType) error { + if propertyDataType.IsPrimitive() { + primitiveDataType := propertyDataType.AsPrimitive() + + switch primitiveDataType { + case schema.DataTypeString, schema.DataTypeStringArray: + // deprecated as of v1.19, will be migrated to DataTypeText/DataTypeTextArray + switch tokenization { + case models.PropertyTokenizationField, models.PropertyTokenizationWord: + return nil + } + case schema.DataTypeText, schema.DataTypeTextArray: + switch tokenization { + case models.PropertyTokenizationField, models.PropertyTokenizationWord, + models.PropertyTokenizationWhitespace, models.PropertyTokenizationLowercase, + models.PropertyTokenizationTrigram: + return nil + case models.PropertyTokenizationGse: + if !entcfg.Enabled(os.Getenv("USE_GSE")) && !entcfg.Enabled(os.Getenv("ENABLE_TOKENIZER_GSE")) { + return fmt.Errorf("the GSE tokenizer is not enabled; set 'ENABLE_TOKENIZER_GSE' to 'true' to enable") + } + return nil + case models.PropertyTokenizationKagomeKr: + if !entcfg.Enabled(os.Getenv("ENABLE_TOKENIZER_KAGOME_KR")) { + return fmt.Errorf("the Korean tokenizer is not enabled; set 'ENABLE_TOKENIZER_KAGOME_KR' to 'true' to enable") + } + return nil + case models.PropertyTokenizationKagomeJa: + if !entcfg.Enabled(os.Getenv("ENABLE_TOKENIZER_KAGOME_JA")) { + return fmt.Errorf("the Japanese tokenizer is not enabled; set 'ENABLE_TOKENIZER_KAGOME_JA' to 'true' to enable") + } + return nil + } + default: + if tokenization == "" { + return nil + } + return fmt.Errorf("tokenization is not allowed for data type '%s'", primitiveDataType) + } + return fmt.Errorf("tokenization '%s' is not allowed for data type '%s'", tokenization, primitiveDataType) + } + + if tokenization == "" { + return nil + } + + if propertyDataType.IsNested() { + return fmt.Errorf("tokenization is not allowed for object/object[] data types") + } + return fmt.Errorf("tokenization is 
not allowed for reference data type") +} + +func (h *Handler) validatePropertyIndexing(prop *models.Property) error { + if prop.IndexInverted != nil { + if prop.IndexFilterable != nil || prop.IndexSearchable != nil || prop.IndexRangeFilters != nil { + return fmt.Errorf("`indexInverted` is deprecated and can not be set together with `indexFilterable`, " + "`indexSearchable` or `indexRangeFilters`") + } + } + + dataType, _ := schema.AsPrimitive(prop.DataType) + if prop.IndexSearchable != nil { + switch dataType { + case schema.DataTypeString, schema.DataTypeStringArray: + // string/string[] are migrated to text/text[] later, + // at this point they are still valid data types, therefore should be handled here. + // true or false allowed + case schema.DataTypeText, schema.DataTypeTextArray: + // true or false allowed + default: + if *prop.IndexSearchable { + return fmt.Errorf("`indexSearchable` is allowed only for text/text[] data types. " + + "For other data types set false or leave empty") + } + } + } + if prop.IndexRangeFilters != nil { + switch dataType { + case schema.DataTypeNumber, schema.DataTypeInt, schema.DataTypeDate: + // true or false allowed + case schema.DataTypeNumberArray, schema.DataTypeIntArray, schema.DataTypeDateArray: + // not supported (yet?) + fallthrough + default: + if *prop.IndexRangeFilters { + return fmt.Errorf("`indexRangeFilters` is allowed only for number/int/date data types. 
" + + "For other data types set false or leave empty") + } + } + } + + return nil +} + +func (h *Handler) validateVectorSettings(class *models.Class) error { + if modelsext.ClassHasLegacyVectorIndex(class) { + if err := h.validateVectorIndexType(class.VectorIndexType); err != nil { + return err + } + + if err := h.validateVectorizer(class.Vectorizer); err != nil { + return err + } + + if asMap, ok := class.VectorIndexConfig.(map[string]interface{}); ok && len(asMap) > 0 { + parsed, err := h.parser.parseGivenVectorIndexConfig(class.VectorIndexType, class.VectorIndexConfig, h.parser.modules.IsMultiVector(class.Vectorizer), h.config.DefaultQuantization) + if err != nil { + return fmt.Errorf("class.VectorIndexConfig can not parse: %w", err) + } + if parsed.IsMultiVector() { + return errors.New("class.VectorIndexConfig multi vector type index type is only configurable using named vectors") + } + } + } + + for name, cfg := range class.VectorConfig { + // check only if vectorizer correctly configured (map with single key being vectorizer name) + // other cases are handled in module config validation + if vm, ok := cfg.Vectorizer.(map[string]interface{}); ok && len(vm) == 1 { + for vectorizer := range vm { + if err := h.validateVectorizer(vectorizer); err != nil { + return fmt.Errorf("target vector %q: %w", name, err) + } + } + } + if err := h.validateVectorIndexType(cfg.VectorIndexType); err != nil { + return fmt.Errorf("target vector %q: %w", name, err) + } + } + return nil +} + +func (h *Handler) validateVectorizer(vectorizer string) error { + if vectorizer == config.VectorizerModuleNone { + return nil + } + + if err := h.vectorizerValidator.ValidateVectorizer(vectorizer); err != nil { + return errors.Wrap(err, "vectorizer") + } + + return nil +} + +func (h *Handler) validateVectorIndexType(vectorIndexType string) error { + switch vectorIndexType { + case vectorindex.VectorIndexTypeHNSW, vectorindex.VectorIndexTypeFLAT: + return nil + case 
vectorindex.VectorIndexTypeDYNAMIC: + if !h.asyncIndexingEnabled { + return fmt.Errorf("the dynamic index can only be created under async indexing environment (ASYNC_INDEXING=true)") + } + return nil + default: + return errors.Errorf("unrecognized or unsupported vectorIndexType %q", + vectorIndexType) + } +} + +func validateMT(class *models.Class) error { + enabled := schema.MultiTenancyEnabled(class) + if !enabled && schema.AutoTenantCreationEnabled(class) { + return fmt.Errorf("can't enable autoTenantCreation on a non-multi-tenant class") + } + + if !enabled && schema.AutoTenantActivationEnabled(class) { + return fmt.Errorf("can't enable autoTenantActivation on a non-multi-tenant class") + } + + return nil +} + +// validateUpdatingMT validates toggling MT and returns whether mt is enabled +func validateUpdatingMT(current, update *models.Class) (enabled bool, err error) { + enabled = schema.MultiTenancyEnabled(current) + if schema.MultiTenancyEnabled(update) != enabled { + if enabled { + err = fmt.Errorf("disabling multi-tenancy for an existing class is not supported") + } else { + err = fmt.Errorf("enabling multi-tenancy for an existing class is not supported") + } + } else { + err = validateMT(update) + } + + return +} + +func validateImmutableFields(initial, updated *models.Class) error { + immutableFields := []immutableText{ + { + name: "class name", + accessor: func(c *models.Class) string { return c.Class }, + }, + } + + if err := validateImmutableTextFields(initial, updated, immutableFields...); err != nil { + return err + } + + for k, v := range updated.VectorConfig { + if _, ok := initial.VectorConfig[k]; !ok { + continue + } + + if !reflect.DeepEqual(initial.VectorConfig[k].Vectorizer, v.Vectorizer) { + return fmt.Errorf("vectorizer config of vector %q is immutable", k) + } + } + + return nil +} + +type immutableText struct { + accessor func(c *models.Class) string + name string +} + +func validateImmutableTextFields(previous, next *models.Class, + 
immutables ...immutableText, +) error { + for _, immutable := range immutables { + oldField := immutable.accessor(previous) + newField := immutable.accessor(next) + if oldField != newField { + return errors.Errorf("%s is immutable: attempted change from %q to %q", + immutable.name, oldField, newField) + } + } + return nil +} + +func validateLegacyVectorIndexConfigImmutableFields(initial, updated *models.Class) error { + return validateImmutableTextFields(initial, updated, []immutableText{ + { + name: "vectorizer", + accessor: func(c *models.Class) string { return c.Vectorizer }, + }, + { + name: "vector index type", + accessor: func(c *models.Class) string { return c.VectorIndexType }, + }, + }...) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/class_test.go b/platform/dbops/binaries/weaviate-src/usecases/schema/class_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d4bfeafacca9f2971c05b64cfff812fec9b642bb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/class_test.go @@ -0,0 +1,2338 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "context" + "encoding/json" + "fmt" + "strings" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/modelsext" + "github.com/weaviate/weaviate/entities/tokenizer" + + "github.com/weaviate/weaviate/adapters/repos/db/inverted/stopwords" + "github.com/weaviate/weaviate/entities/backup" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/replication" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/vectorindex/hnsw" + "github.com/weaviate/weaviate/usecases/cluster/mocks" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/config/runtime" + "github.com/weaviate/weaviate/usecases/sharding" + shardingConfig "github.com/weaviate/weaviate/usecases/sharding/config" +) + +func Test_AddClass(t *testing.T) { + t.Parallel() + ctx := context.Background() + + t.Run("happy path", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + + class := &models.Class{ + Class: "NewClass", + Properties: []*models.Property{ + {DataType: []string{"text"}, Name: "textProp"}, + {DataType: []string{"int"}, Name: "intProp"}, + }, + Vectorizer: "none", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + + _, _, err := handler.AddClass(ctx, nil, class) + assert.Nil(t, err) + + fakeSchemaManager.AssertExpectations(t) + }) + + t.Run("happy path, named vectors", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + + class := &models.Class{ + Class: "NewClass", + Properties: []*models.Property{ + {DataType: []string{"text"}, Name: "textProp"}, + }, + VectorConfig: 
map[string]models.VectorConfig{ + "vec1": { + VectorIndexType: hnswT, + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{}, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + + _, _, err := handler.AddClass(ctx, nil, class) + require.NoError(t, err) + + fakeSchemaManager.AssertExpectations(t) + }) + + t.Run("mixed vector schema creation", func(t *testing.T) { + handler, _ := newTestHandler(t, &fakeDB{}) + + class := &models.Class{ + Class: "NewClass", + Properties: []*models.Property{ + {DataType: []string{"text"}, Name: "textProp"}, + }, + Vectorizer: "text2vec-contextionary", + VectorIndexType: hnswT, + VectorConfig: map[string]models.VectorConfig{ + "vec1": { + VectorIndexType: hnswT, + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{}, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + _, _, err := handler.AddClass(ctx, nil, class) + require.ErrorContains(t, err, "creating a class with both a class level vector index and named vectors is forbidden") + }) + + t.Run("with empty class name", func(t *testing.T) { + handler, _ := newTestHandler(t, &fakeDB{}) + class := models.Class{ReplicationConfig: &models.ReplicationConfig{Factor: 1}} + _, _, err := handler.AddClass(ctx, nil, &class) + assert.EqualError(t, err, "'' is not a valid class name") + }) + + t.Run("with reserved class name", func(t *testing.T) { + handler, _ := newTestHandler(t, &fakeDB{}) + class := models.Class{Class: config.DefaultRaftDir, ReplicationConfig: &models.ReplicationConfig{Factor: 1}} + _, _, err := handler.AddClass(ctx, nil, &class) + assert.EqualError(t, err, fmt.Sprintf("parse class name: class name `%s` is reserved", config.DefaultRaftDir)) + + class = models.Class{Class: "rAFT", ReplicationConfig: 
&models.ReplicationConfig{Factor: 1}} + _, _, err = handler.AddClass(ctx, nil, &class) + assert.EqualError(t, err, fmt.Sprintf("parse class name: class name `%s` is reserved", config.DefaultRaftDir)) + + class = models.Class{Class: "rAfT", ReplicationConfig: &models.ReplicationConfig{Factor: 1}} + _, _, err = handler.AddClass(ctx, nil, &class) + assert.EqualError(t, err, fmt.Sprintf("parse class name: class name `%s` is reserved", config.DefaultRaftDir)) + + class = models.Class{Class: "RaFT", ReplicationConfig: &models.ReplicationConfig{Factor: 1}} + _, _, err = handler.AddClass(ctx, nil, &class) + assert.EqualError(t, err, fmt.Sprintf("parse class name: class name `%s` is reserved", config.DefaultRaftDir)) + + class = models.Class{Class: "RAFT", ReplicationConfig: &models.ReplicationConfig{Factor: 1}} + _, _, err = handler.AddClass(ctx, nil, &class) + assert.EqualError(t, err, fmt.Sprintf("parse class name: class name `%s` is reserved", config.DefaultRaftDir)) + }) + + t.Run("with default params", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + class := models.Class{ + Class: "NewClass", + Vectorizer: "none", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + expectedBM25Config := &models.BM25Config{ + K1: config.DefaultBM25k1, + B: config.DefaultBM25b, + } + expectedStopwordConfig := &models.StopwordConfig{ + Preset: stopwords.EnglishPreset, + } + expectedClass := &class + expectedClass.InvertedIndexConfig = &models.InvertedIndexConfig{ + Bm25: expectedBM25Config, + CleanupIntervalSeconds: 60, + Stopwords: expectedStopwordConfig, + UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND, + } + fakeSchemaManager.On("AddClass", expectedClass, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + _, _, err := handler.AddClass(ctx, nil, &class) + require.Nil(t, err) + fakeSchemaManager.AssertExpectations(t) + }) + + t.Run("with customized params", func(t *testing.T) { + handler, 
fakeSchemaManager := newTestHandler(t, &fakeDB{}) + expectedBM25Config := &models.BM25Config{ + K1: 1.88, + B: 0.44, + } + class := models.Class{ + Class: "NewClass", + InvertedIndexConfig: &models.InvertedIndexConfig{ + Bm25: expectedBM25Config, + UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND, + }, + Vectorizer: "none", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + expectedStopwordConfig := &models.StopwordConfig{ + Preset: "none", + Additions: []string{"monkey", "zebra", "octopus"}, + Removals: []string{"are"}, + } + expectedClass := &class + expectedClass.InvertedIndexConfig = &models.InvertedIndexConfig{ + Bm25: expectedBM25Config, + CleanupIntervalSeconds: 60, + Stopwords: expectedStopwordConfig, + UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND, + } + fakeSchemaManager.On("AddClass", expectedClass, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + _, _, err := handler.AddClass(ctx, nil, &class) + require.Nil(t, err) + fakeSchemaManager.AssertExpectations(t) + }) + + t.Run("with tokenizations", func(t *testing.T) { + type testCase struct { + propName string + dataType []string + tokenization string + expectedErrMsg string + callReadOnly bool + } + + propName := func(dataType schema.DataType, tokenization string) string { + dtStr := strings.ReplaceAll(string(dataType), "[]", "Array") + tStr := "empty" + if tokenization != "" { + tStr = tokenization + } + return fmt.Sprintf("%s_%s", dtStr, tStr) + } + + // These classes are necessary for tests using references + classes := map[string]models.Class{ + "SomeClass": {Class: "SomeClass", Vectorizer: "none", ReplicationConfig: &models.ReplicationConfig{Factor: 1}}, + "SomeOtherClass": {Class: "SomeOtherClass", Vectorizer: "none", ReplicationConfig: &models.ReplicationConfig{Factor: 1}}, + "YetAnotherClass": {Class: "YetAnotherClass", Vectorizer: "none", ReplicationConfig: &models.ReplicationConfig{Factor: 1}}, + } + + runTestCases := func(t 
*testing.T, testCases []testCase) { + for i, tc := range testCases { + t.Run(tc.propName, func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + + class := &models.Class{ + Class: fmt.Sprintf("NewClass_%d", i), + Properties: []*models.Property{ + { + Name: tc.propName, + DataType: tc.dataType, + Tokenization: tc.tokenization, + }, + }, + Vectorizer: "none", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + classes[class.Class] = *class + + if tc.callReadOnly { + call := fakeSchemaManager.On("ReadOnlyClass", mock.Anything, mock.Anything).Return(nil) + call.RunFn = func(a mock.Arguments) { + existedClass := classes[a.Get(0).(string)] + call.ReturnArguments = mock.Arguments{&existedClass} + } + } + + // fakeSchemaManager.On("ReadOnlyClass", mock.Anything).Return(&models.Class{Class: classes[tc.dataType[0]].Class, Vectorizer: classes[tc.dataType[0]].Vectorizer}) + if tc.expectedErrMsg == "" { + fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + } + + _, _, err := handler.AddClass(context.Background(), nil, class) + if tc.expectedErrMsg == "" { + require.Nil(t, err) + } else { + require.EqualError(t, err, tc.expectedErrMsg) + } + fakeSchemaManager.AssertExpectations(t) + }) + } + } + + t.Run("text/textArray and all tokenizations", func(t *testing.T) { + var testCases []testCase + for _, dataType := range []schema.DataType{ + schema.DataTypeText, schema.DataTypeTextArray, + } { + for _, tokenization := range append(tokenizer.Tokenizations, "") { + testCases = append(testCases, testCase{ + propName: propName(dataType, tokenization), + dataType: dataType.PropString(), + tokenization: tokenization, + expectedErrMsg: "", + }) + } + + tokenization := "non_existing" + testCases = append(testCases, testCase{ + propName: propName(dataType, tokenization), + dataType: dataType.PropString(), + tokenization: tokenization, + expectedErrMsg: 
fmt.Sprintf("tokenization '%s' is not allowed for data type '%s'", tokenization, dataType), + }) + } + + runTestCases(t, testCases) + }) + + t.Run("non text/textArray and all tokenizations", func(t *testing.T) { + var testCases []testCase + for _, dataType := range schema.PrimitiveDataTypes { + switch dataType { + case schema.DataTypeText, schema.DataTypeTextArray: + continue + default: + tokenization := "" + testCases = append(testCases, testCase{ + propName: propName(dataType, tokenization), + dataType: dataType.PropString(), + tokenization: tokenization, + expectedErrMsg: "", + }) + + for _, tokenization := range append(tokenizer.Tokenizations, "non_existing") { + testCases = append(testCases, testCase{ + propName: propName(dataType, tokenization), + dataType: dataType.PropString(), + tokenization: tokenization, + expectedErrMsg: fmt.Sprintf("tokenization is not allowed for data type '%s'", dataType), + }) + } + } + } + + runTestCases(t, testCases) + }) + + t.Run("references and all tokenizations", func(t *testing.T) { + var testCases []testCase + for i, dataType := range [][]string{ + {"SomeClass"}, + {"SomeOtherClass", "YetAnotherClass"}, + } { + testCases = append(testCases, testCase{ + propName: fmt.Sprintf("RefProp_%d_empty", i), + dataType: dataType, + tokenization: "", + expectedErrMsg: "", + callReadOnly: true, + }) + + for _, tokenization := range append(tokenizer.Tokenizations, "non_existing") { + testCases = append(testCases, testCase{ + propName: fmt.Sprintf("RefProp_%d_%s", i, tokenization), + dataType: dataType, + tokenization: tokenization, + expectedErrMsg: "tokenization is not allowed for reference data type", + callReadOnly: true, + }) + } + } + + runTestCases(t, testCases) + }) + + t.Run("[deprecated string] string/stringArray and all tokenizations", func(t *testing.T) { + var testCases []testCase + for _, dataType := range []schema.DataType{ + schema.DataTypeString, schema.DataTypeStringArray, + } { + for _, tokenization := range 
[]string{ + models.PropertyTokenizationWord, models.PropertyTokenizationField, "", + } { + testCases = append(testCases, testCase{ + propName: propName(dataType, tokenization), + dataType: dataType.PropString(), + tokenization: tokenization, + expectedErrMsg: "", + }) + } + + for _, tokenization := range append(tokenizer.Tokenizations, "non_existing") { + switch tokenization { + case models.PropertyTokenizationWord, models.PropertyTokenizationField: + continue + default: + testCases = append(testCases, testCase{ + propName: propName(dataType, tokenization), + dataType: dataType.PropString(), + tokenization: tokenization, + expectedErrMsg: fmt.Sprintf("tokenization '%s' is not allowed for data type '%s'", tokenization, dataType), + }) + } + } + } + + runTestCases(t, testCases) + }) + }) + + t.Run("with invalid settings", func(t *testing.T) { + handler, _ := newTestHandler(t, &fakeDB{}) + + _, _, err := handler.AddClass(ctx, nil, &models.Class{ + Class: "NewClass", + VectorIndexType: "invalid", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }) + assert.EqualError(t, err, `unrecognized or unsupported vectorIndexType "invalid"`) + + // VectorConfig is invalid VectorIndexType + _, _, err = handler.AddClass(ctx, nil, &models.Class{ + Class: "NewClass", + VectorConfig: map[string]models.VectorConfig{ + "custom": { + VectorIndexType: "invalid", + VectorIndexConfig: hnsw.UserConfig{}, + Vectorizer: map[string]interface{}{"none": map[string]interface{}{}}, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }) + assert.EqualError(t, err, `target vector "custom": unrecognized or unsupported vectorIndexType "invalid"`) + + // VectorConfig is invalid Vectorizer + _, _, err = handler.AddClass(ctx, nil, &models.Class{ + Class: "NewClass", + VectorConfig: map[string]models.VectorConfig{ + "custom": { + VectorIndexType: "flat", + VectorIndexConfig: hnsw.UserConfig{}, + Vectorizer: map[string]interface{}{"invalid": nil}, + }, + }, + 
ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }) + assert.EqualError(t, err, `target vector "custom": vectorizer: invalid vectorizer "invalid"`) + }) +} + +func Test_AddClassWithLimits(t *testing.T) { + t.Parallel() + ctx := context.Background() + + t.Run("with max collections limit", func(t *testing.T) { + tests := []struct { + name string + existingCount int + maxAllowed int + expectedError error + }{ + { + name: "under the limit", + existingCount: 5, + maxAllowed: 10, + expectedError: nil, + }, + { + name: "at the limit", + existingCount: 10, + maxAllowed: 10, + expectedError: fmt.Errorf("maximum number of collections (10) reached"), + }, + { + name: "over the limit", + existingCount: 11, + maxAllowed: 10, + expectedError: fmt.Errorf("maximum number of collections (10) reached"), + }, + { + name: "no limit set", + existingCount: 100, + maxAllowed: -1, + expectedError: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + + // Mock the schema count + fakeSchemaManager.On("QueryCollectionsCount").Return(tt.existingCount, nil) + + // Set the max collections limit in config + handler.schemaConfig.MaximumAllowedCollectionsCount = runtime.NewDynamicValue(tt.maxAllowed) + + class := &models.Class{ + Class: "NewClass", + Vectorizer: "none", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + if tt.expectedError == nil { + fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + } + + _, _, err := handler.AddClass(ctx, nil, class) + if tt.expectedError != nil { + require.NotNil(t, err) + assert.Contains(t, err.Error(), tt.expectedError.Error()) + } else { + require.Nil(t, err) + } + fakeSchemaManager.AssertExpectations(t) + }) + } + }) + + t.Run("adding dynamic index", func(t *testing.T) { + for _, tt := range []struct { + name string + asyncIndexingEnabled bool + + 
expectError string + }{ + { + name: "async indexing disabled", + asyncIndexingEnabled: false, + + expectError: "the dynamic index can only be created under async indexing environment (ASYNC_INDEXING=true)", + }, + { + name: "async indexing enabled", + asyncIndexingEnabled: true, + }, + } { + t.Run(tt.name, func(t *testing.T) { + handler, schemaManager := newTestHandler(t, &fakeDB{}) + handler.asyncIndexingEnabled = tt.asyncIndexingEnabled + + if tt.expectError == "" { + schemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + schemaManager.On("QueryCollectionsCount").Return(0, nil) + defer schemaManager.AssertExpectations(t) + } + + assertError := func(err error) { + if tt.expectError != "" { + require.ErrorContains(t, err, tt.expectError) + } else { + require.NoError(t, err) + } + } + + _, _, err := handler.AddClass(ctx, nil, &models.Class{ + Class: "NewClass", + VectorIndexType: "dynamic", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }) + assertError(err) + + _, _, err = handler.AddClass(ctx, nil, &models.Class{ + Class: "NewClass", + VectorConfig: map[string]models.VectorConfig{ + "vec1": { + VectorIndexType: "dynamic", + Vectorizer: map[string]any{"text2vec-contextionary": map[string]any{}}, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }) + assertError(err) + }) + } + }) +} + +func Test_AddClass_DefaultsAndMigration(t *testing.T) { + t.Parallel() + + t.Run("set defaults and migrate string|stringArray datatype and tokenization", func(t *testing.T) { + type testCase struct { + propName string + dataType schema.DataType + tokenization string + + expectedDataType schema.DataType + expectedTokenization string + } + + propName := func(dataType schema.DataType, tokenization string) string { + return strings.ReplaceAll(fmt.Sprintf("%s_%s", dataType, tokenization), "[]", "Array") + } + + ctx := context.Background() + className := "MigrationClass" + + var testCases []testCase + for _, dataType := range 
[]schema.DataType{ + schema.DataTypeText, schema.DataTypeTextArray, + } { + for _, tokenization := range tokenizer.Tokenizations { + testCases = append(testCases, testCase{ + propName: propName(dataType, tokenization), + dataType: dataType, + tokenization: tokenization, + expectedDataType: dataType, + expectedTokenization: tokenization, + }) + } + tokenization := "" + testCases = append(testCases, testCase{ + propName: propName(dataType, tokenization), + dataType: dataType, + tokenization: tokenization, + expectedDataType: dataType, + expectedTokenization: models.PropertyTokenizationWord, + }) + } + for _, dataType := range []schema.DataType{ + schema.DataTypeString, schema.DataTypeStringArray, + } { + for _, tokenization := range []string{ + models.PropertyTokenizationWord, models.PropertyTokenizationField, "", + } { + var expectedDataType schema.DataType + switch dataType { + case schema.DataTypeStringArray: + expectedDataType = schema.DataTypeTextArray + default: + expectedDataType = schema.DataTypeText + } + + var expectedTokenization string + switch tokenization { + case models.PropertyTokenizationField: + expectedTokenization = models.PropertyTokenizationField + default: + expectedTokenization = models.PropertyTokenizationWhitespace + } + + testCases = append(testCases, testCase{ + propName: propName(dataType, tokenization), + dataType: dataType, + tokenization: tokenization, + expectedDataType: expectedDataType, + expectedTokenization: expectedTokenization, + }) + } + } + + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + var properties []*models.Property + for _, tc := range testCases { + properties = append(properties, &models.Property{ + Name: "created_" + tc.propName, + DataType: tc.dataType.PropString(), + Tokenization: tc.tokenization, + }) + } + + class := models.Class{ + Class: className, + Properties: properties, + Vectorizer: "none", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + t.Run("create class with all 
properties", func(t *testing.T) { + fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("ReadOnlyClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + handler.schemaConfig.MaximumAllowedCollectionsCount = runtime.NewDynamicValue(-1) + _, _, err := handler.AddClass(ctx, nil, &class) + require.Nil(t, err) + }) + + t.Run("add properties to existing class", func(t *testing.T) { + for _, tc := range testCases { + fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("ReadOnlyClass", mock.Anything, mock.Anything).Return(&class) + fakeSchemaManager.On("AddProperty", mock.Anything, mock.Anything).Return(nil) + t.Run("added_"+tc.propName, func(t *testing.T) { + _, _, err := handler.AddClassProperty(ctx, nil, &class, class.Class, false, &models.Property{ + Name: "added_" + tc.propName, + DataType: tc.dataType.PropString(), + Tokenization: tc.tokenization, + }) + + require.Nil(t, err) + }) + } + }) + }) + + t.Run("set defaults and migrate IndexInverted to IndexFilterable + IndexSearchable", func(t *testing.T) { + vFalse := false + vTrue := true + allBoolPtrs := []*bool{nil, &vFalse, &vTrue} + + type testCase struct { + propName string + dataType schema.DataType + indexInverted *bool + indexFilterable *bool + indexSearchable *bool + + expectedInverted *bool + expectedFilterable *bool + expectedSearchable *bool + } + + boolPtrToStr := func(ptr *bool) string { + if ptr == nil { + return "nil" + } + return fmt.Sprintf("%v", *ptr) + } + propName := func(dt schema.DataType, inverted, filterable, searchable *bool) string { + return fmt.Sprintf("%s_inverted_%s_filterable_%s_searchable_%s", + dt.String(), boolPtrToStr(inverted), boolPtrToStr(filterable), boolPtrToStr(searchable)) + } + + ctx := context.Background() + className := "MigrationClass" + + var testCases []testCase + + for _, dataType := range 
[]schema.DataType{schema.DataTypeText, schema.DataTypeInt} { + for _, inverted := range allBoolPtrs { + for _, filterable := range allBoolPtrs { + for _, searchable := range allBoolPtrs { + if inverted != nil { + if filterable != nil || searchable != nil { + // invalid combination, indexInverted can not be set + // together with indexFilterable or indexSearchable + continue + } + } + + if searchable != nil && *searchable { + if dataType != schema.DataTypeText { + // invalid combination, indexSearchable can not be enabled + // for non text/text[] data type + continue + } + } + + switch dataType { + case schema.DataTypeText: + if inverted != nil { + testCases = append(testCases, testCase{ + propName: propName(dataType, inverted, filterable, searchable), + dataType: dataType, + indexInverted: inverted, + indexFilterable: filterable, + indexSearchable: searchable, + expectedInverted: nil, + expectedFilterable: inverted, + expectedSearchable: inverted, + }) + } else { + expectedFilterable := filterable + if filterable == nil { + expectedFilterable = &vTrue + } + expectedSearchable := searchable + if searchable == nil { + expectedSearchable = &vTrue + } + testCases = append(testCases, testCase{ + propName: propName(dataType, inverted, filterable, searchable), + dataType: dataType, + indexInverted: inverted, + indexFilterable: filterable, + indexSearchable: searchable, + expectedInverted: nil, + expectedFilterable: expectedFilterable, + expectedSearchable: expectedSearchable, + }) + } + default: + if inverted != nil { + testCases = append(testCases, testCase{ + propName: propName(dataType, inverted, filterable, searchable), + dataType: dataType, + indexInverted: inverted, + indexFilterable: filterable, + indexSearchable: searchable, + expectedInverted: nil, + expectedFilterable: inverted, + expectedSearchable: &vFalse, + }) + } else { + expectedFilterable := filterable + if filterable == nil { + expectedFilterable = &vTrue + } + expectedSearchable := searchable + if 
searchable == nil { + expectedSearchable = &vFalse + } + testCases = append(testCases, testCase{ + propName: propName(dataType, inverted, filterable, searchable), + dataType: dataType, + indexInverted: inverted, + indexFilterable: filterable, + indexSearchable: searchable, + expectedInverted: nil, + expectedFilterable: expectedFilterable, + expectedSearchable: expectedSearchable, + }) + } + } + } + } + } + } + + var properties []*models.Property + for _, tc := range testCases { + properties = append(properties, &models.Property{ + Name: "created_" + tc.propName, + DataType: tc.dataType.PropString(), + IndexInverted: tc.indexInverted, + IndexFilterable: tc.indexFilterable, + IndexSearchable: tc.indexSearchable, + }) + } + + class := models.Class{ + Class: className, + Properties: properties, + Vectorizer: "none", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + t.Run("create class with all properties", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + _, _, err := handler.AddClass(ctx, nil, &class) + require.Nil(t, err) + fakeSchemaManager.AssertExpectations(t) + }) + + t.Run("add properties to existing class", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + for _, tc := range testCases { + t.Run("added_"+tc.propName, func(t *testing.T) { + prop := &models.Property{ + Name: "added_" + tc.propName, + DataType: tc.dataType.PropString(), + IndexInverted: tc.indexInverted, + IndexFilterable: tc.indexFilterable, + IndexSearchable: tc.indexSearchable, + } + fakeSchemaManager.On("AddProperty", className, []*models.Property{prop}).Return(nil) + _, _, err := handler.AddClassProperty(ctx, nil, &class, class.Class, false, prop) + + require.Nil(t, err) + }) + } + fakeSchemaManager.AssertExpectations(t) + }) + }) +} + +func Test_Defaults_NestedProperties(t 
*testing.T) { + t.Parallel() + + for _, pdt := range schema.PrimitiveDataTypes { + t.Run(pdt.String(), func(t *testing.T) { + nestedProperties := []*models.NestedProperty{ + { + Name: "nested_" + pdt.String(), + DataType: pdt.PropString(), + }, + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propPrimitives := &models.Property{ + Name: "objectProp", + DataType: ndt.PropString(), + NestedProperties: nestedProperties, + } + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + NestedProperties: nestedProperties, + }, + }, + } + + setPropertyDefaults(propPrimitives) + setPropertyDefaults(propLvl2Primitives) + + t.Run("primitive data types", func(t *testing.T) { + for _, np := range []*models.NestedProperty{ + propPrimitives.NestedProperties[0], + propLvl2Primitives.NestedProperties[0].NestedProperties[0], + } { + switch pdt { + case schema.DataTypeText, schema.DataTypeTextArray: + require.NotNil(t, np.IndexFilterable) + assert.True(t, *np.IndexFilterable) + require.NotNil(t, np.IndexSearchable) + assert.True(t, *np.IndexSearchable) + assert.Equal(t, models.PropertyTokenizationWord, np.Tokenization) + case schema.DataTypeBlob: + require.NotNil(t, np.IndexFilterable) + assert.False(t, *np.IndexFilterable) + require.NotNil(t, np.IndexSearchable) + assert.False(t, *np.IndexSearchable) + assert.Equal(t, "", np.Tokenization) + default: + require.NotNil(t, np.IndexFilterable) + assert.True(t, *np.IndexFilterable) + require.NotNil(t, np.IndexSearchable) + assert.False(t, *np.IndexSearchable) + assert.Equal(t, "", np.Tokenization) + } + } + }) + + t.Run("nested data types", func(t *testing.T) { + for _, indexFilterable := range []*bool{ + propPrimitives.IndexFilterable, + propLvl2Primitives.IndexFilterable, + propLvl2Primitives.NestedProperties[0].IndexFilterable, + } { + 
require.NotNil(t, indexFilterable) + assert.True(t, *indexFilterable) + } + for _, indexSearchable := range []*bool{ + propPrimitives.IndexSearchable, + propLvl2Primitives.IndexSearchable, + propLvl2Primitives.NestedProperties[0].IndexSearchable, + } { + require.NotNil(t, indexSearchable) + assert.False(t, *indexSearchable) + } + for _, tokenization := range []string{ + propPrimitives.Tokenization, + propLvl2Primitives.Tokenization, + propLvl2Primitives.NestedProperties[0].Tokenization, + } { + assert.Equal(t, "", tokenization) + } + }) + }) + } + }) + } +} + +func Test_Validation_ClassNames(t *testing.T) { + t.Parallel() + + type testCase struct { + input string + valid bool + storedAs string + name string + } + + // all inputs represent class names (!) + tests := []testCase{ + // valid names + { + name: "Single uppercase word", + input: "Car", + valid: true, + storedAs: "Car", + }, + { + name: "Single lowercase word, stored as uppercase", + input: "car", + valid: true, + storedAs: "Car", + }, + { + name: "empty class", + input: "", + valid: false, + }, + } + + t.Run("adding a class", func(t *testing.T) { + t.Run("different class names without keywords or properties", func(t *testing.T) { + for _, test := range tests { + t.Run(test.name+" as thing class", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + class := &models.Class{ + Vectorizer: "none", + Class: test.input, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + if test.valid { + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + } + _, _, err := handler.AddClass(context.Background(), nil, class) + t.Log(err) + assert.Equal(t, test.valid, err == nil) + fakeSchemaManager.AssertExpectations(t) + }) + } + }) + + t.Run("different class names with valid keywords", func(t *testing.T) { + for _, test := range tests { + t.Run(test.name+" as thing class", func(t *testing.T) { + handler, 
fakeSchemaManager := newTestHandler(t, &fakeDB{}) + class := &models.Class{ + Vectorizer: "none", + Class: test.input, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + if test.valid { + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + } + _, _, err := handler.AddClass(context.Background(), nil, class) + t.Log(err) + assert.Equal(t, test.valid, err == nil) + fakeSchemaManager.AssertExpectations(t) + }) + } + }) + }) +} + +func Test_Validation_PropertyNames(t *testing.T) { + t.Parallel() + type testCase struct { + input string + valid bool + storedAs string + name string + } + + // for all test cases keep in mind that the word "carrot" is not present in + // the fake c11y, but every other word is + // + // all inputs represent property names (!) + tests := []testCase{ + // valid names + { + name: "Single uppercase word, stored as lowercase", + input: "Brand", + valid: true, + storedAs: "brand", + }, + { + name: "Single lowercase word", + input: "brand", + valid: true, + storedAs: "brand", + }, + { + name: "Property with underscores", + input: "property_name", + valid: true, + storedAs: "property_name", + }, + { + name: "Property with underscores and numbers", + input: "property_name_2", + valid: true, + storedAs: "property_name_2", + }, + { + name: "Property starting with underscores", + input: "_property_name", + valid: true, + storedAs: "_property_name", + }, + { + name: "empty prop name", + input: "", + valid: false, + }, + { + name: "reserved prop name: id", + input: "id", + valid: false, + }, + { + name: "reserved prop name: _id", + input: "_id", + valid: false, + }, + { + name: "reserved prop name: _additional", + input: "_additional", + valid: false, + }, + } + + ctx := context.Background() + + t.Run("when adding a new class", func(t *testing.T) { + t.Run("different property names without keywords for the prop", func(t *testing.T) { + for _, test := range tests 
{ + t.Run(test.name+" as thing class", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + class := &models.Class{ + Vectorizer: "none", + Class: "ValidName", + Properties: []*models.Property{{ + DataType: schema.DataTypeText.PropString(), + Name: test.input, + }}, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + if test.valid { + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + } + handler.schemaConfig.MaximumAllowedCollectionsCount = runtime.NewDynamicValue(-1) + _, _, err := handler.AddClass(context.Background(), nil, class) + assert.Equal(t, test.valid, err == nil) + fakeSchemaManager.AssertExpectations(t) + }) + } + }) + + t.Run("different property names with valid keywords for the prop", func(t *testing.T) { + for _, test := range tests { + t.Run(test.name+" as thing class", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + class := &models.Class{ + Vectorizer: "none", + Class: "ValidName", + Properties: []*models.Property{{ + DataType: schema.DataTypeText.PropString(), + Name: test.input, + }}, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + if test.valid { + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + } + _, _, err := handler.AddClass(context.Background(), nil, class) + t.Log(err) + assert.Equal(t, test.valid, err == nil) + fakeSchemaManager.AssertExpectations(t) + }) + } + }) + }) + + t.Run("when updating an existing class with a new property", func(t *testing.T) { + t.Run("different property names without keywords for the prop", func(t *testing.T) { + for _, test := range tests { + t.Run(test.name+" as thing class", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + class := &models.Class{ + Vectorizer: "none", + Class: "ValidName", + Properties: 
[]*models.Property{ + { + Name: "dummyPropSoWeDontRunIntoAllNoindexedError", + DataType: schema.DataTypeText.PropString(), + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + _, _, err := handler.AddClass(context.Background(), nil, class) + require.Nil(t, err) + + property := &models.Property{ + DataType: schema.DataTypeText.PropString(), + Name: test.input, + } + if test.valid { + fakeSchemaManager.On("AddProperty", class.Class, []*models.Property{property}).Return(nil) + } + _, _, err = handler.AddClassProperty(context.Background(), nil, class, class.Class, false, property) + t.Log(err) + require.Equal(t, test.valid, err == nil) + fakeSchemaManager.AssertExpectations(t) + }) + } + }) + + t.Run("different property names with valid keywords for the prop", func(t *testing.T) { + for _, test := range tests { + t.Run(test.name+" as thing class", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + class := &models.Class{ + Vectorizer: "none", + Class: "ValidName", + Properties: []*models.Property{{ + DataType: schema.DataTypeText.PropString(), + Name: test.input, + }}, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + if test.valid { + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + } + _, _, err := handler.AddClass(ctx, nil, class) + t.Log(err) + assert.Equal(t, test.valid, err == nil) + fakeSchemaManager.AssertExpectations(t) + }) + } + }) + }) +} + +// As of now, most class settings are immutable, but we need to allow some +// specific updates, such as the vector index config +func Test_UpdateClass(t *testing.T) { + t.Run("class not found", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + fakeSchemaManager.On("ReadOnlyClass", "WrongClass", 
mock.Anything).Return(nil) + fakeSchemaManager.On("UpdateClass", mock.Anything, mock.Anything).Return(ErrNotFound) + + err := handler.UpdateClass(context.Background(), nil, "WrongClass", &models.Class{ReplicationConfig: &models.ReplicationConfig{Factor: 1}}) + require.ErrorIs(t, err, ErrNotFound) + fakeSchemaManager.AssertExpectations(t) + }) + + t.Run("fields validation", func(t *testing.T) { + tests := []struct { + name string + initial *models.Class + update *models.Class + expectedError error + }{ + { + name: "ChangeName", + initial: &models.Class{Class: "InitialName", Vectorizer: "none", ReplicationConfig: &models.ReplicationConfig{Factor: 1}}, + update: &models.Class{Class: "UpdatedName", Vectorizer: "none", ReplicationConfig: &models.ReplicationConfig{Factor: 1}}, + expectedError: fmt.Errorf( + "class name is immutable: " + + "attempted change from \"InitialName\" to \"UpdatedName\""), + }, + { + name: "ModifyVectorizer", + initial: &models.Class{Class: "InitialName", Vectorizer: "model1", ReplicationConfig: &models.ReplicationConfig{Factor: 1}}, + update: &models.Class{Class: "InitialName", Vectorizer: "model2", ReplicationConfig: &models.ReplicationConfig{Factor: 1}}, + expectedError: fmt.Errorf( + "vectorizer is immutable: " + + "attempted change from \"model1\" to \"model2\""), + }, + { + name: "ModifyVectorIndexType", + initial: &models.Class{Class: "InitialName", VectorIndexType: "hnsw", Vectorizer: "none", ReplicationConfig: &models.ReplicationConfig{Factor: 1}}, + update: &models.Class{Class: "InitialName", VectorIndexType: "flat", Vectorizer: "none", ReplicationConfig: &models.ReplicationConfig{Factor: 1}}, + expectedError: fmt.Errorf( + "vector index type is immutable: " + + "attempted change from \"hnsw\" to \"flat\""), + }, + { + name: "UnsupportedVectorIndex", + initial: &models.Class{Class: "InitialName", VectorIndexType: "hnsw", Vectorizer: "none", ReplicationConfig: &models.ReplicationConfig{Factor: 1}}, + update: &models.Class{Class: 
"InitialName", VectorIndexType: "lsh", Vectorizer: "none", ReplicationConfig: &models.ReplicationConfig{Factor: 1}}, + expectedError: fmt.Errorf("unsupported vector"), + }, + { + name: "add property to an empty class", + initial: &models.Class{Class: "InitialName", Vectorizer: "none", ReplicationConfig: &models.ReplicationConfig{Factor: 1}}, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "newProp", + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: errPropertiesUpdatedInClassUpdate, + }, + { + name: "updating second property", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "prop1", + DataType: schema.DataTypeText.PropString(), + }, + { + Name: "prop2", + DataType: schema.DataTypeText.PropString(), + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "prop1", + DataType: schema.DataTypeText.PropString(), + }, + { + Name: "prop2", + DataType: schema.DataTypeInt.PropString(), + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: errPropertiesUpdatedInClassUpdate, + }, + { + name: "properties order should not matter", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "prop1", + DataType: schema.DataTypeText.PropString(), + }, + { + Name: "prop2", + DataType: schema.DataTypeInt.PropString(), + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "prop2", + DataType: schema.DataTypeInt.PropString(), + }, + { + Name: "prop1", + DataType: schema.DataTypeText.PropString(), + }, + }, + ReplicationConfig: 
&models.ReplicationConfig{Factor: 1}, + }, + expectedError: nil, + }, + { + name: "leaving properties unchanged", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "aProp", + DataType: schema.DataTypeText.PropString(), + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "aProp", + DataType: schema.DataTypeText.PropString(), + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: nil, + }, + { + name: "attempting to rename a property", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "aProp", + DataType: schema.DataTypeText.PropString(), + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "changedProp", + DataType: schema.DataTypeText.PropString(), + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: fmt.Errorf( + "property fields other than description cannot be updated through updating the class. Use the add " + + "property feature (e.g. 
\"POST /v1/schema/{className}/properties\") " + + "to add additional properties"), + }, + { + name: "attempting to update the inverted index cleanup interval", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + InvertedIndexConfig: &models.InvertedIndexConfig{ + CleanupIntervalSeconds: 17, + UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + InvertedIndexConfig: &models.InvertedIndexConfig{ + CleanupIntervalSeconds: 18, + Bm25: &models.BM25Config{ + K1: config.DefaultBM25k1, + B: config.DefaultBM25b, + }, + UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + }, + { + name: "attempting to update the inverted index BM25 config", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + InvertedIndexConfig: &models.InvertedIndexConfig{ + CleanupIntervalSeconds: 18, + Bm25: &models.BM25Config{ + K1: 1.012, + B: 0.125, + }, + UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + InvertedIndexConfig: &models.InvertedIndexConfig{ + CleanupIntervalSeconds: 18, + Bm25: &models.BM25Config{ + K1: 1.012, + B: 0.125, + }, + UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + }, + { + name: "attempting to update the inverted index Stopwords config", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + InvertedIndexConfig: &models.InvertedIndexConfig{ + CleanupIntervalSeconds: 18, + Stopwords: &models.StopwordConfig{ + Preset: "en", + }, + UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: 
"InitialName", + Vectorizer: "none", + InvertedIndexConfig: &models.InvertedIndexConfig{ + CleanupIntervalSeconds: 18, + Stopwords: &models.StopwordConfig{ + Preset: "none", + Additions: []string{"banana", "passionfruit", "kiwi"}, + Removals: []string{"a", "the"}, + }, + UsingBlockMaxWAND: config.DefaultUsingBlockMaxWAND, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + }, + { + name: "attempting to update module config", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + ModuleConfig: map[string]interface{}{ + "my-module1": map[string]interface{}{ + "my-setting": "some-value", + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + ModuleConfig: map[string]interface{}{ + "my-module1": map[string]interface{}{ + "my-setting": "updated-value", + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: fmt.Errorf("can only update generative and reranker module configs"), + }, + { + name: "adding new module configuration", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + ModuleConfig: map[string]interface{}{ + "my-module1": map[string]interface{}{ + "my-setting": "some-value", + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + ModuleConfig: map[string]interface{}{ + "my-module1": map[string]interface{}{ + "my-setting": "some-value", + }, + "my-module2": map[string]interface{}{ + "my-setting": "some-value", + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: nil, + }, + { + name: "adding new module configuration for a property", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "text", + DataType: schema.DataTypeText.PropString(), + ModuleConfig: map[string]interface{}{ + 
"my-module1": map[string]interface{}{ + "my-setting": "some-value", + }, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "text", + DataType: schema.DataTypeText.PropString(), + ModuleConfig: map[string]interface{}{ + "my-module1": map[string]interface{}{ + "my-setting": "some-value", + }, + "my-module2": map[string]interface{}{ + "my-setting": "some-value", + }, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: nil, + }, + { + name: "updating existing module configuration for a property", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "text", + DataType: schema.DataTypeText.PropString(), + ModuleConfig: map[string]interface{}{ + "my-module1": map[string]interface{}{ + "my-setting": "some-value", + }, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "text", + DataType: schema.DataTypeText.PropString(), + ModuleConfig: map[string]interface{}{ + "my-module1": map[string]interface{}{ + "my-setting": "new-value", + }, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: errors.New(`module "my-module1" configuration cannot be updated`), + }, + { + name: "removing existing module configuration for a property", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "text", + DataType: schema.DataTypeText.PropString(), + ModuleConfig: map[string]interface{}{ + "my-module1": map[string]interface{}{ + "my-setting": "some-value", + }, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + 
Vectorizer: "none", + Properties: []*models.Property{ + { + Name: "text", + DataType: schema.DataTypeText.PropString(), + ModuleConfig: map[string]interface{}{ + "my-module2": map[string]interface{}{ + "my-setting": "new-value", + }, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: errors.New(`module "my-module1" configuration was removed`), + }, + { + name: "updating vector index config", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + VectorIndexConfig: map[string]interface{}{ + "some-setting": "old-value", + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + VectorIndexConfig: map[string]interface{}{ + "some-setting": "new-value", + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: nil, + }, + { + name: "try to turn MT on when it was previously off", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: false, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: fmt.Errorf("enabling multi-tenancy for an existing class is not supported"), + }, + { + name: "try to turn MT off when it was previously on", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: false, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: fmt.Errorf("disabling 
multi-tenancy for an existing class is not supported"), + }, + { + name: "change auto tenant creation after creating the class", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantCreation: true, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: nil, + }, + { + name: "change auto tenant activation after creating the class", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "none", + MultiTenancyConfig: &models.MultiTenancyConfig{ + Enabled: true, + AutoTenantActivation: true, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: nil, + }, + { + name: "adding named vector on a class with legacy index", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "text2vec-contextionary", + VectorIndexType: hnswT, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "text2vec-contextionary", + VectorIndexType: hnswT, + VectorConfig: map[string]models.VectorConfig{ + "vec1": { + VectorIndexType: hnswT, + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{}, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: nil, + }, + { + name: "adding new vector to a class with named vectors", + initial: &models.Class{ + Class: "InitialName", + VectorConfig: map[string]models.VectorConfig{ + "initial": { + VectorIndexType: hnswT, + Vectorizer: 
map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{}, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + VectorConfig: map[string]models.VectorConfig{ + "initial": { + VectorIndexType: hnswT, + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{}, + }, + }, + "new": { + VectorIndexType: hnswT, + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{}, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + }, + { + name: "adding legacy vector to a class with named vectors", + initial: &models.Class{ + Class: "InitialName", + VectorConfig: map[string]models.VectorConfig{ + "initial": { + VectorIndexType: hnswT, + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{}, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "text2vec-contextionary", + VectorIndexType: hnswT, + VectorConfig: map[string]models.VectorConfig{ + "initial": { + VectorIndexType: hnswT, + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{}, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: fmt.Errorf("vectorizer is immutable"), + }, + { + name: "removing existing named vector", + initial: &models.Class{ + Class: "InitialName", + VectorConfig: map[string]models.VectorConfig{ + "first": { + VectorIndexType: hnswT, + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{}, + }, + }, + "second": { + VectorIndexType: hnswT, + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{}, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + VectorConfig: 
map[string]models.VectorConfig{ + "first": { + VectorIndexType: hnswT, + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{}, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: fmt.Errorf(`missing config for vector "second"`), + }, + { + name: "removing existing legacy vector", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "text2vec-contextionary", + VectorIndexType: hnswT, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: fmt.Errorf("vectorizer is immutable"), + }, + { + name: "adding named vector with reserved named on a collection with legacy index", + initial: &models.Class{ + Class: "InitialName", + Vectorizer: "text2vec-contextionary", + VectorIndexType: hnswT, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + update: &models.Class{ + Class: "InitialName", + Vectorizer: "text2vec-contextionary", + VectorIndexType: hnswT, + VectorConfig: map[string]models.VectorConfig{ + modelsext.DefaultNamedVectorName: { + VectorIndexType: hnswT, + Vectorizer: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{}, + }, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + expectedError: fmt.Errorf("vector named %s cannot be created when collection level vector index is configured", modelsext.DefaultNamedVectorName), + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + ctx := context.Background() + + store := NewFakeStore() + store.parser = handler.parser + + fakeSchemaManager.On("AddClass", test.initial, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + fakeSchemaManager.On("UpdateClass", mock.Anything, mock.Anything).Return(nil) + 
fakeSchemaManager.On("ReadOnlyClass", test.initial.Class, mock.Anything).Return(test.initial) + fakeSchemaManager.On("QueryShardingState", mock.Anything).Return(nil, nil) + if len(test.initial.Properties) > 0 { + fakeSchemaManager.On("ReadOnlyClass", test.initial.Class, mock.Anything).Return(test.initial) + } + handler.schemaConfig.MaximumAllowedCollectionsCount = runtime.NewDynamicValue(-1) + _, _, err := handler.AddClass(ctx, nil, test.initial) + assert.Nil(t, err) + store.AddClass(test.initial) + + fakeSchemaManager.On("UpdateClass", mock.Anything, mock.Anything).Return(nil) + err = handler.UpdateClass(ctx, nil, test.initial.Class, test.update) + if err == nil { + err = store.UpdateClass(test.update) + } + + if test.expectedError == nil { + assert.NoError(t, err) + } else { + assert.ErrorContains(t, err, test.expectedError.Error()) + } + }) + } + }) +} + +func TestRestoreClass_WithCircularRefs(t *testing.T) { + // When restoring a class, there could be circular refs between the classes, + // thus any validation that checks if linked classes exist would fail on the + // first class to import. Since we have no control over the order of imports + // when restoring, we need to relax this validation. 
+ + t.Parallel() + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + + classes := []*models.Class{ + { + Class: "Class_A", + Properties: []*models.Property{{ + Name: "to_Class_B", + DataType: []string{"Class_B"}, + }, { + Name: "to_Class_C", + DataType: []string{"Class_C"}, + }}, + Vectorizer: "none", + }, + + { + Class: "Class_B", + Properties: []*models.Property{{ + Name: "to_Class_A", + DataType: []string{"Class_A"}, + }, { + Name: "to_Class_C", + DataType: []string{"Class_C"}, + }}, + Vectorizer: "none", + }, + + { + Class: "Class_C", + Properties: []*models.Property{{ + Name: "to_Class_A", + DataType: []string{"Class_A"}, + }, { + Name: "to_Class_B", + DataType: []string{"Class_B"}, + }}, + Vectorizer: "none", + }, + } + + for _, classRaw := range classes { + schemaBytes, err := json.Marshal(classRaw) + require.Nil(t, err) + + // for this particular test the sharding state does not matter, so we can + // initiate any new sharding state + shardingConfig, err := shardingConfig.ParseConfig(nil, 1) + require.Nil(t, err) + + nodes := mocks.NewMockNodeSelector("node1", "node2") + shardingState, err := sharding.InitState(classRaw.Class, shardingConfig, nodes.LocalName(), nodes.StorageCandidates(), 1, false) + require.Nil(t, err) + + shardingBytes, err := shardingState.JSON() + require.Nil(t, err) + + descriptor := backup.ClassDescriptor{Name: classRaw.Class, Schema: schemaBytes, ShardingState: shardingBytes} + fakeSchemaManager.On("RestoreClass", mock.Anything, mock.Anything).Return(nil) + err = handler.RestoreClass(context.Background(), &descriptor, map[string]string{}, false) + assert.Nil(t, err, "class passes validation") + fakeSchemaManager.AssertExpectations(t) + } +} + +func TestRestoreClass_WithNodeMapping(t *testing.T) { + classes := []*models.Class{{ + Class: "Class_A", + Vectorizer: "none", + }} + + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + + for _, classRaw := range classes { + schemaBytes, err := json.Marshal(classRaw) + 
require.Nil(t, err) + + shardingConfig, err := shardingConfig.ParseConfig(nil, 2) + require.Nil(t, err) + + nodes := mocks.NewMockNodeSelector("node1", "node2") + shardingState, err := sharding.InitState(classRaw.Class, shardingConfig, nodes.LocalName(), nodes.StorageCandidates(), 2, false) + require.Nil(t, err) + + shardingBytes, err := shardingState.JSON() + require.Nil(t, err) + + descriptor := backup.ClassDescriptor{Name: classRaw.Class, Schema: schemaBytes, ShardingState: shardingBytes} + expectedShardingState := shardingState + expectedShardingState.ApplyNodeMapping(map[string]string{"node1": "new-node1"}) + expectedShardingState.SetLocalName("") + fakeSchemaManager.On("RestoreClass", mock.Anything, shardingState).Return(nil) + err = handler.RestoreClass(context.Background(), &descriptor, map[string]string{"node1": "new-node1"}, false) + assert.NoError(t, err) + } +} + +func Test_DeleteClass(t *testing.T) { + t.Parallel() + ctx := context.Background() + + tests := []struct { + name string + classToDelete string + expErr bool + expErrMsg string + existing []*models.Class + expected []*models.Class + }{ + { + name: "class exists", + classToDelete: "C1", + existing: []*models.Class{ + {Class: "C1", VectorIndexType: "hnsw"}, + {Class: "OtherClass", VectorIndexType: "hnsw"}, + }, + expected: []*models.Class{ + classWithDefaultsSet(t, "OtherClass"), + }, + expErr: false, + }, + { + name: "class does not exist", + classToDelete: "C1", + existing: []*models.Class{ + {Class: "OtherClass", VectorIndexType: "hnsw"}, + }, + expected: []*models.Class{ + classWithDefaultsSet(t, "OtherClass"), + }, + expErr: false, + }, + { + name: "class delete should auto transform to GQL convention", + classToDelete: "c1", // all lower case form + existing: []*models.Class{ + {Class: "C1", VectorIndexType: "hnsw"}, // GQL form + {Class: "OtherClass", VectorIndexType: "hnsw"}, + }, + expected: []*models.Class{ + classWithDefaultsSet(t, "OtherClass"), // should still delete `C1` class name 
+ }, + expErr: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + + // NOTE: mocking schema manager's `DeleteClass` (not handler's) + // underlying schemaManager should still work with canonical class name. + canonical := schema.UppercaseClassName(test.classToDelete) + fakeSchemaManager.On("DeleteClass", canonical).Return(nil) + + // but layer above like handler's `DeleteClass` should work independent of case sensitivity. + err := handler.DeleteClass(ctx, nil, test.classToDelete) + if test.expErr { + require.NotNil(t, err) + assert.Contains(t, err.Error(), test.expErrMsg) + } else { + require.Nil(t, err) + } + fakeSchemaManager.AssertExpectations(t) + }) + } +} + +func Test_GetConsistentClass(t *testing.T) { + t.Parallel() + ctx := context.Background() + + tests := []struct { + name string + classToGet string + expErr bool + expErrMsg string + existing []*models.Class + expected *models.Class + }{ + { + name: "class exists", + classToGet: "C1", + existing: []*models.Class{ + {Class: "C1", VectorIndexType: "hnsw"}, + {Class: "OtherClass", VectorIndexType: "hnsw"}, + }, + expected: classWithDefaultsSet(t, "C1"), + expErr: false, + }, + { + name: "class does not exist", + classToGet: "C1", + existing: []*models.Class{ + {Class: "OtherClass", VectorIndexType: "hnsw"}, + }, + expected: &models.Class{}, // empty + expErr: false, + }, + { + name: "class get should auto transform to GQL convention", + classToGet: "c1", // lowercase + existing: []*models.Class{ + {Class: "C1", VectorIndexType: "hnsw"}, // original class is GQL form + {Class: "OtherClass", VectorIndexType: "hnsw"}, + }, + expected: classWithDefaultsSet(t, "C1"), + expErr: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + + // underlying schemaManager should still work with canonical class name. 
+ canonical := schema.UppercaseClassName(test.classToGet) + fakeSchemaManager.On("ReadOnlyClassWithVersion", mock.Anything, canonical, mock.Anything).Return(test.expected, nil) + + // but layer above like `GetConsistentClass` should work independent of case sensitivity. + got, _, err := handler.GetConsistentClass(ctx, nil, test.classToGet, false) + if test.expErr { + require.NotNil(t, err) + assert.Contains(t, err.Error(), test.expErrMsg) + } else { + require.Nil(t, err) + assert.Equal(t, got, test.expected) + } + fakeSchemaManager.AssertExpectations(t) + }) + } +} + +func classWithDefaultsSet(t *testing.T, name string) *models.Class { + class := &models.Class{Class: name, VectorIndexType: "hnsw"} + + sc, err := shardingConfig.ParseConfig(map[string]interface{}{}, 1) + require.Nil(t, err) + + class.ShardingConfig = sc + + class.VectorIndexConfig = fakeVectorConfig{} + class.ReplicationConfig = &models.ReplicationConfig{Factor: 1} + + return class +} + +func Test_AddClass_MultiTenancy(t *testing.T) { + ctx := context.Background() + + t.Run("with MT enabled and no optional settings", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + class := models.Class{ + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + Class: "NewClass", + Vectorizer: "none", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + handler.schemaConfig.MaximumAllowedCollectionsCount = runtime.NewDynamicValue(-1) + c, _, err := handler.AddClass(ctx, nil, &class) + require.Nil(t, err) + assert.False(t, schema.AutoTenantCreationEnabled(c)) + assert.False(t, schema.AutoTenantActivationEnabled(c)) + }) + + t.Run("with MT enabled and all optional settings", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + class := models.Class{ + MultiTenancyConfig: 
&models.MultiTenancyConfig{ + Enabled: true, + AutoTenantCreation: true, + AutoTenantActivation: true, + }, + Class: "NewClass", + Vectorizer: "none", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + handler.schemaConfig.MaximumAllowedCollectionsCount = runtime.NewDynamicValue(-1) + c, _, err := handler.AddClass(ctx, nil, &class) + require.Nil(t, err) + assert.True(t, schema.AutoTenantCreationEnabled(c)) + assert.True(t, schema.AutoTenantActivationEnabled(c)) + }) + + t.Run("with MT disabled, but auto tenant creation on", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + class := models.Class{ + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: false, AutoTenantCreation: true}, + Class: "NewClass", + Vectorizer: "none", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + handler.schemaConfig.MaximumAllowedCollectionsCount = runtime.NewDynamicValue(-1) + _, _, err := handler.AddClass(ctx, nil, &class) + require.NotNil(t, err) + }) + + t.Run("with MT disabled, but auto tenant activation on", func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + class := models.Class{ + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: false, AutoTenantActivation: true}, + Class: "NewClass", + Vectorizer: "none", + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + handler.schemaConfig.MaximumAllowedCollectionsCount = runtime.NewDynamicValue(-1) + _, _, err := handler.AddClass(ctx, nil, &class) + require.NotNil(t, err) + }) +} + +func 
Test_SetClassDefaults(t *testing.T) { + globalCfg := replication.GlobalConfig{MinimumFactor: 3} + tests := []struct { + name string + class *models.Class + expectedError string + expectedFactor int64 + }{ + { + name: "ReplicationConfig is nil", + class: &models.Class{}, + expectedError: "", + expectedFactor: 3, + }, + { + name: "ReplicationConfig factor less than MinimumFactor", + class: &models.Class{ + ReplicationConfig: &models.ReplicationConfig{ + Factor: 2, + }, + }, + expectedError: "invalid replication factor: setup requires a minimum replication factor of 3: got 2", + expectedFactor: 2, + }, + { + name: "ReplicationConfig factor less than 1", + class: &models.Class{ + ReplicationConfig: &models.ReplicationConfig{ + Factor: 0, + }, + }, + expectedError: "", + expectedFactor: 3, + }, + { + name: "ReplicationConfig factor greater than or equal to MinimumFactor", + class: &models.Class{ + ReplicationConfig: &models.ReplicationConfig{ + Factor: 4, + }, + }, + expectedError: "", + expectedFactor: 4, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + handler, _ := newTestHandler(t, &fakeDB{}) + err := handler.setClassDefaults(tt.class, globalCfg) + if tt.expectedError != "" { + assert.EqualError(t, err, tt.expectedError) + } else { + assert.NoError(t, err) + } + assert.Equal(t, tt.expectedFactor, tt.class.ReplicationConfig.Factor) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/executor.go b/platform/dbops/binaries/weaviate-src/usecases/schema/executor.go new file mode 100644 index 0000000000000000000000000000000000000000..ead8b82546eb88befbd96c6ecfbcf005f66fbb00 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/executor.go @@ -0,0 +1,370 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "context" + "errors" + "fmt" + "runtime" + "sync" + + "github.com/sirupsen/logrus" + schemaConfig "github.com/weaviate/weaviate/entities/schema/config" + "golang.org/x/exp/slices" + + "github.com/weaviate/weaviate/cluster/proto/api" + enterrors "github.com/weaviate/weaviate/entities/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +var _NUMCPU = runtime.GOMAXPROCS(0) + +type executor struct { + schemaReader SchemaReader + migrator Migrator + + callbacksLock sync.RWMutex + callbacks []func(updatedSchema schema.SchemaWithAliases) + + logger logrus.FieldLogger + restoreClassDir func(string) error +} + +// NewManager creates a new manager +func NewExecutor(migrator Migrator, sr SchemaReader, + logger logrus.FieldLogger, classBackupDir func(string) error, +) *executor { + return &executor{ + migrator: migrator, + logger: logger, + schemaReader: sr, + restoreClassDir: classBackupDir, + } +} + +func (e *executor) Open(ctx context.Context) error { + return e.migrator.WaitForStartup(ctx) +} + +// ReloadLocalDB reloads the local database using the latest schema. 
+func (e *executor) ReloadLocalDB(ctx context.Context, all []api.UpdateClassRequest) error { + cs := make([]*models.Class, len(all)) + + g, ctx := enterrors.NewErrorGroupWithContextWrapper(e.logger, ctx) + g.SetLimit(_NUMCPU * 2) + + var errList error + var errMutex sync.Mutex + + for i, u := range all { + i, u := i, u + + g.Go(func() error { + e.logger.WithField("index", u.Class.Class).Info("reload local index") + cs[i] = u.Class + + if err := e.migrator.UpdateIndex(ctx, u.Class, u.State); err != nil { + e.logger.WithField("index", u.Class.Class).WithError(err).Error("failed to reload local index") + err := fmt.Errorf("failed to reload local index %q: %w", i, err) + + errMutex.Lock() + errList = errors.Join(errList, err) + errMutex.Unlock() + } + return nil + }) + } + + if err := g.Wait(); err != nil { + return err + } + return errList +} + +func (e *executor) Close(ctx context.Context) error { + return e.migrator.Shutdown(ctx) +} + +func (e *executor) AddClass(pl api.AddClassRequest) error { + ctx := context.Background() + if err := e.migrator.AddClass(ctx, pl.Class); err != nil { + return fmt.Errorf("apply add class: %w", err) + } + return nil +} + +func (e *executor) AddReplicaToShard(class string, shard string, targetNode string) error { + ctx := context.Background() + if replicas, err := e.schemaReader.ShardReplicas(class, shard); err != nil { + return fmt.Errorf("error reading replicas for collection %s shard %s: %w", class, shard, err) + } else if !slices.Contains(replicas, targetNode) { + return fmt.Errorf("replica %s does not exists for collection %s shard %s", targetNode, class, shard) + } + return e.migrator.LoadShard(ctx, class, shard) +} + +func (e *executor) DeleteReplicaFromShard(class string, shard string, targetNode string) error { + ctx := context.Background() + if replicas, err := e.schemaReader.ShardReplicas(class, shard); err != nil { + return fmt.Errorf("error reading replicas for collection %s shard %s: %w", class, shard, err) + } else if 
slices.Contains(replicas, targetNode) { + return fmt.Errorf("replica %s exists for collection %s shard %s", targetNode, class, shard) + } + return e.migrator.DropShard(ctx, class, shard) +} + +func (e *executor) LoadShard(class string, shard string) { + ctx := context.Background() + if err := e.migrator.LoadShard(ctx, class, shard); err != nil { + e.logger.WithFields(logrus.Fields{ + "action": "load_shard", + "class": class, + "shard": shard, + }).WithError(err).Warn("migrator") + } +} + +func (e *executor) ShutdownShard(class string, shard string) { + ctx := context.Background() + if err := e.migrator.ShutdownShard(ctx, class, shard); err != nil { + e.logger.WithFields(logrus.Fields{ + "action": "shutdown_shard", + "class": class, + "shard": shard, + }).WithError(err).Warn("migrator") + } +} + +func (e *executor) DropShard(class string, shard string) { + ctx := context.Background() + if err := e.migrator.DropShard(ctx, class, shard); err != nil { + e.logger.WithFields(logrus.Fields{ + "action": "drop_shard", + "class": class, + "shard": shard, + }).WithError(err).Warn("migrator") + } +} + +// RestoreClassDir restores classes on the filesystem directly from the temporary class backup stored on disk. +// This function is invoked by the Raft store when a restoration request is sent by the backup coordinator. 
+func (e *executor) RestoreClassDir(class string) error { + return e.restoreClassDir(class) +} + +func (e *executor) UpdateClass(req api.UpdateClassRequest) error { + className := req.Class.Class + ctx := context.Background() + + if cfg, ok := req.Class.VectorIndexConfig.(schemaConfig.VectorIndexConfig); ok { + if err := e.migrator.UpdateVectorIndexConfig(ctx, className, cfg); err != nil { + return fmt.Errorf("vector index config update: %w", err) + } + } + + if cfgs := asVectorIndexConfigs(req.Class); cfgs != nil { + if err := e.migrator.UpdateVectorIndexConfigs(ctx, className, cfgs); err != nil { + return fmt.Errorf("vector index configs update: %w", err) + } + } + + if err := e.migrator.UpdateInvertedIndexConfig(ctx, className, + req.Class.InvertedIndexConfig); err != nil { + return fmt.Errorf("inverted index config: %w", err) + } + + if err := e.migrator.UpdateReplicationConfig(ctx, className, req.Class.ReplicationConfig); err != nil { + return fmt.Errorf("update replication config: %w", err) + } + + return nil +} + +func (e *executor) UpdateIndex(req api.UpdateClassRequest) error { + ctx := context.Background() + if err := e.migrator.UpdateIndex(ctx, req.Class, req.State); err != nil { + return err + } + return nil +} + +func (e *executor) DeleteClass(cls string, hasFrozen bool) error { + ctx := context.Background() + if err := e.migrator.DropClass(ctx, cls, hasFrozen); err != nil { + e.logger.WithFields(logrus.Fields{ + "action": "delete_class", + "class": cls, + }).WithError(err).Errorf("migrator") + } + + e.logger.WithFields(logrus.Fields{ + "action": "delete_class", + "class": cls, + }).Debug("deleting class") + + return nil +} + +func (e *executor) AddProperty(className string, req api.AddPropertyRequest) error { + ctx := context.Background() + if err := e.migrator.AddProperty(ctx, className, req.Properties...); err != nil { + return err + } + + e.logger.WithFields(logrus.Fields{ + "action": "add_property", + "class": className, + }).Debug("adding 
property") + return nil +} + +func (e *executor) AddTenants(class string, req *api.AddTenantsRequest) error { + if len(req.Tenants) == 0 { + return nil + } + updates := make([]*CreateTenantPayload, len(req.Tenants)) + for i, p := range req.Tenants { + updates[i] = &CreateTenantPayload{ + Name: p.Name, + Status: p.Status, + } + } + cls := e.schemaReader.ReadOnlyClass(class) + if cls == nil { + return fmt.Errorf("class %q: %w", class, ErrNotFound) + } + ctx := context.Background() + if err := e.migrator.NewTenants(ctx, cls, updates); err != nil { + return fmt.Errorf("migrator.new_tenants: %w", err) + } + return nil +} + +func (e *executor) UpdateTenants(class string, req *api.UpdateTenantsRequest) error { + ctx := context.Background() + cls := e.schemaReader.ReadOnlyClass(class) + if cls == nil { + return fmt.Errorf("class %q: %w", class, ErrNotFound) + } + + updates := make([]*UpdateTenantPayload, 0, len(req.Tenants)) + for _, tu := range req.Tenants { + updates = append(updates, &UpdateTenantPayload{ + Name: tu.Name, + Status: tu.Status, + }) + } + + if err := e.migrator.UpdateTenants(ctx, cls, updates, req.ImplicitUpdateRequest); err != nil { + e.logger.WithFields(logrus.Fields{ + "action": "update_tenants", + "class": class, + }).WithError(err).Error("error updating tenants") + return err + } + return nil +} + +func (e *executor) UpdateTenantsProcess(class string, req *api.TenantProcessRequest) error { + ctx := context.Background() + cls := e.schemaReader.ReadOnlyClass(class) + if cls == nil { + return fmt.Errorf("class %q: %w", class, ErrNotFound) + } + // no error here because that means the process shouldn't be applied to db + if req.TenantsProcesses == nil { + return nil + } + + updates := []*UpdateTenantPayload{} + for idx := range req.TenantsProcesses { + if req.TenantsProcesses[idx] == nil || req.TenantsProcesses[idx].Tenant == nil { + continue + } + updates = append(updates, &UpdateTenantPayload{ + Name: req.TenantsProcesses[idx].Tenant.Name, + Status: 
req.TenantsProcesses[idx].Tenant.Status, + }) + } + + if err := e.migrator.UpdateTenants(ctx, cls, updates, false); err != nil { + e.logger.WithFields(logrus.Fields{ + "action": "update_tenants_process", + "sub-action": "update_tenants", + "class": class, + }).WithError(err).Error("error updating tenants") + return err + } + return nil +} + +func (e *executor) DeleteTenants(class string, tenants []*models.Tenant) error { + ctx := context.Background() + if err := e.migrator.DeleteTenants(ctx, class, tenants); err != nil { + e.logger.WithFields(logrus.Fields{ + "action": "delete_tenants", + "class": class, + }).WithError(err).Error("error deleting tenants") + } + + return nil +} + +func (e *executor) UpdateShardStatus(req *api.UpdateShardStatusRequest) error { + ctx := context.Background() + return e.migrator.UpdateShardStatus(ctx, req.Class, req.Shard, req.Status, req.SchemaVersion) +} + +func (e *executor) GetShardsStatus(class, tenant string) (models.ShardStatusList, error) { + ctx := context.Background() + shardsStatus, err := e.migrator.GetShardsStatus(ctx, class, tenant) + if err != nil { + return nil, err + } + + resp := models.ShardStatusList{} + + for name, status := range shardsStatus { + resp = append(resp, &models.ShardStatusGetResponse{ + Name: name, + Status: status, + }) + } + + return resp, nil +} + +func (e *executor) TriggerSchemaUpdateCallbacks() { + e.callbacksLock.RLock() + defer e.callbacksLock.RUnlock() + + s := e.schemaReader.ReadOnlySchema() + body := schema.SchemaWithAliases{ + Schema: schema.Schema{Objects: &s}, + Aliases: e.schemaReader.Aliases(), + } + for _, cb := range e.callbacks { + cb(body) + } +} + +// RegisterSchemaUpdateCallback allows other usecases to register a primitive +// type update callback. 
The callbacks will be called any time we persist a +// schema update +func (e *executor) RegisterSchemaUpdateCallback(callback func(updatedSchema schema.SchemaWithAliases)) { + e.callbacksLock.Lock() + defer e.callbacksLock.Unlock() + + e.callbacks = append(e.callbacks, callback) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/executor_test.go b/platform/dbops/binaries/weaviate-src/usecases/schema/executor_test.go new file mode 100644 index 0000000000000000000000000000000000000000..f4f5b4545b01a04f58ab373077c854282bff742c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/executor_test.go @@ -0,0 +1,218 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "context" + "errors" + "testing" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + "github.com/weaviate/weaviate/cluster/proto/api" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/vectorindex/flat" +) + +var ( + Anything = mock.Anything + ErrAny = errors.New("any error") +) + +func newMockExecutor(m *fakeMigrator, s *fakeSchemaManager) *executor { + logger, _ := test.NewNullLogger() + x := NewExecutor(m, s, logger, func(string) error { return nil }) + x.RegisterSchemaUpdateCallback(func(updatedSchema schema.SchemaWithAliases) {}) + return x +} + +func TestExecutor(t *testing.T) { + ctx := context.Background() + store := &fakeSchemaManager{} + cls := &models.Class{ + Class: "A", + VectorIndexConfig: flat.NewDefaultUserConfig(), + ReplicationConfig: &models.ReplicationConfig{ + Factor: 1, + }, + } + store.On("ReadOnlySchema").Return(models.Schema{}) 
+ store.On("ReadOnlyClass", "A", mock.Anything).Return(cls) + + t.Run("OpenClose", func(t *testing.T) { + migrator := &fakeMigrator{} + migrator.On("WaitForStartup", ctx).Return(nil) + migrator.On("Shutdown", ctx).Return(nil) + x := newMockExecutor(migrator, store) + assert.Nil(t, x.Open(ctx)) + assert.Nil(t, x.Close(ctx)) + }) + + t.Run("AddClass", func(t *testing.T) { + migrator := &fakeMigrator{} + migrator.On("AddClass", Anything, Anything, Anything).Return(nil) + x := newMockExecutor(migrator, store) + assert.Nil(t, x.AddClass(api.AddClassRequest{})) + }) + t.Run("AddClassWithError", func(t *testing.T) { + migrator := &fakeMigrator{} + migrator.On("AddClass", Anything, Anything, Anything).Return(ErrAny) + x := newMockExecutor(migrator, store) + assert.ErrorIs(t, x.AddClass(api.AddClassRequest{}), ErrAny) + }) + + t.Run("DropClass", func(t *testing.T) { + migrator := &fakeMigrator{} + migrator.On("DropClass", Anything, Anything).Return(nil) + x := newMockExecutor(migrator, store) + assert.Nil(t, x.DeleteClass("A", false)) + }) + t.Run("DropClassWithError", func(t *testing.T) { + migrator := &fakeMigrator{} + migrator.On("DropClass", Anything, Anything).Return(ErrAny) + x := newMockExecutor(migrator, store) + assert.Nil(t, x.DeleteClass("A", false)) + }) + + t.Run("UpdateIndex", func(t *testing.T) { + migrator := &fakeMigrator{} + migrator.On("UpdateVectorIndexConfig", Anything, "A", Anything).Return(nil) + migrator.On("UpdateInvertedIndexConfig", Anything, "A", Anything).Return(nil) + migrator.On("UpdateReplicationConfig", context.Background(), "A", false).Return(nil) + + x := newMockExecutor(migrator, store) + assert.Nil(t, x.UpdateClass(api.UpdateClassRequest{Class: cls})) + }) + + t.Run("UpdateVectorIndexConfig", func(t *testing.T) { + migrator := &fakeMigrator{} + migrator.On("UpdateVectorIndexConfig", Anything, "A", Anything).Return(ErrAny) + migrator.On("UpdateReplicationConfig", context.Background(), "A", false).Return(nil) + + x := 
newMockExecutor(migrator, store) + assert.ErrorIs(t, x.UpdateClass(api.UpdateClassRequest{Class: cls}), ErrAny) + }) + t.Run("UpdateInvertedIndexConfig", func(t *testing.T) { + migrator := &fakeMigrator{} + migrator.On("UpdateVectorIndexConfig", Anything, "A", Anything).Return(nil) + migrator.On("UpdateInvertedIndexConfig", Anything, "A", Anything).Return(ErrAny) + migrator.On("UpdateReplicationConfig", context.Background(), "A", false).Return(nil) + + x := newMockExecutor(migrator, store) + assert.ErrorIs(t, x.UpdateClass(api.UpdateClassRequest{Class: cls}), ErrAny) + }) + + t.Run("AddProperty", func(t *testing.T) { + migrator := &fakeMigrator{} + req := api.AddPropertyRequest{Properties: []*models.Property{}} + migrator.On("AddProperty", Anything, "A", req.Properties).Return(nil) + x := newMockExecutor(migrator, store) + assert.Nil(t, x.AddProperty("A", req)) + }) + + tenants := []*api.Tenant{{Name: "T1"}, {Name: "T2"}} + + t.Run("DeleteTenants", func(t *testing.T) { + migrator := &fakeMigrator{} + tenants := []*models.Tenant{} + migrator.On("DeleteTenants", Anything, "A", tenants).Return(nil) + x := newMockExecutor(migrator, store) + assert.Nil(t, x.DeleteTenants("A", tenants)) + }) + t.Run("DeleteTenantsWithError", func(t *testing.T) { + migrator := &fakeMigrator{} + tenants := []*models.Tenant{} + migrator.On("DeleteTenants", Anything, "A", tenants).Return(ErrAny) + x := newMockExecutor(migrator, store) + assert.Nil(t, x.DeleteTenants("A", tenants)) + }) + + t.Run("UpdateTenants", func(t *testing.T) { + migrator := &fakeMigrator{} + req := &api.UpdateTenantsRequest{Tenants: tenants} + migrator.On("UpdateTenants", Anything, cls, Anything).Return(nil) + x := newMockExecutor(migrator, store) + assert.Nil(t, x.UpdateTenants("A", req)) + }) + + t.Run("UpdateTenantsClassNotFound", func(t *testing.T) { + store := &fakeSchemaManager{} + store.On("ReadOnlyClass", "A", mock.Anything).Return(nil) + + req := &api.UpdateTenantsRequest{Tenants: tenants} + x := 
newMockExecutor(&fakeMigrator{}, store) + assert.ErrorIs(t, x.UpdateTenants("A", req), ErrNotFound) + }) + + t.Run("UpdateTenantsError", func(t *testing.T) { + migrator := &fakeMigrator{} + req := &api.UpdateTenantsRequest{Tenants: tenants} + migrator.On("UpdateTenants", Anything, cls, Anything).Return(ErrAny) + x := newMockExecutor(migrator, store) + assert.ErrorIs(t, x.UpdateTenants("A", req), ErrAny) + }) + + t.Run("AddTenants", func(t *testing.T) { + migrator := &fakeMigrator{} + req := &api.AddTenantsRequest{Tenants: tenants} + migrator.On("NewTenants", Anything, cls, Anything).Return(nil) + x := newMockExecutor(migrator, store) + assert.Nil(t, x.AddTenants("A", req)) + }) + t.Run("AddTenantsEmpty", func(t *testing.T) { + migrator := &fakeMigrator{} + req := &api.AddTenantsRequest{Tenants: nil} + x := newMockExecutor(migrator, store) + assert.Nil(t, x.AddTenants("A", req)) + }) + t.Run("AddTenantsError", func(t *testing.T) { + migrator := &fakeMigrator{} + req := &api.AddTenantsRequest{Tenants: tenants} + migrator.On("NewTenants", Anything, cls, Anything).Return(ErrAny) + x := newMockExecutor(migrator, store) + assert.ErrorIs(t, x.AddTenants("A", req), ErrAny) + }) + t.Run("AddTenantsClassNotFound", func(t *testing.T) { + store := &fakeSchemaManager{} + store.On("ReadOnlyClass", "A", mock.Anything).Return(nil) + req := &api.AddTenantsRequest{Tenants: tenants} + x := newMockExecutor(&fakeMigrator{}, store) + assert.ErrorIs(t, x.AddTenants("A", req), ErrNotFound) + }) + + t.Run("GetShardsStatus", func(t *testing.T) { + migrator := &fakeMigrator{} + status := map[string]string{"A": "B"} + migrator.On("GetShardsStatus", Anything, "A", "").Return(status, nil) + x := newMockExecutor(migrator, store) + _, err := x.GetShardsStatus("A", "") + assert.Nil(t, err) + }) + t.Run("GetShardsStatusError", func(t *testing.T) { + migrator := &fakeMigrator{} + status := map[string]string{"A": "B"} + migrator.On("GetShardsStatus", Anything, "A", "").Return(status, ErrAny) + x := 
newMockExecutor(migrator, store) + _, err := x.GetShardsStatus("A", "") + assert.ErrorIs(t, err, ErrAny) + }) + t.Run("UpdateShardStatus", func(t *testing.T) { + migrator := &fakeMigrator{} + req := &api.UpdateShardStatusRequest{Class: "A", Shard: "S", Status: "ST", SchemaVersion: 123} + migrator.On("UpdateShardStatus", Anything, "A", "S", "ST", uint64(123)).Return(nil) + x := newMockExecutor(migrator, store) + assert.Nil(t, x.UpdateShardStatus(req)) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/fakes_test.go b/platform/dbops/binaries/weaviate-src/usecases/schema/fakes_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e213b3e537ab42f36274712d7c2e44d082eb0a90 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/fakes_test.go @@ -0,0 +1,349 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/stretchr/testify/mock" + command "github.com/weaviate/weaviate/cluster/proto/api" + clusterSchema "github.com/weaviate/weaviate/cluster/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/versioned" + "github.com/weaviate/weaviate/usecases/fakes" + "github.com/weaviate/weaviate/usecases/sharding" +) + +type fakeSchemaManager struct { + mock.Mock + countClassEqual bool +} + +func (f *fakeSchemaManager) AddClass(_ context.Context, cls *models.Class, ss *sharding.State) (uint64, error) { + args := f.Called(cls, ss) + return 0, args.Error(0) +} + +func (f *fakeSchemaManager) RestoreClass(_ context.Context, cls *models.Class, ss *sharding.State) (uint64, error) { + args := f.Called(cls, ss) + return 0, args.Error(0) +} + +func (f *fakeSchemaManager) UpdateClass(_ context.Context, cls *models.Class, ss *sharding.State) (uint64, error) { + args := f.Called(cls, ss) + return 0, args.Error(0) +} + +func (f *fakeSchemaManager) DeleteClass(_ context.Context, name string) (uint64, error) { + args := f.Called(name) + return 0, args.Error(0) +} + +func (f *fakeSchemaManager) AddProperty(_ context.Context, class string, p ...*models.Property) (uint64, error) { + args := f.Called(class, p) + return 0, args.Error(0) +} + +func (f *fakeSchemaManager) UpdateShardStatus(_ context.Context, class, shard, status string) (uint64, error) { + args := f.Called(class, shard, status) + return 0, args.Error(0) +} + +func (f *fakeSchemaManager) AddTenants(_ context.Context, class string, req *command.AddTenantsRequest) (uint64, error) { + args := f.Called(class, req) + return 0, args.Error(0) +} + +func (f *fakeSchemaManager) UpdateTenants(_ context.Context, class string, req *command.UpdateTenantsRequest) (uint64, error) { + args := f.Called(class, req) + return 0, args.Error(0) +} + +func (f *fakeSchemaManager) DeleteTenants(_ 
context.Context, class string, req *command.DeleteTenantsRequest) (uint64, error) { + args := f.Called(class, req) + return 0, args.Error(0) +} + +func (f *fakeSchemaManager) ResolveAlias(alias string) string { + return "" +} + +func (f *fakeSchemaManager) GetAliasesForClass(class string) []*models.Alias { + return nil +} + +func (f *fakeSchemaManager) Join(ctx context.Context, nodeID, raftAddr string, voter bool) error { + args := f.Called(ctx, nodeID, raftAddr, voter) + return args.Error(0) +} + +func (f *fakeSchemaManager) Remove(ctx context.Context, nodeID string) error { + args := f.Called(ctx, nodeID) + return args.Error(0) +} + +func (f *fakeSchemaManager) Stats() map[string]any { + return map[string]any{} +} + +func (f *fakeSchemaManager) StoreSchemaV1() error { + return nil +} + +func (f *fakeSchemaManager) ClassEqual(name string) string { + if f.countClassEqual { + args := f.Called(name) + return args.String(0) + } + return "" +} + +func (f *fakeSchemaManager) MultiTenancy(class string) models.MultiTenancyConfig { + args := f.Called(class) + return args.Get(0).(models.MultiTenancyConfig) +} + +func (f *fakeSchemaManager) MultiTenancyWithVersion(ctx context.Context, class string, version uint64) (models.MultiTenancyConfig, error) { + args := f.Called(ctx, class, version) + return args.Get(0).(models.MultiTenancyConfig), args.Error(1) +} + +func (f *fakeSchemaManager) ClassInfo(class string) (ci clusterSchema.ClassInfo) { + args := f.Called(class) + return args.Get(0).(clusterSchema.ClassInfo) +} + +func (f *fakeSchemaManager) StorageCandidates() []string { + return []string{"node-1"} +} + +func (f *fakeSchemaManager) ClassInfoWithVersion(ctx context.Context, class string, version uint64) (clusterSchema.ClassInfo, error) { + args := f.Called(ctx, class, version) + return args.Get(0).(clusterSchema.ClassInfo), args.Error(1) +} + +func (f *fakeSchemaManager) QuerySchema() (models.Schema, error) { + args := f.Called() + return args.Get(0).(models.Schema), 
args.Error(1) +} + +func (f *fakeSchemaManager) QueryCollectionsCount() (int, error) { + args := f.Called() + return args.Get(0).(int), args.Error(1) +} + +func (f *fakeSchemaManager) QueryReadOnlyClasses(classes ...string) (map[string]versioned.Class, error) { + args := f.Called(classes) + + models := args.Get(0) + if models == nil { + return nil, args.Error(1) + } + + return models.(map[string]versioned.Class), nil +} + +func (f *fakeSchemaManager) QueryClassVersions(classes ...string) (map[string]uint64, error) { + args := f.Called(classes) + + models := args.Get(0) + if models == nil { + return nil, args.Error(1) + } + + return models.(map[string]uint64), nil +} + +func (f *fakeSchemaManager) QueryTenants(class string, tenants []string) ([]*models.Tenant, uint64, error) { + args := f.Called(class, tenants) + return args.Get(0).([]*models.Tenant), 0, args.Error(2) +} + +func (f *fakeSchemaManager) QueryShardOwner(class, shard string) (string, uint64, error) { + args := f.Called(class, shard) + return args.Get(0).(string), 0, args.Error(0) +} + +func (f *fakeSchemaManager) QueryTenantsShards(class string, tenants ...string) (map[string]string, uint64, error) { + args := f.Called(class, tenants) + res := map[string]string{} + for idx := range tenants { + res[args.String(idx+1)] = "" + } + return res, 0, nil +} + +func (f *fakeSchemaManager) QueryShardingState(class string) (*sharding.State, uint64, error) { + args := f.Called(class) + return args.Get(0).(*sharding.State), 0, args.Error(0) +} + +func (f *fakeSchemaManager) ReadOnlyClass(class string) *models.Class { + args := f.Called(class) + model := args.Get(0) + if model == nil { + return nil + } + return model.(*models.Class) +} + +func (f *fakeSchemaManager) ReadOnlyVersionedClass(class string) versioned.Class { + args := f.Called(class) + model := args.Get(0) + return model.(versioned.Class) +} + +func (f *fakeSchemaManager) ReadOnlyClassWithVersion(ctx context.Context, class string, version uint64) 
(*models.Class, error) { + args := f.Called(ctx, class, version) + model := args.Get(0) + if model == nil { + return nil, args.Error(1) + } + return model.(*models.Class), args.Error(1) +} + +func (f *fakeSchemaManager) ReadOnlySchema() models.Schema { + args := f.Called() + return args.Get(0).(models.Schema) +} + +func (f *fakeSchemaManager) Aliases() map[string]string { + return nil +} + +func (f *fakeSchemaManager) ShardReplicas(class, shard string) ([]string, error) { + args := f.Called(class, shard) + return args.Get(0).([]string), args.Error(1) +} + +func (f *fakeSchemaManager) ShardReplicasWithVersion(ctx context.Context, class, shard string, version uint64) ([]string, error) { + args := f.Called(ctx, class, shard, version) + return args.Get(0).([]string), args.Error(1) +} + +func (f *fakeSchemaManager) ShardFromUUID(class string, uuid []byte) string { + args := f.Called(class, uuid) + return args.String(0) +} + +func (f *fakeSchemaManager) ShardFromUUIDWithVersion(ctx context.Context, class string, uuid []byte, version uint64) (string, error) { + args := f.Called(ctx, class, uuid, version) + return args.String(0), args.Error(1) +} + +func (f *fakeSchemaManager) ShardOwner(class, shard string) (string, error) { + args := f.Called(class, shard) + return args.String(0), args.Error(1) +} + +func (f *fakeSchemaManager) ShardOwnerWithVersion(ctx context.Context, class, shard string, version uint64) (string, error) { + args := f.Called(ctx, class, shard, version) + return args.String(0), args.Error(1) +} + +func (f *fakeSchemaManager) TenantsShardsWithVersion(ctx context.Context, version uint64, class string, tenants ...string) (tenantShards map[string]string, err error) { + args := f.Called(ctx, version, class, tenants) + return map[string]string{args.String(0): args.String(1)}, args.Error(2) +} + +func (f *fakeSchemaManager) Read(class string, reader func(*models.Class, *sharding.State) error) error { + args := f.Called(class, reader) + return args.Error(0) +} + 
+func (f *fakeSchemaManager) Shards(class string) ([]string, error) { + args := f.Called(class) + return args.Get(0).([]string), args.Error(1) +} + +func (f *fakeSchemaManager) LocalShards(class string) ([]string, error) { + args := f.Called(class) + return args.Get(0).([]string), args.Error(1) +} + +func (f *fakeSchemaManager) GetShardsStatus(class, tenant string) (models.ShardStatusList, error) { + args := f.Called(class, tenant) + return args.Get(0).(models.ShardStatusList), args.Error(1) +} + +func (f *fakeSchemaManager) WaitForUpdate(ctx context.Context, schemaVersion uint64) error { + return nil +} + +func (f *fakeSchemaManager) CreateAlias(ctx context.Context, alias string, class *models.Class) (uint64, error) { + args := f.Called(ctx, alias, class) + return 0, args.Error(0) +} + +func (f *fakeSchemaManager) ReplaceAlias(ctx context.Context, alias *models.Alias, newClass *models.Class) (uint64, error) { + args := f.Called(ctx, alias, newClass) + return 0, args.Error(0) +} + +func (f *fakeSchemaManager) DeleteAlias(ctx context.Context, alias string) (uint64, error) { + args := f.Called(ctx, alias) + return 0, args.Error(0) +} + +func (f *fakeSchemaManager) GetAlias(ctx context.Context, alias string) (*models.Alias, error) { + args := f.Called(ctx, alias) + return args.Get(0).(*models.Alias), args.Error(1) +} + +func (f *fakeSchemaManager) GetAliases(ctx context.Context, alias string, class *models.Class) ([]*models.Alias, error) { + args := f.Called(ctx, alias, class) + return args.Get(0).([]*models.Alias), args.Error(1) +} + +type fakeStore struct { + collections map[string]*models.Class + parser Parser +} + +func NewFakeStore() *fakeStore { + return &fakeStore{ + collections: make(map[string]*models.Class), + parser: *NewParser(fakes.NewFakeClusterState(), dummyParseVectorConfig, &fakeValidator{}, fakeModulesProvider{}, nil), + } +} + +func (f *fakeStore) AddClass(cls *models.Class) { + f.collections[cls.Class] = cls +} + +func (f *fakeStore) 
UpdateClass(cls *models.Class) error { + bytes, err := json.Marshal(cls) + if err != nil { + return fmt.Errorf("marshal request: %w", err) + } + + cls = f.collections[cls.Class] + if cls == nil { + return ErrNotFound + } + + cls2 := &models.Class{} + if err := json.Unmarshal(bytes, cls2); err != nil { + return fmt.Errorf("unmarshal: %w", err) + } + + u, err := f.parser.ParseClassUpdate(cls, cls2) + if err != nil { + return fmt.Errorf("parse class update: %w", err) + } + + cls.VectorIndexConfig = u.VectorIndexConfig + cls.InvertedIndexConfig = u.InvertedIndexConfig + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/get_class.go b/platform/dbops/binaries/weaviate-src/usecases/schema/get_class.go new file mode 100644 index 0000000000000000000000000000000000000000..856044fe67eddd2355a95e91cbd6ad22168b2a82 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/get_class.go @@ -0,0 +1,174 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/versioned" + configRuntime "github.com/weaviate/weaviate/usecases/config/runtime" +) + +type ClassGetter struct { + parser *Parser + schemaReader SchemaReader + schemaManager SchemaManager + logger logrus.FieldLogger + + collectionRetrievalStrategy *configRuntime.FeatureFlag[string] +} + +func NewClassGetter( + schemaParser *Parser, + schemaManager SchemaManager, + schemaReader SchemaReader, + collectionRetrievalStrategyFF *configRuntime.FeatureFlag[string], + logger logrus.FieldLogger, +) *ClassGetter { + return &ClassGetter{ + parser: schemaParser, + schemaReader: schemaReader, + schemaManager: schemaManager, + logger: logger, + collectionRetrievalStrategy: collectionRetrievalStrategyFF, + } +} + +func (cg *ClassGetter) getClasses(names []string) (map[string]versioned.Class, error) { + switch configRuntime.CollectionRetrievalStrategy(cg.collectionRetrievalStrategy.Get()) { + case configRuntime.LeaderOnly: + return cg.getClassesLeaderOnly(names) + case configRuntime.LeaderOnMismatch: + return cg.getClassesLeaderOnMismatch(names) + case configRuntime.LocalOnly: + return cg.getClassesLocalOnly(names) + + // This can happen if the feature flag gets configured with an invalid strategy + default: + return cg.getClassesLeaderOnly(names) + } +} + +func (cg *ClassGetter) getClassesLeaderOnly(names []string) (map[string]versioned.Class, error) { + vclasses, err := cg.schemaManager.QueryReadOnlyClasses(names...) 
+ if err != nil { + return nil, err + } + + if len(vclasses) == 0 { + return nil, nil + } + + for _, vclass := range vclasses { + if err := cg.parser.ParseClass(vclass.Class); err != nil { + // remove invalid classes + cg.logger.WithFields(logrus.Fields{ + "class": vclass.Class.Class, + "error": err, + }).Warn("parsing class error") + delete(vclasses, vclass.Class.Class) + continue + } + } + + return vclasses, nil +} + +func (cg *ClassGetter) getClassesLocalOnly(names []string) (map[string]versioned.Class, error) { + vclasses := map[string]versioned.Class{} + for _, name := range names { + vc := cg.schemaReader.ReadOnlyVersionedClass(name) + if vc.Class == nil { + cg.logger.WithFields(logrus.Fields{ + "class": name, + }).Debug("could not find class in local schema") + continue + } + vclasses[name] = vc + } + + // Check if we have all the classes from the local schema + if len(vclasses) < len(names) { + missingClasses := []string{} + for _, name := range names { + if _, ok := vclasses[name]; !ok { + missingClasses = append(missingClasses, name) + } + } + cg.logger.WithFields(logrus.Fields{ + "missing": missingClasses, + "suggestion": "This node received a data request for a class that is not present on the local schema on the node. If the class was just updated in the schema and you want to be able to query it immediately consider changing the " + configRuntime.CollectionRetrievalStrategyEnvVariable + " config to \"" + configRuntime.LeaderOnly + "\".", + }).Warn("not all classes found locally") + } + return vclasses, nil +} + +func (cg *ClassGetter) getClassesLeaderOnMismatch(names []string) (map[string]versioned.Class, error) { + classVersions, err := cg.schemaManager.QueryClassVersions(names...) 
 + if err != nil { + return nil, err + } + versionedClassesToReturn := map[string]versioned.Class{} + versionedClassesToQueryFromLeader := []string{} + for _, name := range names { + localVclass := cg.schemaReader.ReadOnlyVersionedClass(name) + // First check if we have the class locally, if not make sure we'll query from leader + if localVclass.Class == nil { + versionedClassesToQueryFromLeader = append(versionedClassesToQueryFromLeader, name) + continue + } + + // We have the class locally, compare the version from leader (if any) and add to query to leader if we have to refresh the version + leaderClassVersion, ok := classVersions[name] + // < leaderClassVersion instead of != because there is some chance that the local version + // could be ahead of the version returned by the leader if the response from the leader was + // delayed and i don't think it would be helpful to query the leader again in that case as + // it would likely return a version that is at least as large as the local version. + if !ok || localVclass.Version < leaderClassVersion { + versionedClassesToQueryFromLeader = append(versionedClassesToQueryFromLeader, name) + continue + } + + // We can use the local class since its version has not changed + versionedClassesToReturn[name] = localVclass + } + if len(versionedClassesToQueryFromLeader) == 0 { + return versionedClassesToReturn, nil + } + + versionedClassesFromLeader, err := cg.schemaManager.QueryReadOnlyClasses(versionedClassesToQueryFromLeader...) + if err != nil || len(versionedClassesFromLeader) == 0 { + cg.logger.WithFields(logrus.Fields{ + "classes": versionedClassesToQueryFromLeader, + "error": err, + "suggestion": "This node received a data request for a class that is not present on the local schema on the node. 
If the class was just updated in the schema and you want to be able to query it immediately consider changing the " + configRuntime.CollectionRetrievalStrategyEnvVariable + " config to \"" + configRuntime.LeaderOnly + "\".", + }).Warn("unable to query classes from leader") + // return as many classes as we could get (to match previous behavior of the caller) + return versionedClassesToReturn, err + } + + // We only need to ParseClass the ones we receive from the leader due to the Class model containing `interface{}` that are broken on + // marshall/unmarshall with gRPC. + for _, vclass := range versionedClassesFromLeader { + if err := cg.parser.ParseClass(vclass.Class); err != nil { + // silently remove invalid classes to match previous behavior + cg.logger.WithFields(logrus.Fields{ + "class": vclass.Class.Class, + "error": err, + }).Warn("parsing class error") + delete(versionedClassesFromLeader, vclass.Class.Class) + continue + } + versionedClassesToReturn[vclass.Class.Class] = vclass + } + + return versionedClassesToReturn, nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/get_class_test.go b/platform/dbops/binaries/weaviate-src/usecases/schema/get_class_test.go new file mode 100644 index 0000000000000000000000000000000000000000..23a7bb35bd50a6a10ea20aab6f5b3ffb7c80b2a5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/get_class_test.go @@ -0,0 +1,169 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "testing" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/versioned" + configRuntime "github.com/weaviate/weaviate/usecases/config/runtime" + shardingCfg "github.com/weaviate/weaviate/usecases/sharding/config" +) + +func TestClassGetterFromSchema(t *testing.T) { + testCases := []struct { + name string + getFromSchema []string + strategy configRuntime.CollectionRetrievalStrategy + schemaExpect func(*fakeSchemaManager) + }{ + { + name: "Read only from leader", + getFromSchema: []string{"class1", "class2", "class3"}, + strategy: configRuntime.LeaderOnly, + schemaExpect: func(f *fakeSchemaManager) { + f.On("QueryReadOnlyClasses", []string{"class1", "class2", "class3"}).Return(map[string]versioned.Class{ + "class1": {Version: 1, Class: &models.Class{Class: "class1", VectorIndexType: "hnsw", ShardingConfig: make(map[string]interface{})}}, + "class2": {Version: 2, Class: &models.Class{Class: "class2", VectorIndexType: "hnsw", ShardingConfig: make(map[string]interface{})}}, + "class3": {Version: 3, Class: &models.Class{Class: "class3", VectorIndexType: "hnsw", ShardingConfig: make(map[string]interface{})}}, + }, nil) + }, + }, + { + name: "Read only from local", + getFromSchema: []string{"class1", "class2", "class3"}, + strategy: configRuntime.LocalOnly, + schemaExpect: func(f *fakeSchemaManager) { + f.On("ReadOnlyVersionedClass", "class1").Return(versioned.Class{Version: 1, Class: &models.Class{Class: "class1", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + f.On("ReadOnlyVersionedClass", "class2").Return(versioned.Class{Version: 2, Class: &models.Class{Class: "class2", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + f.On("ReadOnlyVersionedClass", "class3").Return(versioned.Class{Version: 3, Class: &models.Class{Class: "class3", 
VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + }, + }, + { + name: "Read all from leader if mismatch", + getFromSchema: []string{"class1", "class2", "class3"}, + strategy: configRuntime.LeaderOnMismatch, + schemaExpect: func(f *fakeSchemaManager) { + // First we will query the versions from the leader + f.On("QueryClassVersions", []string{"class1", "class2", "class3"}).Return(map[string]uint64{ + "class1": 4, + "class2": 5, + "class3": 6, + }, nil) + // Then we check the local version + f.On("ReadOnlyVersionedClass", "class1").Return(versioned.Class{Version: 1, Class: &models.Class{Class: "class1", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + f.On("ReadOnlyVersionedClass", "class2").Return(versioned.Class{Version: 2, Class: &models.Class{Class: "class2", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + f.On("ReadOnlyVersionedClass", "class3").Return(versioned.Class{Version: 3, Class: &models.Class{Class: "class3", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + // Then we fetch what we need to update + f.On("QueryReadOnlyClasses", []string{"class1", "class2", "class3"}).Return(map[string]versioned.Class{ + "class1": {Version: 1, Class: &models.Class{Class: "class1", VectorIndexType: "hnsw"}}, + "class2": {Version: 2, Class: &models.Class{Class: "class2", VectorIndexType: "hnsw"}}, + "class3": {Version: 3, Class: &models.Class{Class: "class3", VectorIndexType: "hnsw"}}, + }, nil) + }, + }, + { + name: "Read subset from leader if mismatch", + getFromSchema: []string{"class1", "class2", "class3"}, + strategy: configRuntime.LeaderOnMismatch, + schemaExpect: func(f *fakeSchemaManager) { + // First we will query the versions from the leader + f.On("QueryClassVersions", []string{"class1", "class2", "class3"}).Return(map[string]uint64{ + "class1": 1, + "class2": 2, + "class3": 6, + }, nil) + // Then we check the local version + f.On("ReadOnlyVersionedClass", 
"class1").Return(versioned.Class{Version: 1, Class: &models.Class{Class: "class1", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + f.On("ReadOnlyVersionedClass", "class2").Return(versioned.Class{Version: 2, Class: &models.Class{Class: "class2", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + f.On("ReadOnlyVersionedClass", "class3").Return(versioned.Class{Version: 3, Class: &models.Class{Class: "class3", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + // Then we fetch what we need to update + f.On("QueryReadOnlyClasses", []string{"class3"}).Return(map[string]versioned.Class{ + "class3": {Version: 6, Class: &models.Class{Class: "class3", VectorIndexType: "hnsw"}}, + }, nil) + }, + }, + { + name: "Read from leader local equal", + getFromSchema: []string{"class1", "class2", "class3"}, + strategy: configRuntime.LeaderOnMismatch, + schemaExpect: func(f *fakeSchemaManager) { + // First we will query the versions from the leader + f.On("QueryClassVersions", []string{"class1", "class2", "class3"}).Return(map[string]uint64{ + "class1": 1, + "class2": 2, + "class3": 3, + }, nil) + f.On("ReadOnlyVersionedClass", "class1").Return(versioned.Class{Version: 1, Class: &models.Class{Class: "class1", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + f.On("ReadOnlyVersionedClass", "class2").Return(versioned.Class{Version: 2, Class: &models.Class{Class: "class2", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + f.On("ReadOnlyVersionedClass", "class3").Return(versioned.Class{Version: 3, Class: &models.Class{Class: "class3", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + }, + }, + { + name: "Read from leader local ahead", + getFromSchema: []string{"class1", "class2", "class3"}, + strategy: configRuntime.LeaderOnMismatch, + schemaExpect: func(f *fakeSchemaManager) { + // First we will query the versions from the leader + f.On("QueryClassVersions", []string{"class1", "class2", 
"class3"}).Return(map[string]uint64{ + "class1": 1, + "class2": 2, + "class3": 3, + }, nil) + // Here we assume a delay between the leader returning the version and a change, so local > leader + f.On("ReadOnlyVersionedClass", "class1").Return(versioned.Class{Version: 4, Class: &models.Class{Class: "class1", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + f.On("ReadOnlyVersionedClass", "class2").Return(versioned.Class{Version: 5, Class: &models.Class{Class: "class2", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + f.On("ReadOnlyVersionedClass", "class3").Return(versioned.Class{Version: 6, Class: &models.Class{Class: "class3", VectorIndexType: "hnsw", ShardingConfig: shardingCfg.Config{}}}) + }, + }, + } + + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + // Configure test setup + handler, fakeSchema := newTestHandler(t, &fakeDB{}) + log, _ := test.NewNullLogger() + classGetter := NewClassGetter( + &handler.parser, + fakeSchema, + fakeSchema, + configRuntime.NewFeatureFlag( + "fake-key", + string(testCase.strategy), + nil, + "", + log, + ), + log, + ) + require.NotNil(t, classGetter) + + // Configure expectation + testCase.schemaExpect(fakeSchema) + + // Get class and ensure we receive all classes as expected + classes, err := classGetter.getClasses(testCase.getFromSchema) + require.NoError(t, err) + require.Equal(t, len(testCase.getFromSchema), len(classes)) + for _, c := range classes { + require.Contains(t, testCase.getFromSchema, c.Class.Class) + } + + // Assert all the mock happened as expected + fakeSchema.AssertExpectations(t) + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/handler.go b/platform/dbops/binaries/weaviate-src/usecases/schema/handler.go new file mode 100644 index 0000000000000000000000000000000000000000..b54b83c8950ff43cc34f1149b448526c1488868c --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/usecases/schema/handler.go @@ -0,0 +1,302 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "context" + "fmt" + "os" + "strings" + + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + command "github.com/weaviate/weaviate/cluster/proto/api" + clusterSchema "github.com/weaviate/weaviate/cluster/schema" + entcfg "github.com/weaviate/weaviate/entities/config" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/schema" + schemaConfig "github.com/weaviate/weaviate/entities/schema/config" + "github.com/weaviate/weaviate/entities/versioned" + "github.com/weaviate/weaviate/usecases/auth/authorization" + "github.com/weaviate/weaviate/usecases/auth/authorization/filter" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/sharding" +) + +var ( + ErrNotFound = errors.New("not found") + ErrUnexpectedMultiple = errors.New("unexpected multiple results") +) + +// SchemaManager is responsible for consistent schema operations. +// It allows reading and writing the schema while directly talking to the leader, no matter which node it is. +// It also allows cluster related operations that can only be done on the leader (join/remove/stats/etc...) +// For details about each endpoint see [github.com/weaviate/weaviate/cluster.Raft]. +// For local schema lookup where eventual consistency is acceptable, see [SchemaReader]. +type SchemaManager interface { + // Schema writes operation. 
+ AddClass(ctx context.Context, cls *models.Class, ss *sharding.State) (uint64, error) + RestoreClass(ctx context.Context, cls *models.Class, ss *sharding.State) (uint64, error) + UpdateClass(ctx context.Context, cls *models.Class, ss *sharding.State) (uint64, error) + DeleteClass(ctx context.Context, name string) (uint64, error) + AddProperty(ctx context.Context, class string, p ...*models.Property) (uint64, error) + UpdateShardStatus(ctx context.Context, class, shard, status string) (uint64, error) + AddTenants(ctx context.Context, class string, req *command.AddTenantsRequest) (uint64, error) + UpdateTenants(ctx context.Context, class string, req *command.UpdateTenantsRequest) (uint64, error) + DeleteTenants(ctx context.Context, class string, req *command.DeleteTenantsRequest) (uint64, error) + + // Cluster related operations + Join(_ context.Context, nodeID, raftAddr string, voter bool) error + Remove(_ context.Context, nodeID string) error + Stats() map[string]any + StorageCandidates() []string + StoreSchemaV1() error + + // Strongly consistent schema read. These endpoints will emit a query to the leader to ensure that the data is read + // from an up to date schema. 
+ QueryReadOnlyClasses(names ...string) (map[string]versioned.Class, error) + QuerySchema() (models.Schema, error) + QueryTenants(class string, tenants []string) ([]*models.Tenant, uint64, error) + QueryCollectionsCount() (int, error) + QueryShardOwner(class, shard string) (string, uint64, error) + QueryTenantsShards(class string, tenants ...string) (map[string]string, uint64, error) + QueryShardingState(class string) (*sharding.State, uint64, error) + QueryClassVersions(names ...string) (map[string]uint64, error) + + // Aliases + CreateAlias(ctx context.Context, alias string, class *models.Class) (uint64, error) + ReplaceAlias(ctx context.Context, alias *models.Alias, newClass *models.Class) (uint64, error) + DeleteAlias(ctx context.Context, alias string) (uint64, error) + GetAliases(ctx context.Context, alias string, class *models.Class) ([]*models.Alias, error) + GetAlias(ctx context.Context, alias string) (*models.Alias, error) +} + +// SchemaReader allows reading the local schema with or without using a schema version. +type SchemaReader interface { + // WaitForUpdate ensures that the local schema has caught up to version. + WaitForUpdate(ctx context.Context, version uint64) error + + // These schema reads function reads the metadata immediately present in the local schema and can be eventually + // consistent. + // For details about each endpoint see [github.com/weaviate/weaviate/cluster/schema.SchemaReader]. 
+ ClassEqual(name string) string + MultiTenancy(class string) models.MultiTenancyConfig + ClassInfo(class string) (ci clusterSchema.ClassInfo) + ReadOnlyClass(name string) *models.Class + ReadOnlyVersionedClass(name string) versioned.Class + ReadOnlySchema() models.Schema + Aliases() map[string]string + ShardReplicas(class, shard string) ([]string, error) + ShardFromUUID(class string, uuid []byte) string + ShardOwner(class, shard string) (string, error) + Read(class string, reader func(*models.Class, *sharding.State) error) error + Shards(class string) ([]string, error) + LocalShards(class string) ([]string, error) + GetShardsStatus(class, tenant string) (models.ShardStatusList, error) + ResolveAlias(alias string) string + GetAliasesForClass(class string) []*models.Alias + + // These schema reads function (...WithVersion) return the metadata once the local schema has caught up to the + // version parameter. If version is 0 is behaves exactly the same as eventual consistent reads. + // For details about each endpoint see [github.com/weaviate/weaviate/cluster/schema.VersionedSchemaReader]. 
+ ClassInfoWithVersion(ctx context.Context, class string, version uint64) (clusterSchema.ClassInfo, error) + MultiTenancyWithVersion(ctx context.Context, class string, version uint64) (models.MultiTenancyConfig, error) + ReadOnlyClassWithVersion(ctx context.Context, class string, version uint64) (*models.Class, error) + ShardOwnerWithVersion(ctx context.Context, lass, shard string, version uint64) (string, error) + ShardFromUUIDWithVersion(ctx context.Context, class string, uuid []byte, version uint64) (string, error) + ShardReplicasWithVersion(ctx context.Context, class, shard string, version uint64) ([]string, error) + TenantsShardsWithVersion(ctx context.Context, version uint64, class string, tenants ...string) (map[string]string, error) +} + +type validator interface { + ValidateVectorIndexConfigUpdate(old, updated schemaConfig.VectorIndexConfig) error + ValidateInvertedIndexConfigUpdate(old, updated *models.InvertedIndexConfig) error + ValidateVectorIndexConfigsUpdate(old, updated map[string]schemaConfig.VectorIndexConfig) error +} + +// The handler manages API requests for manipulating class schemas. +// This separation of responsibilities helps decouple these tasks +// from the Manager class, which combines many unrelated functions. +// By delegating these clear responsibilities to the handler, it maintains +// a clean separation from the manager, enhancing code modularity and maintainability. 
+type Handler struct {
+	schemaManager SchemaManager
+	schemaReader  SchemaReader
+
+	// cloud provides offload-to-cloud capabilities.
+	// NOTE(review): exact semantics depend on the module implementation — confirm.
+	cloud modulecapabilities.OffloadCloud
+
+	validator validator
+
+	logger                  logrus.FieldLogger
+	Authorizer              authorization.Authorizer
+	schemaConfig            *config.SchemaHandlerConfig
+	config                  config.Config
+	vectorizerValidator     VectorizerValidator
+	moduleConfig            ModuleConfig
+	clusterState            clusterState
+	configParser            VectorConfigParser
+	invertedConfigValidator InvertedConfigValidator
+	parser                  Parser
+	classGetter             *ClassGetter
+
+	// asyncIndexingEnabled mirrors the ASYNC_INDEXING env var, read once in NewHandler.
+	asyncIndexingEnabled bool
+}
+
+// NewHandler creates a new handler. The error return is always nil in the
+// current implementation but is kept so the constructor can become fallible
+// without breaking callers.
+func NewHandler(
+	schemaReader SchemaReader,
+	schemaManager SchemaManager,
+	validator validator,
+	logger logrus.FieldLogger, authorizer authorization.Authorizer, schemaConfig *config.SchemaHandlerConfig,
+	config config.Config,
+	configParser VectorConfigParser, vectorizerValidator VectorizerValidator,
+	invertedConfigValidator InvertedConfigValidator,
+	moduleConfig ModuleConfig, clusterState clusterState,
+	cloud modulecapabilities.OffloadCloud,
+	parser Parser, classGetter *ClassGetter,
+) (Handler, error) {
+	handler := Handler{
+		config:                  config,
+		schemaConfig:            schemaConfig,
+		schemaReader:            schemaReader,
+		schemaManager:           schemaManager,
+		parser:                  parser,
+		validator:               validator,
+		logger:                  logger,
+		Authorizer:              authorizer,
+		configParser:            configParser,
+		vectorizerValidator:     vectorizerValidator,
+		invertedConfigValidator: invertedConfigValidator,
+		moduleConfig:            moduleConfig,
+		clusterState:            clusterState,
+		cloud:                   cloud,
+		classGetter:             classGetter,
+
+		// Read once at construction; toggling the env var later has no effect.
+		asyncIndexingEnabled: entcfg.Enabled(os.Getenv("ASYNC_INDEXING")),
+	}
+	return handler, nil
+}
+
+// GetConsistentSchema returns the full schema, read either from the local
+// cache (consistency == false) or via a strongly-consistent read through the
+// schema manager (consistency == true). In both cases classes the principal
+// is not authorized to READ are filtered from the result.
+func (h *Handler) GetConsistentSchema(ctx context.Context, principal *models.Principal, consistency bool) (schema.Schema, error) {
+	var fullSchema schema.Schema
+	if !consistency {
+		fullSchema = h.getSchema()
+	} else {
+		consistentSchema, err := h.schemaManager.QuerySchema()
+		if err != nil {
+			return schema.Schema{}, fmt.Errorf("could not read schema with strong consistency: %w", err)
+		}
+		fullSchema = schema.Schema{
+			Objects: &consistentSchema,
+		}
+	}
+
+	// Drop classes the principal may not read; the resource name used for the
+	// authorization check is the class's collection-metadata resource.
+	filteredClasses := filter.New[*models.Class](h.Authorizer, h.config.Authorization.Rbac).Filter(
+		ctx,
+		h.logger,
+		principal,
+		fullSchema.Objects.Classes,
+		authorization.READ,
+		func(class *models.Class) string {
+			return authorization.CollectionsMetadata(class.Class)[0]
+		},
+	)
+
+	return schema.Schema{
+		Objects: &models.Schema{
+			Classes: filteredClasses,
+		},
+	}, nil
+}
+
+// GetSchemaSkipAuth can never be used as a response to a user request as it
+// could leak the schema to an unauthorized user, is intended to be used for
+// non-user triggered processes, such as regular updates / maintenance / etc
+func (h *Handler) GetSchemaSkipAuth() schema.Schema { return h.getSchema() }
+
+// getSchema wraps the reader's eventually-consistent schema copy in a
+// schema.Schema value.
+func (h *Handler) getSchema() schema.Schema {
+	s := h.schemaReader.ReadOnlySchema()
+	return schema.Schema{
+		Objects: &s,
+	}
+}
+
+// Nodes returns the names of all nodes known to the cluster state.
+func (h *Handler) Nodes() []string {
+	return h.clusterState.AllNames()
+}
+
+// NodeName returns the local node's name.
+func (h *Handler) NodeName() string {
+	return h.clusterState.LocalName()
+}
+
+// UpdateShardStatus sets the status of a single shard after verifying the
+// principal holds UPDATE rights on the shard's metadata. It returns the
+// schema version resulting from the update.
+func (h *Handler) UpdateShardStatus(ctx context.Context,
+	principal *models.Principal, class, shard, status string,
+) (uint64, error) {
+	err := h.Authorizer.Authorize(ctx, principal, authorization.UPDATE, authorization.ShardsMetadata(class, shard)...)
+	if err != nil {
+		return 0, err
+	}
+
+	return h.schemaManager.UpdateShardStatus(ctx, class, shard, status)
+}
+
+// ShardsStatus returns the status list for the given class/shard after a READ
+// authorization check.
+// NOTE(review): shard is forwarded as the reader's tenant argument
+// (GetShardsStatus(class, tenant)) — presumably tenant == shard name here;
+// confirm against SchemaReader's contract.
+func (h *Handler) ShardsStatus(ctx context.Context,
+	principal *models.Principal, class, shard string,
+) (models.ShardStatusList, error) {
+	err := h.Authorizer.Authorize(ctx, principal, authorization.READ, authorization.ShardsMetadata(class, shard)...)
+	if err != nil {
+		return nil, err
+	}
+
+	return h.schemaReader.GetShardsStatus(class, shard)
+}
+
+// JoinNode adds the given node to the cluster.
+// Node needs to be reachable via memberlist/gossip.
+// If nodePort is an empty string, nodePort will be the default raft port.
+// If the node is not reachable using memberlist, an error is returned.
+// If joining the node fails, an error is returned.
+func (h *Handler) JoinNode(ctx context.Context, node string, nodePort string, voter bool) error {
+	nodeAddr, ok := h.clusterState.NodeHostname(node)
+	if !ok {
+		return fmt.Errorf("could not resolve addr for node id %v", node)
+	}
+	// Keep only the host part; the caller-supplied (or default) raft port is
+	// appended below.
+	nodeAddr = strings.Split(nodeAddr, ":")[0]
+
+	if nodePort == "" {
+		nodePort = fmt.Sprintf("%d", config.DefaultRaftPort)
+	}
+
+	if err := h.schemaManager.Join(ctx, node, nodeAddr+":"+nodePort, voter); err != nil {
+		return fmt.Errorf("node failed to join cluster: %w", err)
+	}
+	return nil
+}
+
+// RemoveNode removes the given node from the cluster.
+func (h *Handler) RemoveNode(ctx context.Context, node string) error {
+	if err := h.schemaManager.Remove(ctx, node); err != nil {
+		return fmt.Errorf("node failed to leave cluster: %w", err)
+	}
+	return nil
+}
+
+// Statistics is used to return a map of various internal stats. This should only be used for informative purposes or debugging.
+func (h *Handler) Statistics() map[string]any {
+	return h.schemaManager.Stats()
+}
+
+// StoreSchemaV1 persists the schema in the legacy (v1) representation via the
+// schema manager.
+// NOTE(review): presumably kept only for backward compatibility — confirm.
+func (h *Handler) StoreSchemaV1() error {
+	return h.schemaManager.StoreSchemaV1()
+}
diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/handler_test.go b/platform/dbops/binaries/weaviate-src/usecases/schema/handler_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..d3959aa80e2b48564a68c65fef86e156713258aa
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/usecases/schema/handler_test.go
@@ -0,0 +1,403 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/config/runtime" +) + +var schemaTests = []struct { + name string + fn func(*testing.T, *Handler, *fakeSchemaManager) +}{ + {name: "AddObjectClass", fn: testAddObjectClass}, + {name: "AddObjectClassWithExplicitVectorizer", fn: testAddObjectClassExplicitVectorizer}, + {name: "AddObjectClassWithImplicitVectorizer", fn: testAddObjectClassImplicitVectorizer}, + {name: "AddObjectClassWithWrongVectorizer", fn: testAddObjectClassWrongVectorizer}, + {name: "AddObjectClassWithWrongIndexType", fn: testAddObjectClassWrongIndexType}, + {name: "RemoveObjectClass", fn: testRemoveObjectClass}, + {name: "CantAddSameClassTwice", fn: testCantAddSameClassTwice}, + {name: "CantAddSameClassTwiceDifferentKind", fn: testCantAddSameClassTwiceDifferentKinds}, + {name: "AddPropertyDuringCreation", fn: testAddPropertyDuringCreation}, + {name: "AddPropertyWithTargetVectorConfig", fn: testAddPropertyWithTargetVectorConfig}, + {name: "AddInvalidPropertyDuringCreation", fn: testAddInvalidPropertyDuringCreation}, + {name: "AddInvalidPropertyWithEmptyDataTypeDuringCreation", fn: testAddInvalidPropertyWithEmptyDataTypeDuringCreation}, + {name: "DropProperty", fn: testDropProperty}, +} + +func testAddObjectClass(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + t.Parallel() + + class := &models.Class{ + Class: "Car", + Properties: []*models.Property{{ + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "dummy", + }}, + Vectorizer: "model1", + VectorIndexConfig: map[string]interface{}{}, + ReplicationConfig: 
&models.ReplicationConfig{Factor: 1}, + } + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + _, _, err := handler.AddClass(context.Background(), nil, class) + assert.Nil(t, err) +} + +func testAddObjectClassExplicitVectorizer(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + t.Parallel() + + class := &models.Class{ + Vectorizer: config.VectorizerModuleText2VecContextionary, + VectorIndexType: "hnsw", + Class: "Car", + Properties: []*models.Property{{ + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "dummy", + }}, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + _, _, err := handler.AddClass(context.Background(), nil, class) + assert.Nil(t, err) +} + +func testAddObjectClassImplicitVectorizer(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + t.Parallel() + handler.config.DefaultVectorizerModule = config.VectorizerModuleText2VecContextionary + class := &models.Class{ + Class: "Car", + Properties: []*models.Property{{ + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "dummy", + }}, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + _, _, err := handler.AddClass(context.Background(), nil, class) + assert.Nil(t, err) +} + +func testAddObjectClassWrongVectorizer(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + t.Parallel() + + class := &models.Class{ + Class: "Car", + Vectorizer: "vectorizer-5000000", + Properties: []*models.Property{{ + DataType: schema.DataTypeText.PropString(), + Tokenization: 
models.PropertyTokenizationWhitespace, + Name: "dummy", + }}, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + _, _, err := handler.AddClass(context.Background(), nil, class) + assert.Error(t, err) +} + +func testAddObjectClassWrongIndexType(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + t.Parallel() + + class := &models.Class{ + Class: "Car", + VectorIndexType: "vector-index-2-million", + Properties: []*models.Property{{ + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "dummy", + }}, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + _, _, err := handler.AddClass(context.Background(), nil, class) + require.NotNil(t, err) + assert.Equal(t, "unrecognized or unsupported vectorIndexType \"vector-index-2-million\"", err.Error()) +} + +func testRemoveObjectClass(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + t.Parallel() + + class := &models.Class{ + Class: "Car", + Vectorizer: "text2vec-contextionary", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + _, _, err := handler.AddClass(context.Background(), nil, class) + require.Nil(t, err) + + // Now delete the class + fakeSchemaManager.On("DeleteClass", "Car").Return(nil) + err = handler.DeleteClass(context.Background(), nil, "Car") + assert.Nil(t, err) +} + +func testCantAddSameClassTwice(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + t.Parallel() + + reset := fakeSchemaManager.On("ReadOnlySchema").Return(models.Schema{}) + + class := &models.Class{ + Class: "Car", + Vectorizer: "text2vec-contextionary", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": 
map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + _, _, err := handler.AddClass(context.Background(), nil, class) + assert.Nil(t, err) + + // Reset schema to simulate the class has been added + reset.Unset() + class = &models.Class{ + Class: "Car", + Vectorizer: "text2vec-contextionary", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + fakeSchemaManager.ExpectedCalls = fakeSchemaManager.ExpectedCalls[:0] + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(ErrNotFound) + + // Add it again + _, _, err = handler.AddClass(context.Background(), nil, class) + assert.NotNil(t, err) +} + +func testCantAddSameClassTwiceDifferentKinds(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + t.Parallel() + ctx := context.Background() + class := &models.Class{ + Class: "Car", + Vectorizer: "text2vec-contextionary", + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizeClassName": true, + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + _, _, err := handler.AddClass(ctx, nil, class) + assert.Nil(t, err) + + class.ModuleConfig = map[string]interface{}{ + "my-module1": map[string]interface{}{ + "my-setting": "some-value", + }, + } + + // Add it again, but with a different kind. 
+ fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + _, _, err = handler.AddClass(context.Background(), nil, class) + assert.NotNil(t, err) +} + +func testAddPropertyDuringCreation(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + t.Parallel() + + vFalse := false + vTrue := true + + properties := []*models.Property{ + { + Name: "color", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "colorRaw1", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + }, + { + Name: "colorRaw2", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + IndexFilterable: &vTrue, + IndexSearchable: &vFalse, + }, + { + Name: "colorRaw3", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + IndexFilterable: &vFalse, + IndexSearchable: &vTrue, + }, + { + Name: "colorRaw4", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + IndexFilterable: &vTrue, + IndexSearchable: &vTrue, + }, + { + Name: "content", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "allDefault", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + } + + class := &models.Class{ + Class: "Car", + Properties: properties, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + _, _, err := handler.AddClass(context.Background(), nil, class) + assert.Nil(t, err) +} + +func testAddPropertyWithTargetVectorConfig(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + t.Parallel() + + class := 
&models.Class{ + Class: "Car", + Properties: []*models.Property{ + { + Name: "color", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + ModuleConfig: map[string]interface{}{ + "text2vec-contextionary": map[string]interface{}{ + "vectorizePropertyName": true, + }, + }, + }, + }, + VectorConfig: map[string]models.VectorConfig{ + "vec1": { + Vectorizer: map[string]interface{}{"text2vec-contextionary": map[string]interface{}{}}, + VectorIndexType: "flat", + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + _, _, err := handler.AddClass(context.Background(), nil, class) + require.NoError(t, err) +} + +func testAddInvalidPropertyDuringCreation(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + t.Parallel() + + properties := []*models.Property{ + {Name: "color", DataType: []string{"blurp"}}, + } + + _, _, err := handler.AddClass(context.Background(), nil, &models.Class{ + Class: "Car", + Properties: properties, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }) + assert.NotNil(t, err) +} + +func testAddInvalidPropertyWithEmptyDataTypeDuringCreation(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + t.Parallel() + + properties := []*models.Property{ + {Name: "color", DataType: []string{""}}, + } + + _, _, err := handler.AddClass(context.Background(), nil, &models.Class{ + Class: "Car", + Properties: properties, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }) + assert.NotNil(t, err) +} + +func testDropProperty(t *testing.T, handler *Handler, fakeSchemaManager *fakeSchemaManager) { + // TODO: https://github.com/weaviate/weaviate/issues/973 + // Remove skip + + t.Skip() + + t.Parallel() + + fakeSchemaManager.On("ReadOnlySchema").Return(models.Schema{}) + + properties := []*models.Property{ + {Name: 
"color", DataType: schema.DataTypeText.PropString(), Tokenization: models.PropertyTokenizationWhitespace}, + } + class := &models.Class{ + Class: "Car", + Properties: properties, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + } + fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil) + fakeSchemaManager.On("AddClass", class, mock.Anything).Return(nil) + _, _, err := handler.AddClass(context.Background(), nil, class) + assert.Nil(t, err) + + // Now drop the property + handler.DeleteClassProperty(context.Background(), nil, "Car", "color") + // TODO: add the mock necessary to verify that the property is deleted +} + +// This grant parent test setups up the temporary directory needed for the tests. +func TestSchema(t *testing.T) { + t.Run("TestSchema", func(t *testing.T) { + for _, testCase := range schemaTests { + // Run each test independently with their own handler + t.Run(testCase.name, func(t *testing.T) { + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + handler.schemaConfig.MaximumAllowedCollectionsCount = runtime.NewDynamicValue(-1) + defer fakeSchemaManager.AssertExpectations(t) + testCase.fn(t, handler, fakeSchemaManager) + }) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/helpers_test.go b/platform/dbops/binaries/weaviate-src/usecases/schema/helpers_test.go new file mode 100644 index 0000000000000000000000000000000000000000..bf73c805fd6d486878f73751512c4091681789ed --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/helpers_test.go @@ -0,0 +1,391 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + + command "github.com/weaviate/weaviate/cluster/proto/api" + clusterSchema "github.com/weaviate/weaviate/cluster/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + schemaConfig "github.com/weaviate/weaviate/entities/schema/config" + "github.com/weaviate/weaviate/entities/vectorindex/common" + "github.com/weaviate/weaviate/usecases/auth/authorization" + "github.com/weaviate/weaviate/usecases/auth/authorization/mocks" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/config/runtime" + "github.com/weaviate/weaviate/usecases/fakes" + "github.com/weaviate/weaviate/usecases/sharding" +) + +func newTestHandler(t *testing.T, db clusterSchema.Indexer) (*Handler, *fakeSchemaManager) { + schemaManager := &fakeSchemaManager{} + logger, _ := test.NewNullLogger() + vectorizerValidator := &fakeVectorizerValidator{ + valid: []string{"text2vec-contextionary", "model1", "model2"}, + } + cfg := config.Config{ + DefaultVectorizerModule: config.VectorizerModuleNone, + DefaultVectorDistanceMetric: "cosine", + } + fakeClusterState := fakes.NewFakeClusterState() + fakeValidator := &fakeValidator{} + schemaParser := NewParser(fakeClusterState, dummyParseVectorConfig, fakeValidator, fakeModulesProvider{}, nil) + handler, err := NewHandler( + schemaManager, schemaManager, fakeValidator, logger, mocks.NewMockAuthorizer(), + &cfg.SchemaHandlerConfig, cfg, dummyParseVectorConfig, vectorizerValidator, dummyValidateInvertedConfig, + &fakeModuleConfig{}, fakeClusterState, nil, *schemaParser, nil) + require.NoError(t, err) + handler.schemaConfig.MaximumAllowedCollectionsCount = runtime.NewDynamicValue(-1) + return &handler, schemaManager +} + +func 
newTestHandlerWithCustomAuthorizer(t *testing.T, db clusterSchema.Indexer, authorizer authorization.Authorizer) (*Handler, *fakeSchemaManager) { + cfg := config.Config{} + metaHandler := &fakeSchemaManager{} + logger, _ := test.NewNullLogger() + vectorizerValidator := &fakeVectorizerValidator{ + valid: []string{ + "model1", "model2", + }, + } + fakeClusterState := fakes.NewFakeClusterState() + fakeValidator := &fakeValidator{} + schemaParser := NewParser(fakeClusterState, dummyParseVectorConfig, fakeValidator, nil, nil) + handler, err := NewHandler( + metaHandler, metaHandler, fakeValidator, logger, authorizer, + &cfg.SchemaHandlerConfig, cfg, dummyParseVectorConfig, vectorizerValidator, dummyValidateInvertedConfig, + &fakeModuleConfig{}, fakeClusterState, nil, *schemaParser, nil) + require.Nil(t, err) + return &handler, metaHandler +} + +type fakeDB struct { + mock.Mock +} + +func (f *fakeDB) Open(context.Context) error { + return nil +} + +func (f *fakeDB) Close(context.Context) error { + return nil +} + +func (f *fakeDB) AddClass(cmd command.AddClassRequest) error { + return nil +} + +func (f *fakeDB) RestoreClassDir(class string) error { + return nil +} + +func (f *fakeDB) AddReplicaToShard(class string, shard string, targetNode string) error { + return nil +} + +func (f *fakeDB) DeleteReplicaFromShard(class string, shard string, targetNode string) error { + return nil +} + +func (f *fakeDB) LoadShard(class string, shard string) { +} + +func (f *fakeDB) DropShard(class string, shard string) { +} + +func (f *fakeDB) ShutdownShard(class string, shard string) { +} + +func (f *fakeDB) UpdateClass(cmd command.UpdateClassRequest) error { + return nil +} + +func (f *fakeDB) UpdateIndex(cmd command.UpdateClassRequest) error { + return nil +} + +func (f *fakeDB) ReloadLocalDB(ctx context.Context, all []command.UpdateClassRequest) error { + return nil +} + +func (f *fakeDB) DeleteClass(class string, hasFrozen bool) error { + return nil +} + +func (f *fakeDB) 
AddProperty(prop string, cmd command.AddPropertyRequest) error { + return nil +} + +func (f *fakeDB) AddTenants(class string, cmd *command.AddTenantsRequest) error { + return nil +} + +func (f *fakeDB) UpdateTenants(class string, cmd *command.UpdateTenantsRequest) error { + return nil +} + +func (f *fakeDB) UpdateTenantsProcess(class string, req *command.TenantProcessRequest) error { + return nil +} + +func (f *fakeDB) DeleteTenants(class string, tenants []*models.Tenant) error { + return nil +} + +func (f *fakeDB) UpdateShardStatus(cmd *command.UpdateShardStatusRequest) error { + return nil +} + +func (f *fakeDB) GetShardsStatus(class, tenant string) (models.ShardStatusList, error) { + args := f.Called(class, tenant) + return args.Get(0).(models.ShardStatusList), nil +} + +func (f *fakeDB) TriggerSchemaUpdateCallbacks() { + f.Called() +} + +type fakeValidator struct{} + +func (f fakeValidator) ValidateVectorIndexConfigUpdate( + old, updated schemaConfig.VectorIndexConfig, +) error { + return nil +} + +func (f fakeValidator) ValidateInvertedIndexConfigUpdate( + old, updated *models.InvertedIndexConfig, +) error { + return nil +} + +func (fakeValidator) ValidateVectorIndexConfigsUpdate(old, updated map[string]schemaConfig.VectorIndexConfig, +) error { + return nil +} + +type fakeModuleConfig struct{} + +func (f *fakeModuleConfig) SetClassDefaults(class *models.Class) { + defaultConfig := map[string]interface{}{ + "my-module1": map[string]interface{}{ + "my-setting": "default-value", + }, + } + + asMap, ok := class.ModuleConfig.(map[string]interface{}) + if !ok { + class.ModuleConfig = defaultConfig + return + } + + module, ok := asMap["my-module1"] + if !ok { + class.ModuleConfig = defaultConfig + return + } + + asMap, ok = module.(map[string]interface{}) + if !ok { + class.ModuleConfig = defaultConfig + return + } + + if _, ok := asMap["my-setting"]; !ok { + asMap["my-setting"] = "default-value" + defaultConfig["my-module1"] = asMap + class.ModuleConfig = 
defaultConfig + } +} + +func (f *fakeModuleConfig) SetSinglePropertyDefaults(class *models.Class, + prop ...*models.Property, +) { +} + +func (f *fakeModuleConfig) ValidateClass(ctx context.Context, class *models.Class) error { + return nil +} + +func (f *fakeModuleConfig) GetByName(name string) modulecapabilities.Module { + return nil +} + +func (f *fakeModuleConfig) IsGenerative(moduleName string) bool { + return strings.Contains(moduleName, "generative") +} + +func (f *fakeModuleConfig) IsReranker(moduleName string) bool { + return strings.Contains(moduleName, "reranker") +} + +func (f *fakeModuleConfig) IsMultiVector(moduleName string) bool { + return strings.Contains(moduleName, "colbert") +} + +type fakeVectorizerValidator struct { + valid []string +} + +func (f *fakeVectorizerValidator) ValidateVectorizer(moduleName string) error { + for _, valid := range f.valid { + if moduleName == valid { + return nil + } + } + + return fmt.Errorf("invalid vectorizer %q", moduleName) +} + +type fakeVectorConfig struct { + raw interface{} +} + +func (f fakeVectorConfig) IndexType() string { + return "fake" +} + +func (f fakeVectorConfig) DistanceName() string { + return common.DistanceCosine +} + +func (f fakeVectorConfig) IsMultiVector() bool { + return false +} + +func dummyParseVectorConfig(in interface{}, vectorIndexType string, isMultiVector bool) (schemaConfig.VectorIndexConfig, error) { + return fakeVectorConfig{raw: in}, nil +} + +func dummyValidateInvertedConfig(in *models.InvertedIndexConfig) error { + return nil +} + +type fakeMigrator struct { + mock.Mock +} + +func (f *fakeMigrator) GetShardsQueueSize(ctx context.Context, className, tenant string) (map[string]int64, error) { + return nil, nil +} + +func (f *fakeMigrator) AddClass(ctx context.Context, cls *models.Class) error { + args := f.Called(ctx, cls) + return args.Error(0) +} + +func (f *fakeMigrator) DropClass(ctx context.Context, className string, hasFrozen bool) error { + args := f.Called(ctx, 
className) + return args.Error(0) +} + +func (f *fakeMigrator) AddProperty(ctx context.Context, className string, prop ...*models.Property) error { + args := f.Called(ctx, className, prop) + return args.Error(0) +} + +func (f *fakeMigrator) LoadShard(ctx context.Context, class string, shard string) error { + args := f.Called(ctx, class, shard) + return args.Error(0) +} + +func (f *fakeMigrator) DropShard(ctx context.Context, class string, shard string) error { + args := f.Called(ctx, class, shard) + return args.Error(0) +} + +func (f *fakeMigrator) ShutdownShard(ctx context.Context, class string, shard string) error { + args := f.Called(ctx, class, shard) + return args.Error(0) +} + +func (f *fakeMigrator) UpdateProperty(ctx context.Context, className string, propName string, newName *string) error { + return nil +} + +func (f *fakeMigrator) NewTenants(ctx context.Context, class *models.Class, creates []*CreateTenantPayload) error { + args := f.Called(ctx, class, creates) + return args.Error(0) +} + +func (f *fakeMigrator) UpdateTenants(ctx context.Context, class *models.Class, updates []*UpdateTenantPayload, implicitUpdate bool) error { + args := f.Called(ctx, class, updates) + return args.Error(0) +} + +func (f *fakeMigrator) DeleteTenants(ctx context.Context, class string, tenants []*models.Tenant) error { + args := f.Called(ctx, class, tenants) + return args.Error(0) +} + +func (f *fakeMigrator) GetShardsStatus(ctx context.Context, className, tenant string) (map[string]string, error) { + args := f.Called(ctx, className, tenant) + return args.Get(0).(map[string]string), args.Error(1) +} + +func (f *fakeMigrator) UpdateShardStatus(ctx context.Context, className, shardName, targetStatus string, schemaVersion uint64) error { + args := f.Called(ctx, className, shardName, targetStatus, schemaVersion) + return args.Error(0) +} + +func (f *fakeMigrator) UpdateVectorIndexConfig(ctx context.Context, className string, updated schemaConfig.VectorIndexConfig) error { + args 
:= f.Called(ctx, className, updated) + return args.Error(0) +} + +func (*fakeMigrator) ValidateVectorIndexConfigsUpdate(old, updated map[string]schemaConfig.VectorIndexConfig, +) error { + return nil +} + +func (*fakeMigrator) UpdateVectorIndexConfigs(ctx context.Context, className string, + updated map[string]schemaConfig.VectorIndexConfig, +) error { + return nil +} + +func (*fakeMigrator) ValidateInvertedIndexConfigUpdate(old, updated *models.InvertedIndexConfig) error { + return nil +} + +func (f *fakeMigrator) UpdateInvertedIndexConfig(ctx context.Context, className string, updated *models.InvertedIndexConfig) error { + args := f.Called(ctx, className, updated) + return args.Error(0) +} + +func (f *fakeMigrator) UpdateReplicationConfig(ctx context.Context, className string, cfg *models.ReplicationConfig) error { + return nil +} + +func (f *fakeMigrator) WaitForStartup(ctx context.Context) error { + args := f.Called(ctx) + return args.Error(0) +} + +func (f *fakeMigrator) Shutdown(ctx context.Context) error { + args := f.Called(ctx) + return args.Error(0) +} + +func (f *fakeMigrator) UpdateIndex(ctx context.Context, class *models.Class, shardingState *sharding.State) error { + args := f.Called(class, shardingState) + return args.Error(0) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/manager.go b/platform/dbops/binaries/weaviate-src/usecases/schema/manager.go new file mode 100644 index 0000000000000000000000000000000000000000..4da44e54521bf5eb7b55908bc54fd54d3e184355 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/manager.go @@ -0,0 +1,449 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
//
// CONTACT: hello@weaviate.io
//

package schema

import (
	"context"
	"fmt"
	"slices"
	"strings"
	"sync"

	"github.com/sirupsen/logrus"
	"github.com/weaviate/weaviate/cluster/proto/api"
	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/modulecapabilities"
	"github.com/weaviate/weaviate/entities/schema"
	schemaConfig "github.com/weaviate/weaviate/entities/schema/config"
	"github.com/weaviate/weaviate/usecases/auth/authorization"
	"github.com/weaviate/weaviate/usecases/cluster"
	"github.com/weaviate/weaviate/usecases/config"
	configRuntime "github.com/weaviate/weaviate/usecases/config/runtime"
	"github.com/weaviate/weaviate/usecases/sharding"
)

// Manager manages schema changes at a use-case level, i.e. agnostic of
// underlying databases or storage providers.
type Manager struct {
	validator    validator           // class/property validation
	repo         SchemaStore         // deprecated persistence; kept for backward compatibility
	logger       logrus.FieldLogger
	Authorizer   authorization.Authorizer
	clusterState clusterState // cluster membership / health lookups

	sync.RWMutex
	// The handler is responsible for well-defined tasks and should be decoupled from the manager.
	// This enables API requests to be directed straight to the handler without the need to pass through the manager.
	// For more context, refer to the handler's definition.
	Handler

	SchemaReader
}

// VectorConfigParser parses a raw (JSON-ish) vector index config into the
// typed schemaConfig.VectorIndexConfig for the given index type.
type VectorConfigParser func(in interface{}, vectorIndexType string, isMultiVector bool) (schemaConfig.VectorIndexConfig, error)

// InvertedConfigValidator validates an inverted index config.
type InvertedConfigValidator func(in *models.InvertedIndexConfig) error

// SchemaGetter is the read-only view of the schema used by other use cases.
type SchemaGetter interface {
	GetSchemaSkipAuth() schema.Schema
	ReadOnlyClass(string) *models.Class
	ResolveAlias(string) string
	GetAliasesForClass(class string) []*models.Alias
	Nodes() []string
	NodeName() string
	ClusterHealthScore() int
	ResolveParentNodes(string, string) (map[string]string, error)
	Statistics() map[string]any

	ShardOwner(class, shard string) (string, error)
	TenantsShards(ctx context.Context, class string, tenants ...string) (map[string]string, error)
	OptimisticTenantStatus(ctx context.Context, class string, tenants string) (map[string]string, error)
	ShardFromUUID(class string, uuid []byte) string
	ShardReplicas(class, shard string) ([]string, error)
}

// VectorizerValidator checks that a vectorizer module exists/is usable.
type VectorizerValidator interface {
	ValidateVectorizer(moduleName string) error
}

// ModuleConfig applies and validates module-specific class configuration.
type ModuleConfig interface {
	SetClassDefaults(class *models.Class)
	SetSinglePropertyDefaults(class *models.Class, props ...*models.Property)
	ValidateClass(ctx context.Context, class *models.Class) error
	GetByName(name string) modulecapabilities.Module
	IsGenerative(string) bool
	IsReranker(string) bool
	IsMultiVector(string) bool
}

// State is a cached copy of the schema that can also be saved into a remote
// storage, as specified by Repo
type State struct {
	ObjectSchema  *models.Schema `json:"object"`
	ShardingState map[string]*sharding.State
}

// NewState returns a new state with room for nClasses classes
func NewState(nClasses int) State {
	return State{
		ObjectSchema: &models.Schema{
			Classes: make([]*models.Class, 0, nClasses),
		},
		ShardingState: make(map[string]*sharding.State, nClasses),
	}
}

// EqualEnough reports whether two states match on class count, sharding-state
// keys, physical shard keys, and per-shard activity status. It deliberately
// ignores other class/shard details.
func (s State) EqualEnough(other *State) bool {
	// Same number of classes
	eqClassLen :=
		len(s.ObjectSchema.Classes) == len(other.ObjectSchema.Classes)
	if !eqClassLen {
		return false
	}

	// Same sharding state length
	eqSSLen := len(s.ShardingState) == len(other.ShardingState)
	if !eqSSLen {
		return false
	}

	for cls, ss1ss := range s.ShardingState {
		// Same sharding state keys
		ss2ss, ok := other.ShardingState[cls]
		if !ok {
			return false
		}

		// Same number of physical shards
		eqPhysLen := len(ss1ss.Physical) == len(ss2ss.Physical)
		if !eqPhysLen {
			return false
		}

		for shard, ss1phys := range ss1ss.Physical {
			// Same physical shard contents and status
			ss2phys, ok := ss2ss.Physical[shard]
			if !ok {
				return false
			}
			eqActivStat := ss1phys.ActivityStatus() == ss2phys.ActivityStatus()
			if !eqActivStat {
				return false
			}
		}
	}

	return true
}

// SchemaStore is responsible for persisting the schema
// by providing support for both partial and complete schema updates
//
// Deprecated: instead schema now is persistent via RAFT
// see : usecase/schema/handler.go & cluster/store/store.go
// Load and save are left to support backward compatibility
type SchemaStore interface {
	// Save saves the complete schema to the persistent storage
	Save(ctx context.Context, schema State) error

	// Load loads the complete schema from the persistent storage
	Load(context.Context) (State, error)
}

// KeyValuePair is used to serialize shards updates
type KeyValuePair struct {
	Key   string
	Value []byte
}

// ClassPayload is used to serialize class updates
type ClassPayload struct {
	Name          string
	Metadata      []byte
	ShardingState []byte
	Shards        []KeyValuePair
	ReplaceShards bool
	Error         error
}

// clusterState is the subset of cluster information the manager needs.
type clusterState interface {
	cluster.NodeSelector
	// Hostnames initializes a broadcast
	Hostnames() []string

	// AllNames initializes shard distribution across nodes
	AllNames() []string
	NodeCount() int

	// ClusterHealthScore gets the whole cluster health, the lower number the better
	ClusterHealthScore() int

	SchemaSyncIgnored() bool
	SkipSchemaRepair() bool
}

// NewManager creates a new manager. It wires up a Handler (which owns the
// well-defined schema tasks) and embeds it together with the SchemaReader.
func NewManager(validator validator,
	schemaManager SchemaManager,
	schemaReader SchemaReader,
	repo SchemaStore,
	logger logrus.FieldLogger, authorizer authorization.Authorizer,
	schemaConfig *config.SchemaHandlerConfig,
	config config.Config,
	configParser VectorConfigParser, vectorizerValidator VectorizerValidator,
	invertedConfigValidator InvertedConfigValidator,
	moduleConfig ModuleConfig, clusterState clusterState,
	cloud modulecapabilities.OffloadCloud,
	parser Parser,
	collectionRetrievalStrategyFF *configRuntime.FeatureFlag[string],
) (*Manager, error) {
	handler, err := NewHandler(
		schemaReader,
		schemaManager,
		validator,
		logger, authorizer,
		schemaConfig,
		config, configParser, vectorizerValidator, invertedConfigValidator,
		moduleConfig, clusterState, cloud, parser, NewClassGetter(&parser, schemaManager, schemaReader, collectionRetrievalStrategyFF, logger),
	)
	if err != nil {
		return nil, fmt.Errorf("cannot init handler: %w", err)
	}
	m := &Manager{
		validator:    validator,
		repo:         repo,
		logger:       logger,
		clusterState: clusterState,
		Handler:      handler,
		SchemaReader: schemaReader,
		Authorizer:   authorizer,
	}

	return m, nil
}

// func (m *Manager) migrateSchemaIfNecessary(ctx context.Context, localSchema *State) error {
// 	// introduced when Weaviate started supporting multi-shards per class in v1.8
// 	if err := m.checkSingleShardMigration(ctx, localSchema); err != nil {
// 		return errors.Wrap(err, "migrating sharding state from previous version")
// 	}

// 	// introduced when Weaviate started supporting replication in v1.17
// 	if err := m.checkShardingStateForReplication(ctx, localSchema); err != nil {
// 		return errors.Wrap(err, "migrating sharding state from previous version (before replication)")
// 	}

// 	// if other migrations become necessary in the future, you can add them here.
+// return nil +// } + +// func (m *Manager) checkSingleShardMigration(ctx context.Context, localSchema *State) error { +// for _, c := range localSchema.ObjectSchema.Classes { +// if _, ok := localSchema.ShardingState[c.Class]; ok { // there is sharding state for this class. Nothing to do +// continue +// } + +// m.logger.WithField("className", c.Class).WithField("action", "initialize_schema"). +// Warningf("No sharding state found for class %q, initializing new state. "+ +// "This is expected behavior if the schema was created with an older Weaviate "+ +// "version, prior to supporting multi-shard indices.", c.Class) + +// // there is no sharding state for this class, let's create the correct +// // config. This class must have been created prior to the sharding feature, +// // so we now that the shardCount==1 - we do not care about any of the other +// // parameters and simply use the defaults for those +// c.ShardingConfig = map[string]interface{}{ +// "desiredCount": 1, +// } +// if err := m.praser.parseShardingConfig(c); err != nil { +// return err +// } + +// if err := replica.ValidateConfig(c, m.config.Replication); err != nil { +// return fmt.Errorf("validate replication config: %w", err) +// } +// shardState, err := sharding.InitState(c.Class, +// c.ShardingConfig.(sharding.Config), +// m.clusterState, c.ReplicationConfig.Factor, +// schema.MultiTenancyEnabled(c)) +// if err != nil { +// return errors.Wrap(err, "init sharding state") +// } + +// if localSchema.ShardingState == nil { +// localSchema.ShardingState = map[string]*sharding.State{} +// } +// localSchema.ShardingState[c.Class] = shardState + +// } + +// return nil +// } + +// func (m *Manager) checkShardingStateForReplication(ctx context.Context, localSchema *State) error { +// for _, classState := range localSchema.ShardingState { +// classState.MigrateFromOldFormat() +// } +// return nil +// } + +// func newSchema() *State { +// return &State{ +// ObjectSchema: &models.Schema{ +// Classes: 
//			[]*models.Class{},
//		},
//		ShardingState: map[string]*sharding.State{},
//	}
// }

// ClusterHealthScore reports the whole-cluster health score; lower is better.
func (m *Manager) ClusterHealthScore() int {
	return m.clusterState.ClusterHealthScore()
}

// ResolveParentNodes gets all replicas for a specific class shard and resolves their names
//
// it returns map[node_name] node_address where node_address = "" if can't resolve node_name
func (m *Manager) ResolveParentNodes(class, shardName string) (map[string]string, error) {
	nodes, err := m.ShardReplicas(class, shardName)
	if err != nil {
		return nil, fmt.Errorf("get replicas from schema: %w", err)
	}

	if len(nodes) == 0 {
		return nil, nil
	}

	name2Addr := make(map[string]string, len(nodes))
	for _, node := range nodes {
		if node != "" {
			// hostname resolution is best-effort: unresolvable nodes map to ""
			host, _ := m.clusterState.NodeHostname(node)
			name2Addr[node] = host
		}
	}
	return name2Addr, nil
}

// TenantsShards queries the leader for the status of the given tenants
// (deduplicated first) and returns a map of tenant name to activity status.
// If the class allows implicit tenant activation, any non-HOT tenants are
// activated before returning.
func (m *Manager) TenantsShards(ctx context.Context, class string, tenants ...string) (map[string]string, error) {
	slices.Sort(tenants)
	tenants = slices.Compact(tenants)
	status, _, err := m.schemaManager.QueryTenantsShards(class, tenants...)
	if !m.AllowImplicitTenantActivation(class) || err != nil {
		return status, err
	}

	return m.activateTenantIfInactive(ctx, class, status)
}

// OptimisticTenantStatus tries to query the local state first. It is
// optimistic that the state has already propagated correctly. If the state is
// unexpected, i.e. either the tenant is not found at all or the status is
// COLD, it will double-check with the leader.
//
// This way we accept false positives (for HOT tenants), but guarantee that there will never be
// false negatives (i.e. tenants labelled as COLD that the leader thinks should
// be HOT).
//
// This means:
//
// - If a tenant is HOT locally (true positive), we proceed normally
// - If a tenant is HOT locally, but should be COLD (false positive), we still
// proceed.
// This is a conscious decision to keep the happy path free from
// (expensive) leader lookups.
// - If a tenant is not found locally, we assume it was recently created, but
// the state hasn't propagated yet. To verify, we check with the leader.
// - If a tenant is found locally, but is marked as COLD, we assume it was
// recently turned HOT, but the state hasn't propagated yet. To verify, we
// check with the leader
//
// Overall, we keep the (very common) happy path, free from expensive
// leader-lookups and only fall back to the leader if the local result implies
// an unhappy path.
func (m *Manager) OptimisticTenantStatus(ctx context.Context, class string, tenant string) (map[string]string, error) {
	var foundTenant bool
	var status string
	err := m.schemaReader.Read(class, func(_ *models.Class, ss *sharding.State) error {
		t, ok := ss.Physical[tenant]
		if !ok {
			return nil
		}

		foundTenant = true
		status = t.Status
		return nil
	})
	if err != nil {
		return nil, err
	}

	if !foundTenant || status != models.TenantActivityStatusHOT {
		// either no state at all or state does not imply happy path -> delegate to
		// leader
		return m.TenantsShards(ctx, class, tenant)
	}

	return map[string]string{
		tenant: status,
	}, nil
}

// activateTenantIfInactive sends an implicit UpdateTenants request that turns
// every non-HOT tenant in status HOT, then reflects the activation in the
// returned map. If all tenants are already HOT, no request is sent.
func (m *Manager) activateTenantIfInactive(ctx context.Context, class string,
	status map[string]string,
) (map[string]string, error) {
	req := &api.UpdateTenantsRequest{
		Tenants:               make([]*api.Tenant, 0, len(status)),
		ClusterNodes:          m.schemaManager.StorageCandidates(),
		ImplicitUpdateRequest: true,
	}
	for tenant, s := range status {
		if s != models.TenantActivityStatusHOT {
			req.Tenants = append(req.Tenants,
				&api.Tenant{Name: tenant, Status: models.TenantActivityStatusHOT})
		}
	}

	if len(req.Tenants) == 0 {
		// nothing to do, all tenants are already HOT
		return status, nil
	}

	_, err := m.schemaManager.UpdateTenants(ctx, class, req)
	if err != nil {
		names := make([]string,
len(req.Tenants)) + for i, t := range req.Tenants { + names[i] = t.Name + } + + return nil, fmt.Errorf("implicit activation of tenants %s: %w", strings.Join(names, ", "), err) + } + + for _, t := range req.Tenants { + status[t.Name] = models.TenantActivityStatusHOT + } + + return status, nil +} + +func (m *Manager) AllowImplicitTenantActivation(class string) bool { + allow := false + m.schemaReader.Read(class, func(c *models.Class, _ *sharding.State) error { + allow = schema.AutoTenantActivationEnabled(c) + return nil + }) + + return allow +} + +func (m *Manager) ShardOwner(class, shard string) (string, error) { + owner, _, err := m.schemaManager.QueryShardOwner(class, shard) + if err != nil { + return "", err + } + return owner, nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/migrator.go b/platform/dbops/binaries/weaviate-src/usecases/schema/migrator.go new file mode 100644 index 0000000000000000000000000000000000000000..a65961b401e6afeb4f931110d65e6224e1f27e92 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/migrator.go @@ -0,0 +1,69 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
//
// CONTACT: hello@weaviate.io
//

// This file defines the Migrator interface: a composer-style contract that
// can take in any number of migrators which themselves have to implement the
// interface. (Historical note: this originated in a standalone "migrate"
// package before being folded into package schema.)
package schema

import (
	"context"

	"github.com/weaviate/weaviate/entities/models"
	schemaConfig "github.com/weaviate/weaviate/entities/schema/config"
	"github.com/weaviate/weaviate/usecases/sharding"
)

// CreateTenantPayload describes a tenant to be created (name + initial status).
type CreateTenantPayload struct {
	Name   string
	Status string
}

// UpdateTenantPayload describes a tenant status change.
type UpdateTenantPayload struct {
	Name   string
	Status string
}

// Migrator represents both the input and output interface of the Composer
type Migrator interface {
	AddClass(ctx context.Context, class *models.Class) error
	DropClass(ctx context.Context, className string, hasFrozen bool) error
	// UpdateClass(ctx context.Context, className string,newClassName *string) error
	GetShardsQueueSize(ctx context.Context, className, tenant string) (map[string]int64, error)
	LoadShard(ctx context.Context, class, shard string) error
	DropShard(ctx context.Context, class, shard string) error
	ShutdownShard(ctx context.Context, class, shard string) error

	AddProperty(ctx context.Context, className string,
		props ...*models.Property) error
	UpdateProperty(ctx context.Context, className string,
		propName string, newName *string) error
	UpdateIndex(ctx context.Context, class *models.Class, shardingState *sharding.State) error

	NewTenants(ctx context.Context, class *models.Class, creates []*CreateTenantPayload) error
	UpdateTenants(ctx context.Context, class *models.Class, updates []*UpdateTenantPayload, implicitUpdate bool) error
	DeleteTenants(ctx context.Context, class string, tenants []*models.Tenant) error

	GetShardsStatus(ctx context.Context, className, tenant string) (map[string]string, error)
	UpdateShardStatus(ctx context.Context, className, shardName, targetStatus string, schemaVersion uint64) error

	UpdateVectorIndexConfig(ctx context.Context, className string, updated schemaConfig.VectorIndexConfig) error
	ValidateVectorIndexConfigsUpdate(old, updated map[string]schemaConfig.VectorIndexConfig) error
	UpdateVectorIndexConfigs(ctx context.Context, className string,
		updated map[string]schemaConfig.VectorIndexConfig) error
	ValidateInvertedIndexConfigUpdate(old, updated *models.InvertedIndexConfig) error
	UpdateInvertedIndexConfig(ctx context.Context, className string,
		updated *models.InvertedIndexConfig) error
	UpdateReplicationConfig(ctx context.Context, className string,
		updated *models.ReplicationConfig) error
	WaitForStartup(context.Context) error
	Shutdown(context.Context) error
}
diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/mock_schema_getter.go b/platform/dbops/binaries/weaviate-src/usecases/schema/mock_schema_getter.go
new file mode 100644
index 0000000000000000000000000000000000000000..8536a2fde9e3dded937d278bc2668e6cbd1c8f3c
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/usecases/schema/mock_schema_getter.go
@@ -0,0 +1,777 @@
//                           _       _
//  __      _____  __ ___   ___  __ _| |_ ___
//  \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
//   \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
//    \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
//
// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io
//

// Code generated by mockery v2.53.2. DO NOT EDIT.
+ +package schema + +import ( + context "context" + + mock "github.com/stretchr/testify/mock" + entitiesschema "github.com/weaviate/weaviate/entities/schema" + + models "github.com/weaviate/weaviate/entities/models" +) + +// MockSchemaGetter is an autogenerated mock type for the SchemaGetter type +type MockSchemaGetter struct { + mock.Mock +} + +type MockSchemaGetter_Expecter struct { + mock *mock.Mock +} + +func (_m *MockSchemaGetter) EXPECT() *MockSchemaGetter_Expecter { + return &MockSchemaGetter_Expecter{mock: &_m.Mock} +} + +// ClusterHealthScore provides a mock function with no fields +func (_m *MockSchemaGetter) ClusterHealthScore() int { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for ClusterHealthScore") + } + + var r0 int + if rf, ok := ret.Get(0).(func() int); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(int) + } + + return r0 +} + +// MockSchemaGetter_ClusterHealthScore_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ClusterHealthScore' +type MockSchemaGetter_ClusterHealthScore_Call struct { + *mock.Call +} + +// ClusterHealthScore is a helper method to define mock.On call +func (_e *MockSchemaGetter_Expecter) ClusterHealthScore() *MockSchemaGetter_ClusterHealthScore_Call { + return &MockSchemaGetter_ClusterHealthScore_Call{Call: _e.mock.On("ClusterHealthScore")} +} + +func (_c *MockSchemaGetter_ClusterHealthScore_Call) Run(run func()) *MockSchemaGetter_ClusterHealthScore_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockSchemaGetter_ClusterHealthScore_Call) Return(_a0 int) *MockSchemaGetter_ClusterHealthScore_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaGetter_ClusterHealthScore_Call) RunAndReturn(run func() int) *MockSchemaGetter_ClusterHealthScore_Call { + _c.Call.Return(run) + return _c +} + +// GetAliasesForClass provides a mock function with given fields: class +func (_m *MockSchemaGetter) 
GetAliasesForClass(class string) []*models.Alias { + ret := _m.Called(class) + + if len(ret) == 0 { + panic("no return value specified for GetAliasesForClass") + } + + var r0 []*models.Alias + if rf, ok := ret.Get(0).(func(string) []*models.Alias); ok { + r0 = rf(class) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Alias) + } + } + + return r0 +} + +// MockSchemaGetter_GetAliasesForClass_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAliasesForClass' +type MockSchemaGetter_GetAliasesForClass_Call struct { + *mock.Call +} + +// GetAliasesForClass is a helper method to define mock.On call +// - class string +func (_e *MockSchemaGetter_Expecter) GetAliasesForClass(class interface{}) *MockSchemaGetter_GetAliasesForClass_Call { + return &MockSchemaGetter_GetAliasesForClass_Call{Call: _e.mock.On("GetAliasesForClass", class)} +} + +func (_c *MockSchemaGetter_GetAliasesForClass_Call) Run(run func(class string)) *MockSchemaGetter_GetAliasesForClass_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *MockSchemaGetter_GetAliasesForClass_Call) Return(_a0 []*models.Alias) *MockSchemaGetter_GetAliasesForClass_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaGetter_GetAliasesForClass_Call) RunAndReturn(run func(string) []*models.Alias) *MockSchemaGetter_GetAliasesForClass_Call { + _c.Call.Return(run) + return _c +} + +// GetSchemaSkipAuth provides a mock function with no fields +func (_m *MockSchemaGetter) GetSchemaSkipAuth() entitiesschema.Schema { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetSchemaSkipAuth") + } + + var r0 entitiesschema.Schema + if rf, ok := ret.Get(0).(func() entitiesschema.Schema); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(entitiesschema.Schema) + } + + return r0 +} + +// MockSchemaGetter_GetSchemaSkipAuth_Call is a *mock.Call that shadows Run/Return methods with type explicit 
version for method 'GetSchemaSkipAuth' +type MockSchemaGetter_GetSchemaSkipAuth_Call struct { + *mock.Call +} + +// GetSchemaSkipAuth is a helper method to define mock.On call +func (_e *MockSchemaGetter_Expecter) GetSchemaSkipAuth() *MockSchemaGetter_GetSchemaSkipAuth_Call { + return &MockSchemaGetter_GetSchemaSkipAuth_Call{Call: _e.mock.On("GetSchemaSkipAuth")} +} + +func (_c *MockSchemaGetter_GetSchemaSkipAuth_Call) Run(run func()) *MockSchemaGetter_GetSchemaSkipAuth_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockSchemaGetter_GetSchemaSkipAuth_Call) Return(_a0 entitiesschema.Schema) *MockSchemaGetter_GetSchemaSkipAuth_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaGetter_GetSchemaSkipAuth_Call) RunAndReturn(run func() entitiesschema.Schema) *MockSchemaGetter_GetSchemaSkipAuth_Call { + _c.Call.Return(run) + return _c +} + +// NodeName provides a mock function with no fields +func (_m *MockSchemaGetter) NodeName() string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for NodeName") + } + + var r0 string + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// MockSchemaGetter_NodeName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'NodeName' +type MockSchemaGetter_NodeName_Call struct { + *mock.Call +} + +// NodeName is a helper method to define mock.On call +func (_e *MockSchemaGetter_Expecter) NodeName() *MockSchemaGetter_NodeName_Call { + return &MockSchemaGetter_NodeName_Call{Call: _e.mock.On("NodeName")} +} + +func (_c *MockSchemaGetter_NodeName_Call) Run(run func()) *MockSchemaGetter_NodeName_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockSchemaGetter_NodeName_Call) Return(_a0 string) *MockSchemaGetter_NodeName_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaGetter_NodeName_Call) 
RunAndReturn(run func() string) *MockSchemaGetter_NodeName_Call { + _c.Call.Return(run) + return _c +} + +// Nodes provides a mock function with no fields +func (_m *MockSchemaGetter) Nodes() []string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Nodes") + } + + var r0 []string + if rf, ok := ret.Get(0).(func() []string); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + return r0 +} + +// MockSchemaGetter_Nodes_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Nodes' +type MockSchemaGetter_Nodes_Call struct { + *mock.Call +} + +// Nodes is a helper method to define mock.On call +func (_e *MockSchemaGetter_Expecter) Nodes() *MockSchemaGetter_Nodes_Call { + return &MockSchemaGetter_Nodes_Call{Call: _e.mock.On("Nodes")} +} + +func (_c *MockSchemaGetter_Nodes_Call) Run(run func()) *MockSchemaGetter_Nodes_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockSchemaGetter_Nodes_Call) Return(_a0 []string) *MockSchemaGetter_Nodes_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaGetter_Nodes_Call) RunAndReturn(run func() []string) *MockSchemaGetter_Nodes_Call { + _c.Call.Return(run) + return _c +} + +// OptimisticTenantStatus provides a mock function with given fields: ctx, class, tenants +func (_m *MockSchemaGetter) OptimisticTenantStatus(ctx context.Context, class string, tenants string) (map[string]string, error) { + ret := _m.Called(ctx, class, tenants) + + if len(ret) == 0 { + panic("no return value specified for OptimisticTenantStatus") + } + + var r0 map[string]string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) (map[string]string, error)); ok { + return rf(ctx, class, tenants) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string) map[string]string); ok { + r0 = rf(ctx, class, tenants) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).(map[string]string) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { + r1 = rf(ctx, class, tenants) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaGetter_OptimisticTenantStatus_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'OptimisticTenantStatus' +type MockSchemaGetter_OptimisticTenantStatus_Call struct { + *mock.Call +} + +// OptimisticTenantStatus is a helper method to define mock.On call +// - ctx context.Context +// - class string +// - tenants string +func (_e *MockSchemaGetter_Expecter) OptimisticTenantStatus(ctx interface{}, class interface{}, tenants interface{}) *MockSchemaGetter_OptimisticTenantStatus_Call { + return &MockSchemaGetter_OptimisticTenantStatus_Call{Call: _e.mock.On("OptimisticTenantStatus", ctx, class, tenants)} +} + +func (_c *MockSchemaGetter_OptimisticTenantStatus_Call) Run(run func(ctx context.Context, class string, tenants string)) *MockSchemaGetter_OptimisticTenantStatus_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string)) + }) + return _c +} + +func (_c *MockSchemaGetter_OptimisticTenantStatus_Call) Return(_a0 map[string]string, _a1 error) *MockSchemaGetter_OptimisticTenantStatus_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaGetter_OptimisticTenantStatus_Call) RunAndReturn(run func(context.Context, string, string) (map[string]string, error)) *MockSchemaGetter_OptimisticTenantStatus_Call { + _c.Call.Return(run) + return _c +} + +// ReadOnlyClass provides a mock function with given fields: _a0 +func (_m *MockSchemaGetter) ReadOnlyClass(_a0 string) *models.Class { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for ReadOnlyClass") + } + + var r0 *models.Class + if rf, ok := ret.Get(0).(func(string) *models.Class); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).(*models.Class) + } + } + + return r0 +} + +// MockSchemaGetter_ReadOnlyClass_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ReadOnlyClass' +type MockSchemaGetter_ReadOnlyClass_Call struct { + *mock.Call +} + +// ReadOnlyClass is a helper method to define mock.On call +// - _a0 string +func (_e *MockSchemaGetter_Expecter) ReadOnlyClass(_a0 interface{}) *MockSchemaGetter_ReadOnlyClass_Call { + return &MockSchemaGetter_ReadOnlyClass_Call{Call: _e.mock.On("ReadOnlyClass", _a0)} +} + +func (_c *MockSchemaGetter_ReadOnlyClass_Call) Run(run func(_a0 string)) *MockSchemaGetter_ReadOnlyClass_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *MockSchemaGetter_ReadOnlyClass_Call) Return(_a0 *models.Class) *MockSchemaGetter_ReadOnlyClass_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaGetter_ReadOnlyClass_Call) RunAndReturn(run func(string) *models.Class) *MockSchemaGetter_ReadOnlyClass_Call { + _c.Call.Return(run) + return _c +} + +// ResolveAlias provides a mock function with given fields: _a0 +func (_m *MockSchemaGetter) ResolveAlias(_a0 string) string { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for ResolveAlias") + } + + var r0 string + if rf, ok := ret.Get(0).(func(string) string); ok { + r0 = rf(_a0) + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// MockSchemaGetter_ResolveAlias_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ResolveAlias' +type MockSchemaGetter_ResolveAlias_Call struct { + *mock.Call +} + +// ResolveAlias is a helper method to define mock.On call +// - _a0 string +func (_e *MockSchemaGetter_Expecter) ResolveAlias(_a0 interface{}) *MockSchemaGetter_ResolveAlias_Call { + return &MockSchemaGetter_ResolveAlias_Call{Call: _e.mock.On("ResolveAlias", _a0)} +} + +func (_c *MockSchemaGetter_ResolveAlias_Call) Run(run func(_a0 string)) 
*MockSchemaGetter_ResolveAlias_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *MockSchemaGetter_ResolveAlias_Call) Return(_a0 string) *MockSchemaGetter_ResolveAlias_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaGetter_ResolveAlias_Call) RunAndReturn(run func(string) string) *MockSchemaGetter_ResolveAlias_Call { + _c.Call.Return(run) + return _c +} + +// ResolveParentNodes provides a mock function with given fields: _a0, _a1 +func (_m *MockSchemaGetter) ResolveParentNodes(_a0 string, _a1 string) (map[string]string, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for ResolveParentNodes") + } + + var r0 map[string]string + var r1 error + if rf, ok := ret.Get(0).(func(string, string) (map[string]string, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(string, string) map[string]string); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]string) + } + } + + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaGetter_ResolveParentNodes_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ResolveParentNodes' +type MockSchemaGetter_ResolveParentNodes_Call struct { + *mock.Call +} + +// ResolveParentNodes is a helper method to define mock.On call +// - _a0 string +// - _a1 string +func (_e *MockSchemaGetter_Expecter) ResolveParentNodes(_a0 interface{}, _a1 interface{}) *MockSchemaGetter_ResolveParentNodes_Call { + return &MockSchemaGetter_ResolveParentNodes_Call{Call: _e.mock.On("ResolveParentNodes", _a0, _a1)} +} + +func (_c *MockSchemaGetter_ResolveParentNodes_Call) Run(run func(_a0 string, _a1 string)) *MockSchemaGetter_ResolveParentNodes_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string)) + }) + return _c +} + 
+func (_c *MockSchemaGetter_ResolveParentNodes_Call) Return(_a0 map[string]string, _a1 error) *MockSchemaGetter_ResolveParentNodes_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaGetter_ResolveParentNodes_Call) RunAndReturn(run func(string, string) (map[string]string, error)) *MockSchemaGetter_ResolveParentNodes_Call { + _c.Call.Return(run) + return _c +} + +// ShardFromUUID provides a mock function with given fields: class, uuid +func (_m *MockSchemaGetter) ShardFromUUID(class string, uuid []byte) string { + ret := _m.Called(class, uuid) + + if len(ret) == 0 { + panic("no return value specified for ShardFromUUID") + } + + var r0 string + if rf, ok := ret.Get(0).(func(string, []byte) string); ok { + r0 = rf(class, uuid) + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// MockSchemaGetter_ShardFromUUID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ShardFromUUID' +type MockSchemaGetter_ShardFromUUID_Call struct { + *mock.Call +} + +// ShardFromUUID is a helper method to define mock.On call +// - class string +// - uuid []byte +func (_e *MockSchemaGetter_Expecter) ShardFromUUID(class interface{}, uuid interface{}) *MockSchemaGetter_ShardFromUUID_Call { + return &MockSchemaGetter_ShardFromUUID_Call{Call: _e.mock.On("ShardFromUUID", class, uuid)} +} + +func (_c *MockSchemaGetter_ShardFromUUID_Call) Run(run func(class string, uuid []byte)) *MockSchemaGetter_ShardFromUUID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].([]byte)) + }) + return _c +} + +func (_c *MockSchemaGetter_ShardFromUUID_Call) Return(_a0 string) *MockSchemaGetter_ShardFromUUID_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaGetter_ShardFromUUID_Call) RunAndReturn(run func(string, []byte) string) *MockSchemaGetter_ShardFromUUID_Call { + _c.Call.Return(run) + return _c +} + +// ShardOwner provides a mock function with given fields: class, shard +func (_m 
*MockSchemaGetter) ShardOwner(class string, shard string) (string, error) { + ret := _m.Called(class, shard) + + if len(ret) == 0 { + panic("no return value specified for ShardOwner") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(string, string) (string, error)); ok { + return rf(class, shard) + } + if rf, ok := ret.Get(0).(func(string, string) string); ok { + r0 = rf(class, shard) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(class, shard) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaGetter_ShardOwner_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ShardOwner' +type MockSchemaGetter_ShardOwner_Call struct { + *mock.Call +} + +// ShardOwner is a helper method to define mock.On call +// - class string +// - shard string +func (_e *MockSchemaGetter_Expecter) ShardOwner(class interface{}, shard interface{}) *MockSchemaGetter_ShardOwner_Call { + return &MockSchemaGetter_ShardOwner_Call{Call: _e.mock.On("ShardOwner", class, shard)} +} + +func (_c *MockSchemaGetter_ShardOwner_Call) Run(run func(class string, shard string)) *MockSchemaGetter_ShardOwner_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string)) + }) + return _c +} + +func (_c *MockSchemaGetter_ShardOwner_Call) Return(_a0 string, _a1 error) *MockSchemaGetter_ShardOwner_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaGetter_ShardOwner_Call) RunAndReturn(run func(string, string) (string, error)) *MockSchemaGetter_ShardOwner_Call { + _c.Call.Return(run) + return _c +} + +// ShardReplicas provides a mock function with given fields: class, shard +func (_m *MockSchemaGetter) ShardReplicas(class string, shard string) ([]string, error) { + ret := _m.Called(class, shard) + + if len(ret) == 0 { + panic("no return value specified for ShardReplicas") + } + + var r0 []string + var r1 error + if rf, ok := 
ret.Get(0).(func(string, string) ([]string, error)); ok { + return rf(class, shard) + } + if rf, ok := ret.Get(0).(func(string, string) []string); ok { + r0 = rf(class, shard) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(class, shard) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaGetter_ShardReplicas_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ShardReplicas' +type MockSchemaGetter_ShardReplicas_Call struct { + *mock.Call +} + +// ShardReplicas is a helper method to define mock.On call +// - class string +// - shard string +func (_e *MockSchemaGetter_Expecter) ShardReplicas(class interface{}, shard interface{}) *MockSchemaGetter_ShardReplicas_Call { + return &MockSchemaGetter_ShardReplicas_Call{Call: _e.mock.On("ShardReplicas", class, shard)} +} + +func (_c *MockSchemaGetter_ShardReplicas_Call) Run(run func(class string, shard string)) *MockSchemaGetter_ShardReplicas_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string)) + }) + return _c +} + +func (_c *MockSchemaGetter_ShardReplicas_Call) Return(_a0 []string, _a1 error) *MockSchemaGetter_ShardReplicas_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaGetter_ShardReplicas_Call) RunAndReturn(run func(string, string) ([]string, error)) *MockSchemaGetter_ShardReplicas_Call { + _c.Call.Return(run) + return _c +} + +// Statistics provides a mock function with no fields +func (_m *MockSchemaGetter) Statistics() map[string]interface{} { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Statistics") + } + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func() map[string]interface{}); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + return r0 +} + +// MockSchemaGetter_Statistics_Call is a 
*mock.Call that shadows Run/Return methods with type explicit version for method 'Statistics' +type MockSchemaGetter_Statistics_Call struct { + *mock.Call +} + +// Statistics is a helper method to define mock.On call +func (_e *MockSchemaGetter_Expecter) Statistics() *MockSchemaGetter_Statistics_Call { + return &MockSchemaGetter_Statistics_Call{Call: _e.mock.On("Statistics")} +} + +func (_c *MockSchemaGetter_Statistics_Call) Run(run func()) *MockSchemaGetter_Statistics_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockSchemaGetter_Statistics_Call) Return(_a0 map[string]interface{}) *MockSchemaGetter_Statistics_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaGetter_Statistics_Call) RunAndReturn(run func() map[string]interface{}) *MockSchemaGetter_Statistics_Call { + _c.Call.Return(run) + return _c +} + +// TenantsShards provides a mock function with given fields: ctx, class, tenants +func (_m *MockSchemaGetter) TenantsShards(ctx context.Context, class string, tenants ...string) (map[string]string, error) { + _va := make([]interface{}, len(tenants)) + for _i := range tenants { + _va[_i] = tenants[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, class) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for TenantsShards") + } + + var r0 map[string]string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, ...string) (map[string]string, error)); ok { + return rf(ctx, class, tenants...) + } + if rf, ok := ret.Get(0).(func(context.Context, string, ...string) map[string]string); ok { + r0 = rf(ctx, class, tenants...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]string) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, ...string) error); ok { + r1 = rf(ctx, class, tenants...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaGetter_TenantsShards_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'TenantsShards' +type MockSchemaGetter_TenantsShards_Call struct { + *mock.Call +} + +// TenantsShards is a helper method to define mock.On call +// - ctx context.Context +// - class string +// - tenants ...string +func (_e *MockSchemaGetter_Expecter) TenantsShards(ctx interface{}, class interface{}, tenants ...interface{}) *MockSchemaGetter_TenantsShards_Call { + return &MockSchemaGetter_TenantsShards_Call{Call: _e.mock.On("TenantsShards", + append([]interface{}{ctx, class}, tenants...)...)} +} + +func (_c *MockSchemaGetter_TenantsShards_Call) Run(run func(ctx context.Context, class string, tenants ...string)) *MockSchemaGetter_TenantsShards_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]string, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(string) + } + } + run(args[0].(context.Context), args[1].(string), variadicArgs...) + }) + return _c +} + +func (_c *MockSchemaGetter_TenantsShards_Call) Return(_a0 map[string]string, _a1 error) *MockSchemaGetter_TenantsShards_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaGetter_TenantsShards_Call) RunAndReturn(run func(context.Context, string, ...string) (map[string]string, error)) *MockSchemaGetter_TenantsShards_Call { + _c.Call.Return(run) + return _c +} + +// NewMockSchemaGetter creates a new instance of MockSchemaGetter. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewMockSchemaGetter(t interface { + mock.TestingT + Cleanup(func()) +}) *MockSchemaGetter { + mock := &MockSchemaGetter{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/mock_schema_reader.go b/platform/dbops/binaries/weaviate-src/usecases/schema/mock_schema_reader.go new file mode 100644 index 0000000000000000000000000000000000000000..3be151bc12b47951dda70ca270ee5400ecec124a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/mock_schema_reader.go @@ -0,0 +1,1335 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +// Code generated by mockery v2.53.2. DO NOT EDIT. + +package schema + +import ( + context "context" + + clusterschema "github.com/weaviate/weaviate/cluster/schema" + + mock "github.com/stretchr/testify/mock" + + models "github.com/weaviate/weaviate/entities/models" + + sharding "github.com/weaviate/weaviate/usecases/sharding" + + versioned "github.com/weaviate/weaviate/entities/versioned" +) + +// MockSchemaReader is an autogenerated mock type for the SchemaReader type +type MockSchemaReader struct { + mock.Mock +} + +type MockSchemaReader_Expecter struct { + mock *mock.Mock +} + +func (_m *MockSchemaReader) EXPECT() *MockSchemaReader_Expecter { + return &MockSchemaReader_Expecter{mock: &_m.Mock} +} + +// Aliases provides a mock function with no fields +func (_m *MockSchemaReader) Aliases() map[string]string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Aliases") + } + + var r0 map[string]string + if rf, ok := ret.Get(0).(func() map[string]string); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]string) + } + } 
+ + return r0 +} + +// MockSchemaReader_Aliases_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Aliases' +type MockSchemaReader_Aliases_Call struct { + *mock.Call +} + +// Aliases is a helper method to define mock.On call +func (_e *MockSchemaReader_Expecter) Aliases() *MockSchemaReader_Aliases_Call { + return &MockSchemaReader_Aliases_Call{Call: _e.mock.On("Aliases")} +} + +func (_c *MockSchemaReader_Aliases_Call) Run(run func()) *MockSchemaReader_Aliases_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockSchemaReader_Aliases_Call) Return(_a0 map[string]string) *MockSchemaReader_Aliases_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaReader_Aliases_Call) RunAndReturn(run func() map[string]string) *MockSchemaReader_Aliases_Call { + _c.Call.Return(run) + return _c +} + +// ClassEqual provides a mock function with given fields: name +func (_m *MockSchemaReader) ClassEqual(name string) string { + ret := _m.Called(name) + + if len(ret) == 0 { + panic("no return value specified for ClassEqual") + } + + var r0 string + if rf, ok := ret.Get(0).(func(string) string); ok { + r0 = rf(name) + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// MockSchemaReader_ClassEqual_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ClassEqual' +type MockSchemaReader_ClassEqual_Call struct { + *mock.Call +} + +// ClassEqual is a helper method to define mock.On call +// - name string +func (_e *MockSchemaReader_Expecter) ClassEqual(name interface{}) *MockSchemaReader_ClassEqual_Call { + return &MockSchemaReader_ClassEqual_Call{Call: _e.mock.On("ClassEqual", name)} +} + +func (_c *MockSchemaReader_ClassEqual_Call) Run(run func(name string)) *MockSchemaReader_ClassEqual_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *MockSchemaReader_ClassEqual_Call) Return(_a0 string) 
*MockSchemaReader_ClassEqual_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaReader_ClassEqual_Call) RunAndReturn(run func(string) string) *MockSchemaReader_ClassEqual_Call { + _c.Call.Return(run) + return _c +} + +// ClassInfo provides a mock function with given fields: class +func (_m *MockSchemaReader) ClassInfo(class string) clusterschema.ClassInfo { + ret := _m.Called(class) + + if len(ret) == 0 { + panic("no return value specified for ClassInfo") + } + + var r0 clusterschema.ClassInfo + if rf, ok := ret.Get(0).(func(string) clusterschema.ClassInfo); ok { + r0 = rf(class) + } else { + r0 = ret.Get(0).(clusterschema.ClassInfo) + } + + return r0 +} + +// MockSchemaReader_ClassInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ClassInfo' +type MockSchemaReader_ClassInfo_Call struct { + *mock.Call +} + +// ClassInfo is a helper method to define mock.On call +// - class string +func (_e *MockSchemaReader_Expecter) ClassInfo(class interface{}) *MockSchemaReader_ClassInfo_Call { + return &MockSchemaReader_ClassInfo_Call{Call: _e.mock.On("ClassInfo", class)} +} + +func (_c *MockSchemaReader_ClassInfo_Call) Run(run func(class string)) *MockSchemaReader_ClassInfo_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *MockSchemaReader_ClassInfo_Call) Return(ci clusterschema.ClassInfo) *MockSchemaReader_ClassInfo_Call { + _c.Call.Return(ci) + return _c +} + +func (_c *MockSchemaReader_ClassInfo_Call) RunAndReturn(run func(string) clusterschema.ClassInfo) *MockSchemaReader_ClassInfo_Call { + _c.Call.Return(run) + return _c +} + +// ClassInfoWithVersion provides a mock function with given fields: ctx, class, version +func (_m *MockSchemaReader) ClassInfoWithVersion(ctx context.Context, class string, version uint64) (clusterschema.ClassInfo, error) { + ret := _m.Called(ctx, class, version) + + if len(ret) == 0 { + panic("no return value specified for 
ClassInfoWithVersion") + } + + var r0 clusterschema.ClassInfo + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, uint64) (clusterschema.ClassInfo, error)); ok { + return rf(ctx, class, version) + } + if rf, ok := ret.Get(0).(func(context.Context, string, uint64) clusterschema.ClassInfo); ok { + r0 = rf(ctx, class, version) + } else { + r0 = ret.Get(0).(clusterschema.ClassInfo) + } + + if rf, ok := ret.Get(1).(func(context.Context, string, uint64) error); ok { + r1 = rf(ctx, class, version) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaReader_ClassInfoWithVersion_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ClassInfoWithVersion' +type MockSchemaReader_ClassInfoWithVersion_Call struct { + *mock.Call +} + +// ClassInfoWithVersion is a helper method to define mock.On call +// - ctx context.Context +// - class string +// - version uint64 +func (_e *MockSchemaReader_Expecter) ClassInfoWithVersion(ctx interface{}, class interface{}, version interface{}) *MockSchemaReader_ClassInfoWithVersion_Call { + return &MockSchemaReader_ClassInfoWithVersion_Call{Call: _e.mock.On("ClassInfoWithVersion", ctx, class, version)} +} + +func (_c *MockSchemaReader_ClassInfoWithVersion_Call) Run(run func(ctx context.Context, class string, version uint64)) *MockSchemaReader_ClassInfoWithVersion_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(uint64)) + }) + return _c +} + +func (_c *MockSchemaReader_ClassInfoWithVersion_Call) Return(_a0 clusterschema.ClassInfo, _a1 error) *MockSchemaReader_ClassInfoWithVersion_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaReader_ClassInfoWithVersion_Call) RunAndReturn(run func(context.Context, string, uint64) (clusterschema.ClassInfo, error)) *MockSchemaReader_ClassInfoWithVersion_Call { + _c.Call.Return(run) + return _c +} + +// GetAliasesForClass provides a mock function with 
given fields: class +func (_m *MockSchemaReader) GetAliasesForClass(class string) []*models.Alias { + ret := _m.Called(class) + + if len(ret) == 0 { + panic("no return value specified for GetAliasesForClass") + } + + var r0 []*models.Alias + if rf, ok := ret.Get(0).(func(string) []*models.Alias); ok { + r0 = rf(class) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Alias) + } + } + + return r0 +} + +// MockSchemaReader_GetAliasesForClass_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAliasesForClass' +type MockSchemaReader_GetAliasesForClass_Call struct { + *mock.Call +} + +// GetAliasesForClass is a helper method to define mock.On call +// - class string +func (_e *MockSchemaReader_Expecter) GetAliasesForClass(class interface{}) *MockSchemaReader_GetAliasesForClass_Call { + return &MockSchemaReader_GetAliasesForClass_Call{Call: _e.mock.On("GetAliasesForClass", class)} +} + +func (_c *MockSchemaReader_GetAliasesForClass_Call) Run(run func(class string)) *MockSchemaReader_GetAliasesForClass_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *MockSchemaReader_GetAliasesForClass_Call) Return(_a0 []*models.Alias) *MockSchemaReader_GetAliasesForClass_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaReader_GetAliasesForClass_Call) RunAndReturn(run func(string) []*models.Alias) *MockSchemaReader_GetAliasesForClass_Call { + _c.Call.Return(run) + return _c +} + +// GetShardsStatus provides a mock function with given fields: class, tenant +func (_m *MockSchemaReader) GetShardsStatus(class string, tenant string) (models.ShardStatusList, error) { + ret := _m.Called(class, tenant) + + if len(ret) == 0 { + panic("no return value specified for GetShardsStatus") + } + + var r0 models.ShardStatusList + var r1 error + if rf, ok := ret.Get(0).(func(string, string) (models.ShardStatusList, error)); ok { + return rf(class, tenant) + } + if rf, ok := 
ret.Get(0).(func(string, string) models.ShardStatusList); ok { + r0 = rf(class, tenant) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(models.ShardStatusList) + } + } + + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(class, tenant) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaReader_GetShardsStatus_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetShardsStatus' +type MockSchemaReader_GetShardsStatus_Call struct { + *mock.Call +} + +// GetShardsStatus is a helper method to define mock.On call +// - class string +// - tenant string +func (_e *MockSchemaReader_Expecter) GetShardsStatus(class interface{}, tenant interface{}) *MockSchemaReader_GetShardsStatus_Call { + return &MockSchemaReader_GetShardsStatus_Call{Call: _e.mock.On("GetShardsStatus", class, tenant)} +} + +func (_c *MockSchemaReader_GetShardsStatus_Call) Run(run func(class string, tenant string)) *MockSchemaReader_GetShardsStatus_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string)) + }) + return _c +} + +func (_c *MockSchemaReader_GetShardsStatus_Call) Return(_a0 models.ShardStatusList, _a1 error) *MockSchemaReader_GetShardsStatus_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaReader_GetShardsStatus_Call) RunAndReturn(run func(string, string) (models.ShardStatusList, error)) *MockSchemaReader_GetShardsStatus_Call { + _c.Call.Return(run) + return _c +} + +// LocalShards provides a mock function with given fields: class +func (_m *MockSchemaReader) LocalShards(class string) ([]string, error) { + ret := _m.Called(class) + + if len(ret) == 0 { + panic("no return value specified for LocalShards") + } + + var r0 []string + var r1 error + if rf, ok := ret.Get(0).(func(string) ([]string, error)); ok { + return rf(class) + } + if rf, ok := ret.Get(0).(func(string) []string); ok { + r0 = rf(class) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).([]string) + } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(class) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaReader_LocalShards_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'LocalShards' +type MockSchemaReader_LocalShards_Call struct { + *mock.Call +} + +// LocalShards is a helper method to define mock.On call +// - class string +func (_e *MockSchemaReader_Expecter) LocalShards(class interface{}) *MockSchemaReader_LocalShards_Call { + return &MockSchemaReader_LocalShards_Call{Call: _e.mock.On("LocalShards", class)} +} + +func (_c *MockSchemaReader_LocalShards_Call) Run(run func(class string)) *MockSchemaReader_LocalShards_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *MockSchemaReader_LocalShards_Call) Return(_a0 []string, _a1 error) *MockSchemaReader_LocalShards_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaReader_LocalShards_Call) RunAndReturn(run func(string) ([]string, error)) *MockSchemaReader_LocalShards_Call { + _c.Call.Return(run) + return _c +} + +// MultiTenancy provides a mock function with given fields: class +func (_m *MockSchemaReader) MultiTenancy(class string) models.MultiTenancyConfig { + ret := _m.Called(class) + + if len(ret) == 0 { + panic("no return value specified for MultiTenancy") + } + + var r0 models.MultiTenancyConfig + if rf, ok := ret.Get(0).(func(string) models.MultiTenancyConfig); ok { + r0 = rf(class) + } else { + r0 = ret.Get(0).(models.MultiTenancyConfig) + } + + return r0 +} + +// MockSchemaReader_MultiTenancy_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'MultiTenancy' +type MockSchemaReader_MultiTenancy_Call struct { + *mock.Call +} + +// MultiTenancy is a helper method to define mock.On call +// - class string +func (_e *MockSchemaReader_Expecter) MultiTenancy(class interface{}) 
*MockSchemaReader_MultiTenancy_Call { + return &MockSchemaReader_MultiTenancy_Call{Call: _e.mock.On("MultiTenancy", class)} +} + +func (_c *MockSchemaReader_MultiTenancy_Call) Run(run func(class string)) *MockSchemaReader_MultiTenancy_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *MockSchemaReader_MultiTenancy_Call) Return(_a0 models.MultiTenancyConfig) *MockSchemaReader_MultiTenancy_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaReader_MultiTenancy_Call) RunAndReturn(run func(string) models.MultiTenancyConfig) *MockSchemaReader_MultiTenancy_Call { + _c.Call.Return(run) + return _c +} + +// MultiTenancyWithVersion provides a mock function with given fields: ctx, class, version +func (_m *MockSchemaReader) MultiTenancyWithVersion(ctx context.Context, class string, version uint64) (models.MultiTenancyConfig, error) { + ret := _m.Called(ctx, class, version) + + if len(ret) == 0 { + panic("no return value specified for MultiTenancyWithVersion") + } + + var r0 models.MultiTenancyConfig + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, uint64) (models.MultiTenancyConfig, error)); ok { + return rf(ctx, class, version) + } + if rf, ok := ret.Get(0).(func(context.Context, string, uint64) models.MultiTenancyConfig); ok { + r0 = rf(ctx, class, version) + } else { + r0 = ret.Get(0).(models.MultiTenancyConfig) + } + + if rf, ok := ret.Get(1).(func(context.Context, string, uint64) error); ok { + r1 = rf(ctx, class, version) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaReader_MultiTenancyWithVersion_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'MultiTenancyWithVersion' +type MockSchemaReader_MultiTenancyWithVersion_Call struct { + *mock.Call +} + +// MultiTenancyWithVersion is a helper method to define mock.On call +// - ctx context.Context +// - class string +// - version uint64 +func (_e 
*MockSchemaReader_Expecter) MultiTenancyWithVersion(ctx interface{}, class interface{}, version interface{}) *MockSchemaReader_MultiTenancyWithVersion_Call { + return &MockSchemaReader_MultiTenancyWithVersion_Call{Call: _e.mock.On("MultiTenancyWithVersion", ctx, class, version)} +} + +func (_c *MockSchemaReader_MultiTenancyWithVersion_Call) Run(run func(ctx context.Context, class string, version uint64)) *MockSchemaReader_MultiTenancyWithVersion_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(uint64)) + }) + return _c +} + +func (_c *MockSchemaReader_MultiTenancyWithVersion_Call) Return(_a0 models.MultiTenancyConfig, _a1 error) *MockSchemaReader_MultiTenancyWithVersion_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaReader_MultiTenancyWithVersion_Call) RunAndReturn(run func(context.Context, string, uint64) (models.MultiTenancyConfig, error)) *MockSchemaReader_MultiTenancyWithVersion_Call { + _c.Call.Return(run) + return _c +} + +// Read provides a mock function with given fields: class, reader +func (_m *MockSchemaReader) Read(class string, reader func(*models.Class, *sharding.State) error) error { + ret := _m.Called(class, reader) + + if len(ret) == 0 { + panic("no return value specified for Read") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, func(*models.Class, *sharding.State) error) error); ok { + r0 = rf(class, reader) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// MockSchemaReader_Read_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Read' +type MockSchemaReader_Read_Call struct { + *mock.Call +} + +// Read is a helper method to define mock.On call +// - class string +// - reader func(*models.Class , *sharding.State) error +func (_e *MockSchemaReader_Expecter) Read(class interface{}, reader interface{}) *MockSchemaReader_Read_Call { + return &MockSchemaReader_Read_Call{Call: _e.mock.On("Read", class, 
reader)} +} + +func (_c *MockSchemaReader_Read_Call) Run(run func(class string, reader func(*models.Class, *sharding.State) error)) *MockSchemaReader_Read_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(func(*models.Class, *sharding.State) error)) + }) + return _c +} + +func (_c *MockSchemaReader_Read_Call) Return(_a0 error) *MockSchemaReader_Read_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaReader_Read_Call) RunAndReturn(run func(string, func(*models.Class, *sharding.State) error) error) *MockSchemaReader_Read_Call { + _c.Call.Return(run) + return _c +} + +// ReadOnlyClass provides a mock function with given fields: name +func (_m *MockSchemaReader) ReadOnlyClass(name string) *models.Class { + ret := _m.Called(name) + + if len(ret) == 0 { + panic("no return value specified for ReadOnlyClass") + } + + var r0 *models.Class + if rf, ok := ret.Get(0).(func(string) *models.Class); ok { + r0 = rf(name) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Class) + } + } + + return r0 +} + +// MockSchemaReader_ReadOnlyClass_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ReadOnlyClass' +type MockSchemaReader_ReadOnlyClass_Call struct { + *mock.Call +} + +// ReadOnlyClass is a helper method to define mock.On call +// - name string +func (_e *MockSchemaReader_Expecter) ReadOnlyClass(name interface{}) *MockSchemaReader_ReadOnlyClass_Call { + return &MockSchemaReader_ReadOnlyClass_Call{Call: _e.mock.On("ReadOnlyClass", name)} +} + +func (_c *MockSchemaReader_ReadOnlyClass_Call) Run(run func(name string)) *MockSchemaReader_ReadOnlyClass_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *MockSchemaReader_ReadOnlyClass_Call) Return(_a0 *models.Class) *MockSchemaReader_ReadOnlyClass_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaReader_ReadOnlyClass_Call) RunAndReturn(run func(string) 
*models.Class) *MockSchemaReader_ReadOnlyClass_Call { + _c.Call.Return(run) + return _c +} + +// ReadOnlyClassWithVersion provides a mock function with given fields: ctx, class, version +func (_m *MockSchemaReader) ReadOnlyClassWithVersion(ctx context.Context, class string, version uint64) (*models.Class, error) { + ret := _m.Called(ctx, class, version) + + if len(ret) == 0 { + panic("no return value specified for ReadOnlyClassWithVersion") + } + + var r0 *models.Class + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, uint64) (*models.Class, error)); ok { + return rf(ctx, class, version) + } + if rf, ok := ret.Get(0).(func(context.Context, string, uint64) *models.Class); ok { + r0 = rf(ctx, class, version) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Class) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, uint64) error); ok { + r1 = rf(ctx, class, version) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaReader_ReadOnlyClassWithVersion_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ReadOnlyClassWithVersion' +type MockSchemaReader_ReadOnlyClassWithVersion_Call struct { + *mock.Call +} + +// ReadOnlyClassWithVersion is a helper method to define mock.On call +// - ctx context.Context +// - class string +// - version uint64 +func (_e *MockSchemaReader_Expecter) ReadOnlyClassWithVersion(ctx interface{}, class interface{}, version interface{}) *MockSchemaReader_ReadOnlyClassWithVersion_Call { + return &MockSchemaReader_ReadOnlyClassWithVersion_Call{Call: _e.mock.On("ReadOnlyClassWithVersion", ctx, class, version)} +} + +func (_c *MockSchemaReader_ReadOnlyClassWithVersion_Call) Run(run func(ctx context.Context, class string, version uint64)) *MockSchemaReader_ReadOnlyClassWithVersion_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(uint64)) + }) + return _c +} + +func (_c 
*MockSchemaReader_ReadOnlyClassWithVersion_Call) Return(_a0 *models.Class, _a1 error) *MockSchemaReader_ReadOnlyClassWithVersion_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaReader_ReadOnlyClassWithVersion_Call) RunAndReturn(run func(context.Context, string, uint64) (*models.Class, error)) *MockSchemaReader_ReadOnlyClassWithVersion_Call { + _c.Call.Return(run) + return _c +} + +// ReadOnlySchema provides a mock function with no fields +func (_m *MockSchemaReader) ReadOnlySchema() models.Schema { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for ReadOnlySchema") + } + + var r0 models.Schema + if rf, ok := ret.Get(0).(func() models.Schema); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(models.Schema) + } + + return r0 +} + +// MockSchemaReader_ReadOnlySchema_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ReadOnlySchema' +type MockSchemaReader_ReadOnlySchema_Call struct { + *mock.Call +} + +// ReadOnlySchema is a helper method to define mock.On call +func (_e *MockSchemaReader_Expecter) ReadOnlySchema() *MockSchemaReader_ReadOnlySchema_Call { + return &MockSchemaReader_ReadOnlySchema_Call{Call: _e.mock.On("ReadOnlySchema")} +} + +func (_c *MockSchemaReader_ReadOnlySchema_Call) Run(run func()) *MockSchemaReader_ReadOnlySchema_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockSchemaReader_ReadOnlySchema_Call) Return(_a0 models.Schema) *MockSchemaReader_ReadOnlySchema_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaReader_ReadOnlySchema_Call) RunAndReturn(run func() models.Schema) *MockSchemaReader_ReadOnlySchema_Call { + _c.Call.Return(run) + return _c +} + +// ReadOnlyVersionedClass provides a mock function with given fields: name +func (_m *MockSchemaReader) ReadOnlyVersionedClass(name string) versioned.Class { + ret := _m.Called(name) + + if len(ret) == 0 { + panic("no return value specified for 
ReadOnlyVersionedClass") + } + + var r0 versioned.Class + if rf, ok := ret.Get(0).(func(string) versioned.Class); ok { + r0 = rf(name) + } else { + r0 = ret.Get(0).(versioned.Class) + } + + return r0 +} + +// MockSchemaReader_ReadOnlyVersionedClass_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ReadOnlyVersionedClass' +type MockSchemaReader_ReadOnlyVersionedClass_Call struct { + *mock.Call +} + +// ReadOnlyVersionedClass is a helper method to define mock.On call +// - name string +func (_e *MockSchemaReader_Expecter) ReadOnlyVersionedClass(name interface{}) *MockSchemaReader_ReadOnlyVersionedClass_Call { + return &MockSchemaReader_ReadOnlyVersionedClass_Call{Call: _e.mock.On("ReadOnlyVersionedClass", name)} +} + +func (_c *MockSchemaReader_ReadOnlyVersionedClass_Call) Run(run func(name string)) *MockSchemaReader_ReadOnlyVersionedClass_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *MockSchemaReader_ReadOnlyVersionedClass_Call) Return(_a0 versioned.Class) *MockSchemaReader_ReadOnlyVersionedClass_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaReader_ReadOnlyVersionedClass_Call) RunAndReturn(run func(string) versioned.Class) *MockSchemaReader_ReadOnlyVersionedClass_Call { + _c.Call.Return(run) + return _c +} + +// ResolveAlias provides a mock function with given fields: alias +func (_m *MockSchemaReader) ResolveAlias(alias string) string { + ret := _m.Called(alias) + + if len(ret) == 0 { + panic("no return value specified for ResolveAlias") + } + + var r0 string + if rf, ok := ret.Get(0).(func(string) string); ok { + r0 = rf(alias) + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// MockSchemaReader_ResolveAlias_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ResolveAlias' +type MockSchemaReader_ResolveAlias_Call struct { + *mock.Call +} + +// ResolveAlias is a helper method to define 
mock.On call +// - alias string +func (_e *MockSchemaReader_Expecter) ResolveAlias(alias interface{}) *MockSchemaReader_ResolveAlias_Call { + return &MockSchemaReader_ResolveAlias_Call{Call: _e.mock.On("ResolveAlias", alias)} +} + +func (_c *MockSchemaReader_ResolveAlias_Call) Run(run func(alias string)) *MockSchemaReader_ResolveAlias_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *MockSchemaReader_ResolveAlias_Call) Return(_a0 string) *MockSchemaReader_ResolveAlias_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaReader_ResolveAlias_Call) RunAndReturn(run func(string) string) *MockSchemaReader_ResolveAlias_Call { + _c.Call.Return(run) + return _c +} + +// ShardFromUUID provides a mock function with given fields: class, uuid +func (_m *MockSchemaReader) ShardFromUUID(class string, uuid []byte) string { + ret := _m.Called(class, uuid) + + if len(ret) == 0 { + panic("no return value specified for ShardFromUUID") + } + + var r0 string + if rf, ok := ret.Get(0).(func(string, []byte) string); ok { + r0 = rf(class, uuid) + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// MockSchemaReader_ShardFromUUID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ShardFromUUID' +type MockSchemaReader_ShardFromUUID_Call struct { + *mock.Call +} + +// ShardFromUUID is a helper method to define mock.On call +// - class string +// - uuid []byte +func (_e *MockSchemaReader_Expecter) ShardFromUUID(class interface{}, uuid interface{}) *MockSchemaReader_ShardFromUUID_Call { + return &MockSchemaReader_ShardFromUUID_Call{Call: _e.mock.On("ShardFromUUID", class, uuid)} +} + +func (_c *MockSchemaReader_ShardFromUUID_Call) Run(run func(class string, uuid []byte)) *MockSchemaReader_ShardFromUUID_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].([]byte)) + }) + return _c +} + +func (_c *MockSchemaReader_ShardFromUUID_Call) Return(_a0 
string) *MockSchemaReader_ShardFromUUID_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaReader_ShardFromUUID_Call) RunAndReturn(run func(string, []byte) string) *MockSchemaReader_ShardFromUUID_Call { + _c.Call.Return(run) + return _c +} + +// ShardFromUUIDWithVersion provides a mock function with given fields: ctx, class, uuid, version +func (_m *MockSchemaReader) ShardFromUUIDWithVersion(ctx context.Context, class string, uuid []byte, version uint64) (string, error) { + ret := _m.Called(ctx, class, uuid, version) + + if len(ret) == 0 { + panic("no return value specified for ShardFromUUIDWithVersion") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, []byte, uint64) (string, error)); ok { + return rf(ctx, class, uuid, version) + } + if rf, ok := ret.Get(0).(func(context.Context, string, []byte, uint64) string); ok { + r0 = rf(ctx, class, uuid, version) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(context.Context, string, []byte, uint64) error); ok { + r1 = rf(ctx, class, uuid, version) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaReader_ShardFromUUIDWithVersion_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ShardFromUUIDWithVersion' +type MockSchemaReader_ShardFromUUIDWithVersion_Call struct { + *mock.Call +} + +// ShardFromUUIDWithVersion is a helper method to define mock.On call +// - ctx context.Context +// - class string +// - uuid []byte +// - version uint64 +func (_e *MockSchemaReader_Expecter) ShardFromUUIDWithVersion(ctx interface{}, class interface{}, uuid interface{}, version interface{}) *MockSchemaReader_ShardFromUUIDWithVersion_Call { + return &MockSchemaReader_ShardFromUUIDWithVersion_Call{Call: _e.mock.On("ShardFromUUIDWithVersion", ctx, class, uuid, version)} +} + +func (_c *MockSchemaReader_ShardFromUUIDWithVersion_Call) Run(run func(ctx context.Context, class string, uuid []byte, 
version uint64)) *MockSchemaReader_ShardFromUUIDWithVersion_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].([]byte), args[3].(uint64)) + }) + return _c +} + +func (_c *MockSchemaReader_ShardFromUUIDWithVersion_Call) Return(_a0 string, _a1 error) *MockSchemaReader_ShardFromUUIDWithVersion_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaReader_ShardFromUUIDWithVersion_Call) RunAndReturn(run func(context.Context, string, []byte, uint64) (string, error)) *MockSchemaReader_ShardFromUUIDWithVersion_Call { + _c.Call.Return(run) + return _c +} + +// ShardOwner provides a mock function with given fields: class, shard +func (_m *MockSchemaReader) ShardOwner(class string, shard string) (string, error) { + ret := _m.Called(class, shard) + + if len(ret) == 0 { + panic("no return value specified for ShardOwner") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(string, string) (string, error)); ok { + return rf(class, shard) + } + if rf, ok := ret.Get(0).(func(string, string) string); ok { + r0 = rf(class, shard) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(class, shard) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaReader_ShardOwner_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ShardOwner' +type MockSchemaReader_ShardOwner_Call struct { + *mock.Call +} + +// ShardOwner is a helper method to define mock.On call +// - class string +// - shard string +func (_e *MockSchemaReader_Expecter) ShardOwner(class interface{}, shard interface{}) *MockSchemaReader_ShardOwner_Call { + return &MockSchemaReader_ShardOwner_Call{Call: _e.mock.On("ShardOwner", class, shard)} +} + +func (_c *MockSchemaReader_ShardOwner_Call) Run(run func(class string, shard string)) *MockSchemaReader_ShardOwner_Call { + _c.Call.Run(func(args mock.Arguments) { + 
run(args[0].(string), args[1].(string)) + }) + return _c +} + +func (_c *MockSchemaReader_ShardOwner_Call) Return(_a0 string, _a1 error) *MockSchemaReader_ShardOwner_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaReader_ShardOwner_Call) RunAndReturn(run func(string, string) (string, error)) *MockSchemaReader_ShardOwner_Call { + _c.Call.Return(run) + return _c +} + +// ShardOwnerWithVersion provides a mock function with given fields: ctx, lass, shard, version +func (_m *MockSchemaReader) ShardOwnerWithVersion(ctx context.Context, lass string, shard string, version uint64) (string, error) { + ret := _m.Called(ctx, lass, shard, version) + + if len(ret) == 0 { + panic("no return value specified for ShardOwnerWithVersion") + } + + var r0 string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, string, uint64) (string, error)); ok { + return rf(ctx, lass, shard, version) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string, uint64) string); ok { + r0 = rf(ctx, lass, shard, version) + } else { + r0 = ret.Get(0).(string) + } + + if rf, ok := ret.Get(1).(func(context.Context, string, string, uint64) error); ok { + r1 = rf(ctx, lass, shard, version) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaReader_ShardOwnerWithVersion_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ShardOwnerWithVersion' +type MockSchemaReader_ShardOwnerWithVersion_Call struct { + *mock.Call +} + +// ShardOwnerWithVersion is a helper method to define mock.On call +// - ctx context.Context +// - lass string +// - shard string +// - version uint64 +func (_e *MockSchemaReader_Expecter) ShardOwnerWithVersion(ctx interface{}, lass interface{}, shard interface{}, version interface{}) *MockSchemaReader_ShardOwnerWithVersion_Call { + return &MockSchemaReader_ShardOwnerWithVersion_Call{Call: _e.mock.On("ShardOwnerWithVersion", ctx, lass, shard, version)} +} + +func (_c 
*MockSchemaReader_ShardOwnerWithVersion_Call) Run(run func(ctx context.Context, lass string, shard string, version uint64)) *MockSchemaReader_ShardOwnerWithVersion_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string), args[3].(uint64)) + }) + return _c +} + +func (_c *MockSchemaReader_ShardOwnerWithVersion_Call) Return(_a0 string, _a1 error) *MockSchemaReader_ShardOwnerWithVersion_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaReader_ShardOwnerWithVersion_Call) RunAndReturn(run func(context.Context, string, string, uint64) (string, error)) *MockSchemaReader_ShardOwnerWithVersion_Call { + _c.Call.Return(run) + return _c +} + +// ShardReplicas provides a mock function with given fields: class, shard +func (_m *MockSchemaReader) ShardReplicas(class string, shard string) ([]string, error) { + ret := _m.Called(class, shard) + + if len(ret) == 0 { + panic("no return value specified for ShardReplicas") + } + + var r0 []string + var r1 error + if rf, ok := ret.Get(0).(func(string, string) ([]string, error)); ok { + return rf(class, shard) + } + if rf, ok := ret.Get(0).(func(string, string) []string); ok { + r0 = rf(class, shard) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(class, shard) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaReader_ShardReplicas_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ShardReplicas' +type MockSchemaReader_ShardReplicas_Call struct { + *mock.Call +} + +// ShardReplicas is a helper method to define mock.On call +// - class string +// - shard string +func (_e *MockSchemaReader_Expecter) ShardReplicas(class interface{}, shard interface{}) *MockSchemaReader_ShardReplicas_Call { + return &MockSchemaReader_ShardReplicas_Call{Call: _e.mock.On("ShardReplicas", class, shard)} +} + +func (_c 
*MockSchemaReader_ShardReplicas_Call) Run(run func(class string, shard string)) *MockSchemaReader_ShardReplicas_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string), args[1].(string)) + }) + return _c +} + +func (_c *MockSchemaReader_ShardReplicas_Call) Return(_a0 []string, _a1 error) *MockSchemaReader_ShardReplicas_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaReader_ShardReplicas_Call) RunAndReturn(run func(string, string) ([]string, error)) *MockSchemaReader_ShardReplicas_Call { + _c.Call.Return(run) + return _c +} + +// ShardReplicasWithVersion provides a mock function with given fields: ctx, class, shard, version +func (_m *MockSchemaReader) ShardReplicasWithVersion(ctx context.Context, class string, shard string, version uint64) ([]string, error) { + ret := _m.Called(ctx, class, shard, version) + + if len(ret) == 0 { + panic("no return value specified for ShardReplicasWithVersion") + } + + var r0 []string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, string, uint64) ([]string, error)); ok { + return rf(ctx, class, shard, version) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string, uint64) []string); ok { + r0 = rf(ctx, class, shard, version) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, string, uint64) error); ok { + r1 = rf(ctx, class, shard, version) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaReader_ShardReplicasWithVersion_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ShardReplicasWithVersion' +type MockSchemaReader_ShardReplicasWithVersion_Call struct { + *mock.Call +} + +// ShardReplicasWithVersion is a helper method to define mock.On call +// - ctx context.Context +// - class string +// - shard string +// - version uint64 +func (_e *MockSchemaReader_Expecter) ShardReplicasWithVersion(ctx interface{}, class 
interface{}, shard interface{}, version interface{}) *MockSchemaReader_ShardReplicasWithVersion_Call { + return &MockSchemaReader_ShardReplicasWithVersion_Call{Call: _e.mock.On("ShardReplicasWithVersion", ctx, class, shard, version)} +} + +func (_c *MockSchemaReader_ShardReplicasWithVersion_Call) Run(run func(ctx context.Context, class string, shard string, version uint64)) *MockSchemaReader_ShardReplicasWithVersion_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string), args[3].(uint64)) + }) + return _c +} + +func (_c *MockSchemaReader_ShardReplicasWithVersion_Call) Return(_a0 []string, _a1 error) *MockSchemaReader_ShardReplicasWithVersion_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaReader_ShardReplicasWithVersion_Call) RunAndReturn(run func(context.Context, string, string, uint64) ([]string, error)) *MockSchemaReader_ShardReplicasWithVersion_Call { + _c.Call.Return(run) + return _c +} + +// Shards provides a mock function with given fields: class +func (_m *MockSchemaReader) Shards(class string) ([]string, error) { + ret := _m.Called(class) + + if len(ret) == 0 { + panic("no return value specified for Shards") + } + + var r0 []string + var r1 error + if rf, ok := ret.Get(0).(func(string) ([]string, error)); ok { + return rf(class) + } + if rf, ok := ret.Get(0).(func(string) []string); ok { + r0 = rf(class) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(class) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaReader_Shards_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Shards' +type MockSchemaReader_Shards_Call struct { + *mock.Call +} + +// Shards is a helper method to define mock.On call +// - class string +func (_e *MockSchemaReader_Expecter) Shards(class interface{}) *MockSchemaReader_Shards_Call { + return 
&MockSchemaReader_Shards_Call{Call: _e.mock.On("Shards", class)} +} + +func (_c *MockSchemaReader_Shards_Call) Run(run func(class string)) *MockSchemaReader_Shards_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(string)) + }) + return _c +} + +func (_c *MockSchemaReader_Shards_Call) Return(_a0 []string, _a1 error) *MockSchemaReader_Shards_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaReader_Shards_Call) RunAndReturn(run func(string) ([]string, error)) *MockSchemaReader_Shards_Call { + _c.Call.Return(run) + return _c +} + +// TenantsShardsWithVersion provides a mock function with given fields: ctx, version, class, tenants +func (_m *MockSchemaReader) TenantsShardsWithVersion(ctx context.Context, version uint64, class string, tenants ...string) (map[string]string, error) { + _va := make([]interface{}, len(tenants)) + for _i := range tenants { + _va[_i] = tenants[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, version, class) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for TenantsShardsWithVersion") + } + + var r0 map[string]string + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, uint64, string, ...string) (map[string]string, error)); ok { + return rf(ctx, version, class, tenants...) + } + if rf, ok := ret.Get(0).(func(context.Context, uint64, string, ...string) map[string]string); ok { + r0 = rf(ctx, version, class, tenants...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]string) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, uint64, string, ...string) error); ok { + r1 = rf(ctx, version, class, tenants...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockSchemaReader_TenantsShardsWithVersion_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'TenantsShardsWithVersion' +type MockSchemaReader_TenantsShardsWithVersion_Call struct { + *mock.Call +} + +// TenantsShardsWithVersion is a helper method to define mock.On call +// - ctx context.Context +// - version uint64 +// - class string +// - tenants ...string +func (_e *MockSchemaReader_Expecter) TenantsShardsWithVersion(ctx interface{}, version interface{}, class interface{}, tenants ...interface{}) *MockSchemaReader_TenantsShardsWithVersion_Call { + return &MockSchemaReader_TenantsShardsWithVersion_Call{Call: _e.mock.On("TenantsShardsWithVersion", + append([]interface{}{ctx, version, class}, tenants...)...)} +} + +func (_c *MockSchemaReader_TenantsShardsWithVersion_Call) Run(run func(ctx context.Context, version uint64, class string, tenants ...string)) *MockSchemaReader_TenantsShardsWithVersion_Call { + _c.Call.Run(func(args mock.Arguments) { + variadicArgs := make([]string, len(args)-3) + for i, a := range args[3:] { + if a != nil { + variadicArgs[i] = a.(string) + } + } + run(args[0].(context.Context), args[1].(uint64), args[2].(string), variadicArgs...) 
+ }) + return _c +} + +func (_c *MockSchemaReader_TenantsShardsWithVersion_Call) Return(_a0 map[string]string, _a1 error) *MockSchemaReader_TenantsShardsWithVersion_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockSchemaReader_TenantsShardsWithVersion_Call) RunAndReturn(run func(context.Context, uint64, string, ...string) (map[string]string, error)) *MockSchemaReader_TenantsShardsWithVersion_Call { + _c.Call.Return(run) + return _c +} + +// WaitForUpdate provides a mock function with given fields: ctx, version +func (_m *MockSchemaReader) WaitForUpdate(ctx context.Context, version uint64) error { + ret := _m.Called(ctx, version) + + if len(ret) == 0 { + panic("no return value specified for WaitForUpdate") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, uint64) error); ok { + r0 = rf(ctx, version) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// MockSchemaReader_WaitForUpdate_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WaitForUpdate' +type MockSchemaReader_WaitForUpdate_Call struct { + *mock.Call +} + +// WaitForUpdate is a helper method to define mock.On call +// - ctx context.Context +// - version uint64 +func (_e *MockSchemaReader_Expecter) WaitForUpdate(ctx interface{}, version interface{}) *MockSchemaReader_WaitForUpdate_Call { + return &MockSchemaReader_WaitForUpdate_Call{Call: _e.mock.On("WaitForUpdate", ctx, version)} +} + +func (_c *MockSchemaReader_WaitForUpdate_Call) Run(run func(ctx context.Context, version uint64)) *MockSchemaReader_WaitForUpdate_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(uint64)) + }) + return _c +} + +func (_c *MockSchemaReader_WaitForUpdate_Call) Return(_a0 error) *MockSchemaReader_WaitForUpdate_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockSchemaReader_WaitForUpdate_Call) RunAndReturn(run func(context.Context, uint64) error) *MockSchemaReader_WaitForUpdate_Call { + 
_c.Call.Return(run) + return _c +} + +// NewMockSchemaReader creates a new instance of MockSchemaReader. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewMockSchemaReader(t interface { + mock.TestingT + Cleanup(func()) +}) *MockSchemaReader { + mock := &MockSchemaReader{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/parser.go b/platform/dbops/binaries/weaviate-src/usecases/schema/parser.go new file mode 100644 index 0000000000000000000000000000000000000000..889225ae8f86fc78adc9242462e465e75ce806cb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/parser.go @@ -0,0 +1,581 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "encoding/json" + "fmt" + "reflect" + "sort" + "strings" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modelsext" + "github.com/weaviate/weaviate/entities/schema" + schemaConfig "github.com/weaviate/weaviate/entities/schema/config" + "github.com/weaviate/weaviate/entities/vectorindex" + "github.com/weaviate/weaviate/usecases/config" + configRuntime "github.com/weaviate/weaviate/usecases/config/runtime" + shardingConfig "github.com/weaviate/weaviate/usecases/sharding/config" +) + +var errPropertiesUpdatedInClassUpdate = errors.Errorf( + "property fields other than description cannot be updated through updating the class. Use the add " + + "property feature (e.g. 
\"POST /v1/schema/{className}/properties\") " + + "to add additional properties") + +type modulesProvider interface { + IsGenerative(string) bool + IsReranker(string) bool + IsMultiVector(string) bool +} + +type Parser struct { + clusterState clusterState + configParser VectorConfigParser + validator validator + modules modulesProvider + defaultQuantization *configRuntime.DynamicValue[string] +} + +func NewParser(cs clusterState, vCfg VectorConfigParser, v validator, modules modulesProvider, defaultQuantization *configRuntime.DynamicValue[string]) *Parser { + return &Parser{ + clusterState: cs, + configParser: vCfg, + validator: v, + modules: modules, + defaultQuantization: defaultQuantization, + } +} + +func (p *Parser) ParseClass(class *models.Class) error { + if class == nil { + return fmt.Errorf("class cannot be nil") + } + + if strings.EqualFold(class.Class, config.DefaultRaftDir) { + return fmt.Errorf("parse class name: %w", fmt.Errorf("class name `raft` is reserved")) + } + + if err := p.parseShardingConfig(class); err != nil { + return fmt.Errorf("parse sharding config: %w", err) + } + + if err := p.parseVectorIndexConfig(class); err != nil { + return fmt.Errorf("parse vector index config: %w", err) + } + + return nil +} + +func (p *Parser) parseModuleConfig(class *models.Class) error { + if class.ModuleConfig == nil { + return nil + } + + mapMC, ok := class.ModuleConfig.(map[string]any) + if !ok { + return fmt.Errorf("module config is not a map, got %v", class.ModuleConfig) + } + + mc, err := p.moduleConfig(mapMC) + if err != nil { + return fmt.Errorf("module config: %w", err) + } + class.ModuleConfig = mc + + return nil +} + +func (p *Parser) parseVectorConfig(class *models.Class) error { + if class.VectorConfig == nil { + return nil + } + + newVC := map[string]models.VectorConfig{} + for vector, config := range class.VectorConfig { + mapMC, ok := config.Vectorizer.(map[string]any) + if !ok { + return fmt.Errorf("vectorizer for %s is not a map, got %v", 
vector, config) + } + + mc, err := p.moduleConfig(mapMC) + if err != nil { + return fmt.Errorf("vectorizer config: %w", err) + } + + config.Vectorizer = mc + newVC[vector] = config + } + class.VectorConfig = newVC + return nil +} + +func (p *Parser) moduleConfig(moduleConfig map[string]any) (map[string]any, error) { + parsedMC := map[string]any{} + for module, config := range moduleConfig { + if config == nil { + parsedMC[module] = nil // nil is allowed, do no further parsing + continue + } + mapC, ok := config.(map[string]any) + if !ok { + return nil, fmt.Errorf("module config for %s is not a map, got %v", module, config) + } + parsedC := map[string]any{} + // raft interprets all `json.Number` types as float64 when unmarshalling + // we parse them explicitly here so that UpdateClass can compare the new class + // with the old one read from the raft schema manager + for key, value := range mapC { + if number, ok := value.(json.Number); ok { + if integer, err := number.Int64(); err == nil { + parsedC[key] = float64(integer) + } else if float, err := number.Float64(); err == nil { + parsedC[key] = float + } else { + parsedC[key] = number.String() + } + continue + } + parsedC[key] = value + } + parsedMC[module] = parsedC + } + return parsedMC, nil +} + +func (p *Parser) parseVectorIndexConfig(class *models.Class) error { + if !hasTargetVectors(class) || class.VectorIndexType != "" { + parsed, err := p.parseGivenVectorIndexConfig(class.VectorIndexType, class.VectorIndexConfig, p.modules.IsMultiVector(class.Vectorizer), p.defaultQuantization) + if err != nil { + return err + } + if parsed.IsMultiVector() { + return fmt.Errorf("class.VectorIndexConfig multi vector type index type is only configurable using named vectors") + } + class.VectorIndexConfig = parsed + } + + if err := p.parseTargetVectorsIndexConfig(class); err != nil { + return err + } + return nil +} + +func (p *Parser) parseShardingConfig(class *models.Class) (err error) { + // multiTenancyConfig and 
shardingConfig are mutually exclusive + cfg := shardingConfig.Config{} // cfg is empty in case of MT + if !schema.MultiTenancyEnabled(class) { + cfg, err = shardingConfig.ParseConfig(class.ShardingConfig, p.clusterState.NodeCount()) + if err != nil { + return err + } + + } + class.ShardingConfig = cfg + return nil +} + +func (p *Parser) parseTargetVectorsIndexConfig(class *models.Class) error { + for targetVector, vectorConfig := range class.VectorConfig { + isMultiVector := false + vectorizerModuleName := "" + if vectorizer, ok := vectorConfig.Vectorizer.(map[string]interface{}); ok { + for name := range vectorizer { + isMultiVector = p.modules.IsMultiVector(name) + vectorizerModuleName = name + } + } + parsed, err := p.parseGivenVectorIndexConfig(vectorConfig.VectorIndexType, vectorConfig.VectorIndexConfig, isMultiVector, p.defaultQuantization) + if err != nil { + return fmt.Errorf("parse vector config for %s: %w", targetVector, err) + } + if parsed.IsMultiVector() && vectorizerModuleName != "none" && !isMultiVector { + return fmt.Errorf("parse vector config for %s: multi vector index configured but vectorizer: %q doesn't support multi vectors", targetVector, vectorizerModuleName) + } + vectorConfig.VectorIndexConfig = parsed + class.VectorConfig[targetVector] = vectorConfig + } + return nil +} + +func (p *Parser) parseGivenVectorIndexConfig(vectorIndexType string, + vectorIndexConfig interface{}, isMultiVector bool, defaultQuantization *configRuntime.DynamicValue[string], +) (schemaConfig.VectorIndexConfig, error) { + if vectorIndexType != vectorindex.VectorIndexTypeHNSW && vectorIndexType != vectorindex.VectorIndexTypeFLAT && vectorIndexType != vectorindex.VectorIndexTypeDYNAMIC { + return nil, errors.Errorf( + "parse vector index config: unsupported vector index type: %q", + vectorIndexType) + } + + if vectorIndexType != vectorindex.VectorIndexTypeHNSW && isMultiVector { + return nil, errors.Errorf( + "parse vector index config: multi vector index is not 
supported for vector index type: %q, only supported type is hnsw", + vectorIndexType) + } + + parsed, err := p.configParser(vectorIndexConfig, vectorIndexType, isMultiVector) + if err != nil { + return nil, errors.Wrap(err, "parse vector index config") + } + return parsed, nil +} + +// ParseClassUpdate parses a class after unmarshaling by setting concrete types for the fields +func (p *Parser) ParseClassUpdate(class, update *models.Class) (*models.Class, error) { + if err := p.ParseClass(update); err != nil { + return nil, err + } + mtEnabled, err := validateUpdatingMT(class, update) + if err != nil { + return nil, err + } + + if err := validateImmutableFields(class, update); err != nil { + return nil, err + } + + if err := p.validateModuleConfigsParityAndImmutables(class, update); err != nil { + return nil, err + } + + // run target vectors validation first, as it will reject classes + // where legacy vector was changed to target vectors and vice versa + if err = p.validateNamedVectorConfigsParityAndImmutables(class, update); err != nil { + return nil, err + } + + if err = validateLegacyVectorIndexConfigImmutableFields(class, update); err != nil { + return nil, err + } + + if class.VectorIndexConfig != nil || update.VectorIndexConfig != nil { + vIdxConfig, ok1 := class.VectorIndexConfig.(schemaConfig.VectorIndexConfig) + vIdxConfigU, ok2 := update.VectorIndexConfig.(schemaConfig.VectorIndexConfig) + if !ok1 || !ok2 { + return nil, fmt.Errorf("vector index config wrong type: current=%t new=%t", ok1, ok2) + } + if err := p.validator.ValidateVectorIndexConfigUpdate(vIdxConfig, vIdxConfigU); err != nil { + return nil, fmt.Errorf("validate vector index config: %w", err) + } + } + + if hasTargetVectors(update) { + if err := p.validator.ValidateVectorIndexConfigsUpdate( + asVectorIndexConfigs(class), asVectorIndexConfigs(update)); err != nil { + return nil, err + } + } + + if err := validateShardingConfig(class, update, mtEnabled); err != nil { + return nil, 
fmt.Errorf("validate sharding config: %w", err) + } + + if err = p.validatePropertiesForUpdate(class.Properties, update.Properties); err != nil { + return nil, err + } + + if err := p.validator.ValidateInvertedIndexConfigUpdate( + class.InvertedIndexConfig, + update.InvertedIndexConfig); err != nil { + return nil, fmt.Errorf("inverted index config: %w", err) + } + + return update, nil +} + +func (p *Parser) validatePropertiesForUpdate(existing []*models.Property, new []*models.Property) error { + if len(existing) != len(new) { + return errPropertiesUpdatedInClassUpdate + } + + sort.Slice(existing, func(i, j int) bool { + return existing[i].Name < existing[j].Name + }) + + sort.Slice(new, func(i, j int) bool { + return new[i].Name < new[j].Name + }) + + for i, prop := range existing { + // make a copy of the properties to remove the description field + // so that we can compare the rest of the fields + if prop == nil { + continue + } + if new[i] == nil { + continue + } + + if err := p.validatePropertyForUpdate(prop, new[i]); err != nil { + return errors.Wrapf(err, "property %q", prop.Name) + } + } + + return nil +} + +func propertyAsMap(in any) (map[string]any, error) { + out := make(map[string]any) + + v := reflect.ValueOf(in) + if v.Kind() == reflect.Ptr { + v = v.Elem() + } + + if v.Kind() != reflect.Struct { // Non-structural return error + return nil, fmt.Errorf("asMap only accepts struct or struct pointer; got %T", v) + } + + t := v.Type() + // Traversing structure fields + // Specify the tagName value as the key in the map; the field value as the value in the map + for i := 0; i < v.NumField(); i++ { + tfi := t.Field(i) + if tagValue := tfi.Tag.Get("json"); tagValue != "" { + key := strings.Split(tagValue, ",")[0] + if key == "description" { + continue + } + if key == "nestedProperties" { + nps := v.Field(i).Interface().([]*models.NestedProperty) + out[key] = make([]map[string]any, 0, len(nps)) + for _, np := range nps { + npm, err := propertyAsMap(np) + if 
err != nil { + return nil, err + } + out[key] = append(out[key].([]map[string]any), npm) + } + continue + } + out[key] = v.Field(i).Interface() + } + } + return out, nil +} + +func (p *Parser) validatePropertyForUpdate(existing, new *models.Property) error { + e, err := propertyAsMap(existing) + if err != nil { + return errors.Wrap(err, "converting existing properties to a map") + } + + n, err := propertyAsMap(new) + if err != nil { + return errors.Wrap(err, "converting new properties to a map") + } + + var ( + existingModuleConfig = cutModuleConfig(e) + newModuleConfig = cutModuleConfig(n) + ) + + for moduleName, existingCfg := range existingModuleConfig { + newCfg, ok := newModuleConfig[moduleName] + if !ok { + return errors.Errorf("module %q configuration was removed", moduleName) + } + + if !reflect.DeepEqual(existingCfg, newCfg) { + return errors.Errorf("module %q configuration cannot be updated", moduleName) + } + } + + if !reflect.DeepEqual(e, n) { + return errPropertiesUpdatedInClassUpdate + } + + return nil +} + +func cutModuleConfig(properties map[string]any) map[string]any { + cfg, _ := properties["moduleConfig"].(map[string]any) + delete(properties, "moduleConfig") + return cfg +} + +func hasTargetVectors(class *models.Class) bool { + return len(class.VectorConfig) > 0 +} + +func (p *Parser) validateModuleConfigsParityAndImmutables(initial, updated *models.Class) error { + if updated.ModuleConfig == nil || reflect.DeepEqual(initial.ModuleConfig, updated.ModuleConfig) { + return nil + } + + updatedModConf, ok := updated.ModuleConfig.(map[string]any) + if !ok { + return fmt.Errorf("module config for %s is not a map, got %v", updated.ModuleConfig, updated.ModuleConfig) + } + + updatedModConf, err := p.moduleConfig(updatedModConf) + if err != nil { + return err + } + + initialModConf, _ := initial.ModuleConfig.(map[string]any) + + // this part: + // - allow adding new modules + // - only allows updating generative and rerankers + // - only one gen/rerank 
// module can be present. Existing ones will be replaced, updating with more than one is not
	// allowed
	// - other modules will not be changed. They can be present in the update if they have EXACTLY the same settings
	hasGenerativeUpdate := false
	hasRerankerUpdate := false
	for module := range updatedModConf {
		if p.modules.IsGenerative(module) {
			// at most one generative module may appear in the update
			if hasGenerativeUpdate {
				return fmt.Errorf("updated moduleconfig has multiple generative modules: %v", updatedModConf)
			}
			hasGenerativeUpdate = true
			continue
		}

		if p.modules.IsReranker(module) {
			// at most one reranker module may appear in the update
			if hasRerankerUpdate {
				return fmt.Errorf("updated moduleconfig has multiple reranker modules: %v", updatedModConf)
			}
			hasRerankerUpdate = true
			continue
		}

		// brand-new non-gen/rerank modules may be added freely
		if _, moduleExisted := initialModConf[module]; !moduleExisted {
			continue
		}

		// pre-existing non-gen/rerank modules must be byte-for-byte identical
		if reflect.DeepEqual(initialModConf[module], updatedModConf[module]) {
			continue
		}

		return fmt.Errorf("can only update generative and reranker module configs. Got: %v", module)
	}

	// no (usable) initial config: adopt the updated one wholesale
	if initial.ModuleConfig == nil {
		initial.ModuleConfig = updatedModConf
		return nil
	}

	if _, ok := initial.ModuleConfig.(map[string]any); !ok {
		initial.ModuleConfig = updatedModConf
		return nil
	}
	if hasGenerativeUpdate {
		// clear out old generative module
		for module := range initialModConf {
			if p.modules.IsGenerative(module) {
				delete(initialModConf, module)
			}
		}
	}

	if hasRerankerUpdate {
		// clear out old reranker module
		for module := range initialModConf {
			if p.modules.IsReranker(module) {
				delete(initialModConf, module)
			}
		}
	}

	// merge: updated entries win; untouched initial entries are kept.
	// NOTE(review): this mutates both initial's and updated's ModuleConfig in
	// place — confirm callers expect that side effect.
	for module := range updatedModConf {
		initialModConf[module] = updatedModConf[module]
	}
	updated.ModuleConfig = initialModConf
	return nil
}

// validateNamedVectorConfigsParityAndImmutables checks that a class update
// does not remove a named vector, change a named vector's index type or
// vectorizer, or add the reserved default-named vector to a class that has a
// collection-level (legacy) vector index.
func (p *Parser) validateNamedVectorConfigsParityAndImmutables(initial, updated *models.Class) error {
	if modelsext.ClassHasLegacyVectorIndex(initial) {
		for targetVector := range updated.VectorConfig {
			if targetVector == modelsext.DefaultNamedVectorName {
				return fmt.Errorf("vector named %s cannot be created when collection level vector index is configured", modelsext.DefaultNamedVectorName)
			}
		}
	}

	for vecName, initialCfg := range initial.VectorConfig {
		updatedCfg, ok := updated.VectorConfig[vecName]
		if !ok {
			// removing an existing named vector is not allowed
			return fmt.Errorf("missing config for vector %q", vecName)
		}

		// immutable vector type
		if initialCfg.VectorIndexType != updatedCfg.VectorIndexType {
			return fmt.Errorf("vector index type of vector %q is immutable: attempted change from %q to %q",
				vecName, initialCfg.VectorIndexType, updatedCfg.VectorIndexType)
		}

		// immutable vectorizer (only enforced when the initial config has the
		// expected single-key map shape)
		if imap, ok := initialCfg.Vectorizer.(map[string]interface{}); ok && len(imap) == 1 {
			umap, ok := updatedCfg.Vectorizer.(map[string]interface{})
			if !ok || len(umap) != 1 {
				return fmt.Errorf("invalid vectorizer config for vector %q", vecName)
			}

			// extract the single key of each map: the vectorizer module name
			ivectorizer := ""
			for k := range imap {
				ivectorizer = k
			}
			uvectorizer := ""
			for k := range umap {
				uvectorizer = k
			}

			if ivectorizer != uvectorizer {
				return fmt.Errorf("vectorizer of vector %q is immutable: attempted change from %q to %q",
					vecName, ivectorizer, uvectorizer)
			}
		}
	}
	return nil
}

// asVectorIndexConfigs collects the per-named-vector index configs of a class
// keyed by vector name; nil when the class has no named vectors.
// NOTE(review): the type assertion panics if a config is not a
// schemaConfig.VectorIndexConfig — presumably guaranteed upstream; confirm.
func asVectorIndexConfigs(c *models.Class) map[string]schemaConfig.VectorIndexConfig {
	if c.VectorConfig == nil {
		return nil
	}

	cfgs := map[string]schemaConfig.VectorIndexConfig{}
	for vecName := range c.VectorConfig {
		cfgs[vecName] = c.VectorConfig[vecName].VectorIndexConfig.(schemaConfig.VectorIndexConfig)
	}
	return cfgs
}

// validateShardingConfig rejects changes to the immutable sharding settings
// (shard count, virtual-per-physical). Skipped entirely when multi-tenancy is
// enabled.
func validateShardingConfig(current, update *models.Class, mtEnabled bool) error {
	if mtEnabled {
		return nil
	}
	first, ok := current.ShardingConfig.(shardingConfig.Config)
	if !ok {
		return fmt.Errorf("current config is not well-formed")
	}
	second, ok := update.ShardingConfig.(shardingConfig.Config)
	if !ok {
		return fmt.Errorf("updated config is not well-formed")
	}
	if first.DesiredCount != second.DesiredCount {
		return fmt.Errorf("re-sharding not supported yet: shard count is immutable: "+
			"attempted change from \"%d\" to \"%d\"", first.DesiredCount,
			second.DesiredCount)
	}

	if first.VirtualPerPhysical != second.VirtualPerPhysical {
		return fmt.Errorf("virtual shards per physical is immutable: "+
			"attempted change from \"%d\" to \"%d\"", first.VirtualPerPhysical,
			second.VirtualPerPhysical)
	}
	return nil
}
diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/parser_test.go b/platform/dbops/binaries/weaviate-src/usecases/schema/parser_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..6d4d4c18a162815e1d09b02752d93321c21fff0c
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/usecases/schema/parser_test.go
@@ -0,0 +1,182 @@
//                           _       _
// __      _____  __ ___   ___  __ _| |_ ___
// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
//
// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io
//

package schema

import (
	"strings"
	"testing"

	"github.com/stretchr/testify/require"
	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/vectorindex"
	enthnsw "github.com/weaviate/weaviate/entities/vectorindex/hnsw"
	"github.com/weaviate/weaviate/usecases/fakes"
	"github.com/weaviate/weaviate/usecases/sharding/config"
)

// shorthand for the HNSW index type used throughout the table below
const hnswT = vectorindex.VectorIndexTypeHNSW

// TestParser exercises Parser.ParseClassUpdate through a table of class
// updates, focusing on which module-config changes are accepted and how
// generative/reranker modules replace their predecessors.
func TestParser(t *testing.T) {
	cs := fakes.NewFakeClusterState()
	p := NewParser(cs, dummyParseVectorConfig, fakeValidator{}, fakeModulesProvider{}, nil)

	sc := config.Config{DesiredCount: 1, VirtualPerPhysical: 128, ActualCount: 1, DesiredVirtualCount: 128, Key: "_id", Strategy: "hash", Function: "murmur3"}
	vic := enthnsw.NewDefaultUserConfig()
	emptyMap := map[string]interface{}{}
	valueMap := map[string]interface{}{"something": emptyMap}

	testCases := []struct {
		name     string
		old      *models.Class
		update   *models.Class
		expected *models.Class
		error    bool
	}{
		{
			name:     "update description",
			old:      &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: enthnsw.NewDefaultUserConfig(), ShardingConfig: sc},
			update:   &models.Class{Class: "Test", Description: "NEW", VectorIndexType: hnswT, VectorIndexConfig: enthnsw.NewDefaultUserConfig()},
			expected: &models.Class{Class: "Test", Description: "NEW", VectorIndexType: hnswT, VectorIndexConfig: enthnsw.NewDefaultUserConfig(), ShardingConfig: sc},
			error:    false,
		},
		{
			name:     "update generative module - previously not configured",
			old:      &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc},
			update:   &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ModuleConfig: map[string]interface{}{"generative-madeup": emptyMap}},
			expected: &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"generative-madeup": emptyMap}},
			error:    false,
		},
		{
			name:     "update generative module - previously not configured, other modules present",
			old:      &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"text2vec-random": emptyMap}},
			update:   &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ModuleConfig: map[string]interface{}{"generative-madeup": emptyMap}},
			expected: &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"generative-madeup": emptyMap, "text2vec-random": emptyMap}},
			error:    false,
		},
		{
			// the old generative module is replaced, not kept alongside
			name:     "update generative module - previously not configured, other generative module present",
			old:      &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"generative-random": emptyMap}},
			update:   &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ModuleConfig: map[string]interface{}{"generative-madeup": emptyMap}},
			expected: &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"generative-madeup": emptyMap}},
			error:    false,
		},
		{
			name:     "update reranker module - previously not configured",
			old:      &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc},
			update:   &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ModuleConfig: map[string]interface{}{"reranker-madeup": emptyMap}},
			expected: &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"reranker-madeup": emptyMap}},
			error:    false,
		},
		{
			name:     "update reranker module - previously not configured, other modules present",
			old:      &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"text2vec-random": emptyMap}},
			update:   &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ModuleConfig: map[string]interface{}{"reranker-madeup": emptyMap}},
			expected: &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"reranker-madeup": emptyMap, "text2vec-random": emptyMap}},
			error:    false,
		},
		{
			// only the reranker is replaced; the generative module survives
			name:     "update reranker module - previously not configured, other generative module present",
			old:      &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"reranker-random": emptyMap, "generative-random": emptyMap}},
			update:   &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ModuleConfig: map[string]interface{}{"reranker-madeup": emptyMap}},
			expected: &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"reranker-madeup": emptyMap, "generative-random": emptyMap}},
			error:    false,
		},
		{
			name:     "update reranker and generative module - previously not configured, other text2vec module present",
			old:      &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"reranker-random": emptyMap, "generative-random": emptyMap, "text2vec-random": emptyMap}},
			update:   &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ModuleConfig: map[string]interface{}{"reranker-madeup": emptyMap, "generative-madeup": emptyMap}},
			expected: &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"reranker-madeup": emptyMap, "generative-madeup": emptyMap, "text2vec-random": emptyMap}},
			error:    false,
		},
		{
			name:     "update text2vec - previously not configured, add a new vector index",
			old:      &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc},
			update:   &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ModuleConfig: map[string]interface{}{"text2vec-random": emptyMap}},
			expected: &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"text2vec-random": emptyMap}},
			error:    false,
		},
		{
			name:   "update text2vec - previously differently configured => error",
			old:    &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"text2vec-random": valueMap}},
			update: &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ModuleConfig: map[string]interface{}{"text2vec-random": emptyMap}},
			error:  true,
		},
		{
			// NOTE(review): the name says "=> error" but the case expects
			// success — the text2vec module is new here, which is allowed;
			// consider renaming the case.
			name:     "update text2vec - other modules present => error",
			old:      &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"generative-random": valueMap}},
			update:   &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ModuleConfig: map[string]interface{}{"text2vec-random": emptyMap, "generative-random": valueMap}},
			expected: &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"text2vec-random": emptyMap, "generative-random": valueMap}},
			error:    false,
		},
		{
			name:     "update with same text2vec config",
			old:      &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"text2vec-random": valueMap}},
			update:   &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ModuleConfig: map[string]interface{}{"text2vec-random": valueMap}},
			expected: &models.Class{Class: "Test", VectorIndexType: hnswT, VectorIndexConfig: vic, ShardingConfig: sc, ModuleConfig: map[string]interface{}{"text2vec-random": valueMap}},
			error:    false,
		},
	}

	for _, test := range testCases {
		t.Run(test.name, func(t *testing.T) {
			update, err := p.ParseClassUpdate(test.old, test.update)
			if test.error {
				require.Error(t, err)
			} else {
				require.NoError(t, err)
				require.Equal(t, test.expected.Description, update.Description)
				require.Equal(t, test.expected.ModuleConfig, update.ModuleConfig)
			}
		})
	}
}

// Test_asMap checks that propertyAsMap strips "description" at both the top
// level and inside nested properties.
func Test_asMap(t *testing.T) {
	t.Run("not nil", func(t *testing.T) {
		m, err := propertyAsMap(&models.Property{
			Name:        "name",
			Description: "description",
			DataType:    []string{"object"},
			NestedProperties: []*models.NestedProperty{{
				Name:        "nested",
				Description: "nested description",
				DataType:    []string{"text"},
			}},
		})
		require.NotNil(t, m)
		require.Nil(t, err)

		_, ok := m["description"]
		require.False(t, ok)

		nps, ok := m["nestedProperties"].([]map[string]any)
		require.True(t, ok)
		require.Len(t, nps, 1)

		_, ok = nps[0]["description"]
		require.False(t, ok)
	})
}

// fakeModulesProvider classifies modules purely by substring of their name.
type fakeModulesProvider struct{}

func (m fakeModulesProvider) IsReranker(name string) bool {
	return strings.Contains(name, "reranker")
}

func (m fakeModulesProvider) IsGenerative(name string) bool {
	return strings.Contains(name, "generative")
}

func (m fakeModulesProvider) IsMultiVector(name string) bool {
	return strings.Contains(name, "colbert")
}
diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/property.go b/platform/dbops/binaries/weaviate-src/usecases/schema/property.go
new file mode 100644
index 0000000000000000000000000000000000000000..5fdf9b8994f72a268c32f586d27b498b9627d317
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/usecases/schema/property.go
@@ -0,0 +1,157 @@
//                           _       _
// __      _____  __ ___   ___  __ _| |_ ___
// \
// V  V /  __/ (_| |\ V /| | (_| | ||  __/
//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
//
// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io
//

package schema

import (
	"context"
	"fmt"
	"strings"

	clusterSchema "github.com/weaviate/weaviate/cluster/schema"
	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/schema"
	"github.com/weaviate/weaviate/usecases/auth/authorization"
)

// AddClassProperty it is upsert operation. it adds properties to a class and updates
// existing properties if the merge bool passed true.
//
// Flow: authorize (UPDATE then READ on the collection) → basic per-property
// validation (name/dataType present, first letter lowercased in place) →
// defaults → duplicate-name check (skipped when merge) → full validation →
// dedup → migrate settings → merge into class and persist via schemaManager.
// Returns the (mutated) class and the schema version of the change.
func (h *Handler) AddClassProperty(ctx context.Context, principal *models.Principal,
	class *models.Class, className string, merge bool, newProps ...*models.Property,
) (*models.Class, uint64, error) {
	if err := h.Authorizer.Authorize(ctx, principal, authorization.UPDATE, authorization.CollectionsMetadata(className)...); err != nil {
		return nil, 0, err
	}

	// NOTE(review): a separate READ authorization on the same collection —
	// presumably required because validation reads the schema; confirm it is
	// not redundant with the UPDATE check above.
	if err := h.Authorizer.Authorize(ctx, principal, authorization.READ, authorization.CollectionsMetadata(className)...); err != nil {
		return nil, 0, err
	}
	// classGetterWithAuth resolves referenced classes, enforcing READ on each
	classGetterWithAuth := func(name string) (*models.Class, error) {
		if err := h.Authorizer.Authorize(ctx, principal, authorization.READ, authorization.CollectionsMetadata(name)...); err != nil {
			return nil, err
		}
		return h.schemaReader.ReadOnlyClass(name), nil
	}

	if class == nil {
		return nil, 0, fmt.Errorf("class is nil: %w", ErrNotFound)
	}

	if len(newProps) == 0 {
		return nil, 0, nil
	}

	// validate new props
	for _, prop := range newProps {
		if prop.Name == "" {
			return nil, 0, fmt.Errorf("property must contain name")
		}
		prop.Name = schema.LowercaseFirstLetter(prop.Name)
		if prop.DataType == nil {
			return nil, 0, fmt.Errorf("property must contain dataType")
		}
	}

	if err := h.setNewPropDefaults(class, newProps...); err != nil {
		return nil, 0, err
	}

	// collect existing names (lowercased) so validation can reject
	// conflicts; when merging, conflicts are allowed so the set stays empty
	existingNames := make(map[string]bool, len(class.Properties))
	if !merge {
		for _, prop := range class.Properties {
			existingNames[strings.ToLower(prop.Name)] = true
		}
	}

	if err := h.validateProperty(class, existingNames, false, classGetterWithAuth, newProps...); err != nil {
		return nil, 0, err
	}

	// TODO-RAFT use UpdateProperty() for adding/merging property when index idempotence exists
	// revisit when index idempotence exists and/or allowing merging properties on index.
	props := schema.DedupProperties(class.Properties, newProps)
	if len(props) == 0 {
		// everything was a duplicate; nothing to persist
		return class, 0, nil
	}

	migratePropertySettings(props...)

	class.Properties = clusterSchema.MergeProps(class.Properties, props)
	version, err := h.schemaManager.AddProperty(ctx, class.Class, props...)
	if err != nil {
		return nil, 0, err
	}
	return class, version, err
}

// DeleteClassProperty from existing Schema
//
// Property deletion is intentionally unsupported; only the authorization
// check runs before the static error is returned.
func (h *Handler) DeleteClassProperty(ctx context.Context, principal *models.Principal,
	class string, property string,
) error {
	err := h.Authorizer.Authorize(ctx, principal, authorization.UPDATE, authorization.CollectionsMetadata(class)...)
	if err != nil {
		return err
	}

	return fmt.Errorf("deleting a property is currently not supported, see " +
		"https://github.com/weaviate/weaviate/issues/973 for details")
	// return h.deleteClassProperty(ctx, class, property, kind.Action)
}

// setNewPropDefaults applies the generic property defaults and then the
// module-specific per-property defaults.
func (h *Handler) setNewPropDefaults(class *models.Class, props ...*models.Property) error {
	setPropertyDefaults(props...)
	h.moduleConfig.SetSinglePropertyDefaults(class, props...)
	return nil
}

// validatePropModuleConfig checks each property's per-vectorizer module
// config: the vectorizer must be valid (unknown keys are skipped), must be
// configured for the class or one of its target vectors, and its config must
// be a map.
func (h *Handler) validatePropModuleConfig(class *models.Class, props ...*models.Property) error {
	// gather every vectorizer configured at class or named-vector level
	configuredVectorizers := map[string]struct{}{}
	if class.Vectorizer != "" {
		configuredVectorizers[class.Vectorizer] = struct{}{}
	}

	for targetVector, cfg := range class.VectorConfig {
		if vm, ok := cfg.Vectorizer.(map[string]interface{}); ok && len(vm) == 1 {
			for vectorizer := range vm {
				configuredVectorizers[vectorizer] = struct{}{}
			}
		} else if len(vm) > 1 {
			return fmt.Errorf("vector index %q has multiple vectorizers", targetVector)
		}
	}

	for _, prop := range props {
		if prop.ModuleConfig == nil {
			continue
		}

		modconfig, ok := prop.ModuleConfig.(map[string]interface{})
		if !ok {
			return fmt.Errorf("%v property config invalid", prop.Name)
		}

		for vectorizer, cfg := range modconfig {
			// keys that are not valid vectorizers are ignored here
			if err := h.vectorizerValidator.ValidateVectorizer(vectorizer); err != nil {
				continue
			}

			if _, ok := configuredVectorizers[vectorizer]; !ok {
				return fmt.Errorf("vectorizer %q not configured for any of target vectors", vectorizer)
			}

			if _, ok := cfg.(map[string]interface{}); !ok {
				return fmt.Errorf("vectorizer config for vectorizer %q not of type map[string]interface{}", vectorizer)
			}
		}
	}

	return nil
}
diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/property_test.go b/platform/dbops/binaries/weaviate-src/usecases/schema/property_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..9ae71a32dc41789a4fc91b0f7351324860e6dd71
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/usecases/schema/property_test.go
@@ -0,0 +1,783 @@
//                           _       _
// __      _____  __ ___   ___  __ _| |_ ___
// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
//
// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io
//

package schema

import (
	"context"
	"fmt"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/entities/schema"
	"github.com/weaviate/weaviate/entities/tokenizer"
)

// TestHandler_AddProperty covers the happy path (one property per primitive
// data type) and duplicate-name rejection in any casing.
func TestHandler_AddProperty(t *testing.T) {
	ctx := context.Background()

	t.Run("adds property of each data type", func(t *testing.T) {
		handler, fakeSchemaManager := newTestHandler(t, &fakeDB{})

		class := models.Class{
			Class:             "NewClass",
			Vectorizer:        "none",
			ReplicationConfig: &models.ReplicationConfig{Factor: 1},
		}
		fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil)
		fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil)
		_, _, err := handler.AddClass(ctx, nil, &class)
		require.NoError(t, err)
		dataTypes := []schema.DataType{
			schema.DataTypeInt,
			schema.DataTypeIntArray,
			schema.DataTypeNumber,
			schema.DataTypeNumberArray,
			schema.DataTypeText,
			schema.DataTypeTextArray,
			schema.DataTypeBoolean,
			schema.DataTypeBooleanArray,
			schema.DataTypeDate,
			schema.DataTypeDateArray,
			schema.DataTypeUUID,
			schema.DataTypeUUIDArray,
			schema.DataTypeBlob,
			schema.DataTypeGeoCoordinates,
			schema.DataTypePhoneNumber,
			schema.DataTypeString,
			schema.DataTypeStringArray,
		}

		t.Run("adds properties", func(t *testing.T) {
			for _, dt := range dataTypes {
				t.Run(dt.AsName(), func(t *testing.T) {
					prop := &models.Property{
						Name:     dt.AsName(),
						DataType: dt.PropString(),
					}
					fakeSchemaManager.On("AddProperty", class.Class, []*models.Property{prop}).Return(nil)
					_, _, err := handler.AddClassProperty(ctx, nil, &class, class.Class, false, prop)
					require.NoError(t, err)
				})
			}
			fakeSchemaManager.AssertExpectations(t)
		})
	})

	t.Run("fails adding property of existing name", func(t *testing.T) {
		handler, fakeSchemaManager := newTestHandler(t, &fakeDB{})

		class := models.Class{
			Class: "NewClass",
			Properties: []*models.Property{
				{
					Name:     "my_prop",
					DataType: schema.DataTypeText.PropString(),
				},
				{
					Name:     "otherProp",
					DataType: schema.DataTypeText.PropString(),
				},
			},
			Vectorizer:        "none",
			ReplicationConfig: &models.ReplicationConfig{Factor: 1},
		}
		fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil)
		fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil)
		_, _, err := handler.AddClass(ctx, nil, &class)
		require.NoError(t, err)

		// name collisions must be detected case-insensitively
		existingNames := []string{
			"my_prop",   // lowercase, same casing
			"my_Prop",   // lowercase, different casing
			"otherProp", // mixed case, same casing
			"otherprop", // mixed case, all lower
			"OtHerProP", // mixed case, other casing
		}

		t.Run("adding properties", func(t *testing.T) {
			for _, propName := range existingNames {
				t.Run(propName, func(t *testing.T) {
					prop := &models.Property{
						Name:     propName,
						DataType: schema.DataTypeText.PropString(),
					}
					_, _, err := handler.AddClassProperty(ctx, nil, &class, class.Class, false, prop)
					require.ErrorContains(t, err, "conflict for property")
					require.ErrorContains(t, err, "already in use or provided multiple times")
				})
			}
			fakeSchemaManager.AssertNotCalled(t, "AddProperty", mock.Anything, mock.Anything)
		})
	})
}

// TestHandler_AddProperty_Object verifies that we can add properties on class with the Object and ObjectArray type.
// This test is different than TestHandler_AddProperty because Object and ObjectArray require nested properties to be validated.
func TestHandler_AddProperty_Object(t *testing.T) {
	ctx := context.Background()

	t.Run("adds property of each object data type", func(t *testing.T) {
		handler, fakeSchemaManager := newTestHandler(t, &fakeDB{})

		class := models.Class{
			Class:             "NewClass",
			Vectorizer:        "none",
			ReplicationConfig: &models.ReplicationConfig{Factor: 1},
		}
		fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil)
		fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil)
		_, _, err := handler.AddClass(ctx, nil, &class)
		require.NoError(t, err)
		dataTypes := []schema.DataType{
			schema.DataTypeObject,
			schema.DataTypeObjectArray,
		}

		t.Run("adds properties", func(t *testing.T) {
			for _, dt := range dataTypes {
				t.Run(dt.AsName(), func(t *testing.T) {
					prop := &models.Property{
						Name:             dt.AsName(),
						DataType:         dt.PropString(),
						NestedProperties: []*models.NestedProperty{{Name: "test", DataType: schema.DataTypeInt.PropString()}},
					}
					fakeSchemaManager.On("AddProperty", class.Class, []*models.Property{prop}).Return(nil)
					_, _, err := handler.AddClassProperty(ctx, nil, &class, class.Class, false, prop)
					require.NoError(t, err)
				})
			}
			fakeSchemaManager.AssertExpectations(t)
		})
	})
}

// TestHandler_AddProperty_Tokenization drives AddClassProperty with every
// (dataType, tokenization) combination and checks which are accepted.
func TestHandler_AddProperty_Tokenization(t *testing.T) {
	ctx := context.Background()

	class := models.Class{
		Class:             "NewClass",
		Vectorizer:        "none",
		ReplicationConfig: &models.ReplicationConfig{Factor: 1},
	}

	type testCase struct {
		dataType             schema.DataType
		tokenization         string
		expectedTokenization string
		expectedErrContains  []string
	}

	runTestCases := func(t *testing.T, testCases []testCase) {
		for _, tc := range testCases {
			// Set up schema independently for each test
			handler, fakeSchemaManager := newTestHandler(t, &fakeDB{})
			fakeSchemaManager.On("ReadOnlyClass", mock.Anything, mock.Anything).Return(&class)

			strTokenization := "empty"
			if tc.tokenization != "" {
				strTokenization = tc.tokenization
			}
			propName := fmt.Sprintf("%s_%s", tc.dataType.AsName(), strTokenization)

			t.Run(propName, func(t *testing.T) {
				prop := &models.Property{
					Name:         propName,
					DataType:     tc.dataType.PropString(),
					Tokenization: tc.tokenization,
				}

				// If the dataType is a nested data type ensure we pass validation by adding a dummy nested property
				if tc.dataType == schema.DataTypeObject || tc.dataType == schema.DataTypeObjectArray {
					prop.NestedProperties = []*models.NestedProperty{{Name: "test", DataType: schema.DataTypeInt.PropString()}}
				}

				// If we expect no error, assert that the call is made with the property, else assert that no call was made to add the
				// property
				fakeSchemaManager.On("ReadOnlyClass", mock.Anything, mock.Anything).Return(&class)
				if len(tc.expectedErrContains) == 0 {
					fakeSchemaManager.On("AddProperty", class.Class, []*models.Property{prop}).Return(nil)
				} else {
					fakeSchemaManager.AssertNotCalled(t, "AddProperty", mock.Anything, mock.Anything)
				}

				_, _, err := handler.AddClassProperty(ctx, nil, &class, class.Class, false, prop)
				if len(tc.expectedErrContains) == 0 {
					require.NoError(t, err)
				} else {
					for i := range tc.expectedErrContains {
						assert.ErrorContains(t, err, tc.expectedErrContains[i])
					}
				}
			})
		}
	}

	t.Run("text/text[]", func(t *testing.T) {
		dataTypes := []schema.DataType{schema.DataTypeText, schema.DataTypeTextArray}

		testCases := []testCase{}
		for _, dataType := range dataTypes {
			// all tokenizations
			for _, tokenization := range tokenizer.Tokenizations {
				testCases = append(testCases, testCase{
					dataType:             dataType,
					tokenization:         tokenization,
					expectedTokenization: tokenization,
				})
			}

			// empty tokenization
			testCases = append(testCases, testCase{
				dataType:             dataType,
				tokenization:         "",
				expectedTokenization: models.PropertyTokenizationWord,
			})

			// non-existent tokenization
			testCases = append(testCases, testCase{
				dataType:     dataType,
				tokenization: "nonExistent",
				expectedErrContains: []string{
					"tokenization 'nonExistent' is not allowed for data type",
					dataType.String(),
				},
			})
		}

		runTestCases(t, testCases)
	})

	t.Run("[deprecated string] string/string[]", func(t *testing.T) {
		dataTypes := []schema.DataType{schema.DataTypeString, schema.DataTypeStringArray}

		testCases := []testCase{}
		for _, dataType := range dataTypes {
			// all tokenizations: deprecated string types accept only word
			// (mapped to whitespace) and field
			for _, tokenization := range tokenizer.Tokenizations {
				switch tokenization {
				case models.PropertyTokenizationWord:
					testCases = append(testCases, testCase{
						dataType:             dataType,
						tokenization:         tokenization,
						expectedTokenization: models.PropertyTokenizationWhitespace,
					})
				case models.PropertyTokenizationField:
					testCases = append(testCases, testCase{
						dataType:             dataType,
						tokenization:         tokenization,
						expectedTokenization: models.PropertyTokenizationField,
					})
				default:
					testCases = append(testCases, testCase{
						dataType:     dataType,
						tokenization: tokenization,
						expectedErrContains: []string{
							"is not allowed for data type",
							tokenization,
							dataType.String(),
						},
					})
				}
			}

			// empty tokenization
			testCases = append(testCases, testCase{
				dataType:             dataType,
				tokenization:         "",
				expectedTokenization: models.PropertyTokenizationWhitespace,
			})

			// non-existent tokenization
			testCases = append(testCases, testCase{
				dataType:     dataType,
				tokenization: "nonExistent",
				expectedErrContains: []string{
					"tokenization 'nonExistent' is not allowed for data type",
					dataType.String(),
				},
			})
		}

		runTestCases(t, testCases)
	})

	t.Run("non text/text[]", func(t *testing.T) {
		dataTypes := []schema.DataType{}
		for _, dt := range schema.PrimitiveDataTypes {
			switch dt {
			case schema.DataTypeText, schema.DataTypeTextArray:
				// skip
			default:
				dataTypes = append(dataTypes, dt)
			}
		}

		testCases := []testCase{}
		for _, dataType := range dataTypes {
			// all tokenizations: non-text types accept no tokenization at all
			for _, tokenization := range tokenizer.Tokenizations {
				testCases = append(testCases, testCase{
					dataType:     dataType,
					tokenization: tokenization,
					expectedErrContains: []string{
						"tokenization is not allowed for data type",
						dataType.String(),
					},
				})
			}

			// empty tokenization
			testCases = append(testCases, testCase{
				dataType:             dataType,
				tokenization:         "",
				expectedTokenization: "",
			})

			// non-existent tokenization
			testCases = append(testCases, testCase{
				dataType:     dataType,
				tokenization: "nonExistent",
				expectedErrContains: []string{
					"tokenization is not allowed for data type",
					dataType.String(),
				},
			})
		}

		runTestCases(t, testCases)
	})

	t.Run("object/object[]", func(t *testing.T) {
		dataTypes := schema.NestedDataTypes

		testCases := []testCase{}
		for _, dataType := range dataTypes {
			// all tokenizations
			for _, tokenization := range tokenizer.Tokenizations {
				testCases = append(testCases, testCase{
					dataType:     dataType,
					tokenization: tokenization,
					expectedErrContains: []string{
						"tokenization is not allowed for object/object[] data types",
					},
				})
			}

			// empty tokenization
			testCases = append(testCases, testCase{
				dataType:             dataType,
				tokenization:         "",
				expectedTokenization: "",
			})

			// non-existent tokenization
			testCases = append(testCases, testCase{
				dataType:     dataType,
				tokenization: "nonExistent",
				expectedErrContains: []string{
					"tokenization is not allowed for object/object[] data types",
				},
			})
		}

		runTestCases(t, testCases)
	})
}

// TestHandler_AddProperty_Reference_Tokenization checks that cross-reference
// properties reject any non-empty tokenization value.
func TestHandler_AddProperty_Reference_Tokenization(t *testing.T) {
	ctx := context.Background()

	handler, fakeSchemaManager := newTestHandler(t, &fakeDB{})

	class := models.Class{
		Class:             "NewClass",
		Vectorizer:        "none",
		ReplicationConfig: &models.ReplicationConfig{Factor: 1},
	}
	refClass := models.Class{
		Class:             "RefClass",
		Vectorizer:        "none",
		ReplicationConfig: &models.ReplicationConfig{Factor: 1},
	}
	fakeSchemaManager.On("ReadOnlyClass", mock.Anything, mock.Anything).Return(&refClass)
	fakeSchemaManager.On("AddClass", mock.Anything, mock.Anything).Return(nil).Twice()
	fakeSchemaManager.On("QueryCollectionsCount").Return(0, nil).Twice()
	fakeSchemaManager.On("ReadOnlyClass", mock.Anything, mock.Anything).Return(&class)
	_, _, err := handler.AddClass(ctx, nil, &class)
	require.NoError(t, err)
	_, _, err = handler.AddClass(ctx, nil, &refClass)
	require.NoError(t, err)

	dataType := []string{refClass.Class}

	// all tokenizations
	for _, tokenization := range tokenizer.Tokenizations {
		propName := fmt.Sprintf("ref_%s", tokenization)
		t.Run(propName, func(t *testing.T) {
			_, _, err := handler.AddClassProperty(ctx, nil, &class, class.Class, false,
				&models.Property{
					Name:         propName,
					DataType:     dataType,
					Tokenization: tokenization,
				})

			assert.ErrorContains(t, err, "tokenization is not allowed for reference data type")
		})
	}

	fakeSchemaManager.AssertNotCalled(t, "AddProperty", mock.Anything, mock.Anything)

	// non-existent tokenization
	propName := "ref_nonExistent"
	t.Run(propName, func(t *testing.T) {
		_, _, err := handler.AddClassProperty(ctx, nil, &class, class.Class, false,
			&models.Property{
				Name:         propName,
				DataType:     dataType,
				Tokenization: "nonExistent",
			})

		assert.ErrorContains(t, err, "tokenization is not allowed for reference data type")
	})

	fakeSchemaManager.AssertNotCalled(t, "AddProperty", mock.Anything, mock.Anything)

	// empty tokenization is the only accepted value for references
	propName = "ref_empty"
	t.Run(propName, func(t *testing.T) {
		fakeSchemaManager.On("AddProperty", mock.Anything, mock.Anything).Return(nil)
		_, _, err := handler.AddClassProperty(ctx, nil, &class, class.Class, false,
			&models.Property{
				Name:         propName,
				DataType:     dataType,
				Tokenization: "",
			})

		require.NoError(t, err)
		fakeSchemaManager.AssertExpectations(t)
	})
}

// Test_Validation_PropertyTokenization unit-tests the lower-level
// validatePropertyTokenization helper directly, without the handler flow.
// (Function continues past the end of this chunk.)
func Test_Validation_PropertyTokenization(t *testing.T) {
	type testCase struct {
		name             string
		tokenization     string
		propertyDataType schema.PropertyDataType
		expectedErrMsg   string
	}

	runTestCases := func(t *testing.T, testCases []testCase) {
		handler, _ := newTestHandler(t, &fakeDB{})
		for _, tc := range testCases {
			t.Run(tc.name, func(t *testing.T) {
				err := handler.validatePropertyTokenization(tc.tokenization, tc.propertyDataType)
				if tc.expectedErrMsg == "" {
					assert.Nil(t, err)
				} else {
					assert.NotNil(t, err)
					assert.EqualError(t, err, tc.expectedErrMsg)
				}
			})
		}
	}

	t.Run("validates text/textArray and all tokenizations", func(t *testing.T) {
		testCases := []testCase{}
		for _, dataType := range []schema.DataType{
			schema.DataTypeText, schema.DataTypeTextArray,
		} {
			for _, tokenization := range tokenizer.Tokenizations {
				testCases = append(testCases, testCase{
					name:             fmt.Sprintf("%s + '%s'", dataType, tokenization),
					propertyDataType: newFakePrimitivePDT(dataType),
					tokenization:     tokenization,
					expectedErrMsg:   "",
				})
			}

			for _, tokenization := range []string{"non_existing", ""} {
				testCases = append(testCases, testCase{
					name:             fmt.Sprintf("%s + '%s'", dataType, tokenization),
					propertyDataType: newFakePrimitivePDT(dataType),
					tokenization:     tokenization,
					expectedErrMsg:   fmt.Sprintf("tokenization '%s' is not allowed for data type '%s'", tokenization, dataType),
				})
			}
		}

		runTestCases(t, testCases)
	})

	t.Run("validates non text/textArray and all tokenizations", func(t *testing.T) {
		testCases := []testCase{}
		for _, dataType := range schema.PrimitiveDataTypes {
			switch dataType {
			case schema.DataTypeText, schema.DataTypeTextArray:
				continue
			default:
				testCases = append(testCases, testCase{
					name:             fmt.Sprintf("%s + ''", dataType),
					propertyDataType: newFakePrimitivePDT(dataType),
					tokenization:     "",
					expectedErrMsg:   "",
				})

				for _, tokenization := range append(tokenizer.Tokenizations, "non_existing") {
					testCases = append(testCases, testCase{
						name: fmt.Sprintf("%s + '%s'", dataType,
tokenization), + propertyDataType: newFakePrimitivePDT(dataType), + tokenization: tokenization, + expectedErrMsg: fmt.Sprintf("tokenization is not allowed for data type '%s'", dataType), + }) + } + } + } + + runTestCases(t, testCases) + }) + + t.Run("validates nested datatype and all tokenizations", func(t *testing.T) { + testCases := []testCase{} + for _, dataType := range schema.NestedDataTypes { + testCases = append(testCases, testCase{ + name: fmt.Sprintf("%s + ''", dataType), + propertyDataType: newFakeNestedPDT(dataType), + tokenization: "", + expectedErrMsg: "", + }) + + for _, tokenization := range append(tokenizer.Tokenizations, "non_existent") { + testCases = append(testCases, testCase{ + name: fmt.Sprintf("%s + '%s'", dataType, tokenization), + propertyDataType: newFakeNestedPDT(dataType), + tokenization: tokenization, + expectedErrMsg: "tokenization is not allowed for object/object[] data types", + }) + } + } + + runTestCases(t, testCases) + }) + + t.Run("validates ref datatype (empty) and all tokenizations", func(t *testing.T) { + testCases := []testCase{} + + testCases = append(testCases, testCase{ + name: "ref + ''", + propertyDataType: newFakePrimitivePDT(""), + tokenization: "", + expectedErrMsg: "", + }) + + for _, tokenization := range append(tokenizer.Tokenizations, "non_existing") { + testCases = append(testCases, testCase{ + name: fmt.Sprintf("ref + '%s'", tokenization), + propertyDataType: newFakePrimitivePDT(""), + tokenization: tokenization, + expectedErrMsg: "tokenization is not allowed for reference data type", + }) + } + + runTestCases(t, testCases) + }) + + t.Run("[deprecated string] validates string/stringArray and all tokenizations", func(t *testing.T) { + testCases := []testCase{} + for _, dataType := range []schema.DataType{ + schema.DataTypeString, schema.DataTypeStringArray, + } { + for _, tokenization := range append(tokenizer.Tokenizations, "non_existing") { + switch tokenization { + case models.PropertyTokenizationWord, 
models.PropertyTokenizationField: + testCases = append(testCases, testCase{ + name: fmt.Sprintf("%s + %s", dataType, tokenization), + propertyDataType: newFakePrimitivePDT(dataType), + tokenization: tokenization, + expectedErrMsg: "", + }) + default: + testCases = append(testCases, testCase{ + name: fmt.Sprintf("%s + %s", dataType, tokenization), + propertyDataType: newFakePrimitivePDT(dataType), + tokenization: tokenization, + expectedErrMsg: fmt.Sprintf("tokenization '%s' is not allowed for data type '%s'", tokenization, dataType), + }) + } + } + } + + runTestCases(t, testCases) + }) +} + +func Test_Validation_PropertyIndexing(t *testing.T) { + vFalse := false + vTrue := true + + handler, _ := newTestHandler(t, &fakeDB{}) + + t.Run("validates indexInverted + indexFilterable + indexSearchable combinations", func(t *testing.T) { + type testCase struct { + propName string + dataType schema.DataType + indexInverted *bool + indexFilterable *bool + indexSearchable *bool + expectedErrContains []string + } + + boolPtrToStr := func(ptr *bool) string { + if ptr == nil { + return "nil" + } + return fmt.Sprintf("%v", *ptr) + } + + allBoolPtrs := []*bool{nil, &vFalse, &vTrue} + dataTypes := append([]schema.DataType{}, schema.PrimitiveDataTypes...) + dataTypes = append(dataTypes, schema.NestedDataTypes...) 
+ + testCases := []testCase{} + for _, dataType := range dataTypes { + for _, inverted := range allBoolPtrs { + for _, filterable := range allBoolPtrs { + for _, searchable := range allBoolPtrs { + propName := fmt.Sprintf("%s_inverted_%s_filterable_%s_searchable_%s", + dataType.AsName(), boolPtrToStr(inverted), boolPtrToStr(filterable), boolPtrToStr(searchable)) + + // inverted can not be set when filterable or/and searchable is already set + if inverted != nil && (filterable != nil || searchable != nil) { + testCases = append(testCases, testCase{ + propName: propName, + dataType: dataType, + indexInverted: inverted, + indexFilterable: filterable, + indexSearchable: searchable, + expectedErrContains: []string{ + "`indexInverted` is deprecated and can not be set together with `indexFilterable`, `indexSearchable` or `indexRangeFilters`", + }, + }) + continue + } + // searchable=true can be set only for text/text[] + if searchable != nil && *searchable { + switch dataType { + case schema.DataTypeText, schema.DataTypeTextArray: + // ignore + default: + testCases = append(testCases, testCase{ + propName: propName, + dataType: dataType, + indexInverted: inverted, + indexFilterable: filterable, + indexSearchable: searchable, + expectedErrContains: []string{ + // TODO should be changed as on master + "`indexSearchable` is allowed only for text/text[] data types. 
For other data types set false or leave empty", + // "`indexSearchable` is not allowed for other than text/text[] data types" , + }, + }) + continue + } + } + + testCases = append(testCases, testCase{ + propName: propName, + dataType: dataType, + indexInverted: inverted, + indexFilterable: filterable, + indexSearchable: searchable, + }) + } + } + } + } + + for _, tc := range testCases { + t.Run(tc.propName, func(t *testing.T) { + err := handler.validatePropertyIndexing(&models.Property{ + Name: tc.propName, + DataType: tc.dataType.PropString(), + IndexInverted: tc.indexInverted, + IndexFilterable: tc.indexFilterable, + IndexSearchable: tc.indexSearchable, + }) + + if len(tc.expectedErrContains) == 0 { + require.NoError(t, err) + } else { + for i := range tc.expectedErrContains { + assert.ErrorContains(t, err, tc.expectedErrContains[i]) + } + } + }) + } + }) +} + +type fakePropertyDataType struct { + primitiveDataType schema.DataType + nestedDataType schema.DataType +} + +func newFakePrimitivePDT(primitiveDataType schema.DataType) schema.PropertyDataType { + return &fakePropertyDataType{primitiveDataType: primitiveDataType} +} + +func newFakeNestedPDT(nestedDataType schema.DataType) schema.PropertyDataType { + return &fakePropertyDataType{nestedDataType: nestedDataType} +} + +func (pdt *fakePropertyDataType) Kind() schema.PropertyKind { + if pdt.IsPrimitive() { + return schema.PropertyKindPrimitive + } + if pdt.IsNested() { + return schema.PropertyKindNested + } + return schema.PropertyKindRef +} + +func (pdt *fakePropertyDataType) IsPrimitive() bool { + return pdt.primitiveDataType != "" +} + +func (pdt *fakePropertyDataType) AsPrimitive() schema.DataType { + return pdt.primitiveDataType +} + +func (pdt *fakePropertyDataType) IsNested() bool { + return pdt.nestedDataType != "" +} + +func (pdt *fakePropertyDataType) AsNested() schema.DataType { + return pdt.nestedDataType +} + +func (pdt *fakePropertyDataType) IsReference() bool { + return !(pdt.IsPrimitive() || 
pdt.IsNested()) +} + +func (pdt *fakePropertyDataType) Classes() []schema.ClassName { + if pdt.IsReference() { + return []schema.ClassName{} + } + return nil +} + +func (pdt *fakePropertyDataType) ContainsClass(name schema.ClassName) bool { + return false +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/ref_finder.go b/platform/dbops/binaries/weaviate-src/usecases/schema/ref_finder.go new file mode 100644 index 0000000000000000000000000000000000000000..fdc11aaaf33056e03d27c31e4bbc22f67f7a1d6d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/ref_finder.go @@ -0,0 +1,154 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "sort" + + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/models" + libschema "github.com/weaviate/weaviate/entities/schema" +) + +// RefFinder is a helper that lists classes and their possible paths to to a +// desired target class. +// +// For example if the target class is "car". It might list: +// - Person, drives, Car +// - Person, owns, Car +// - Person, friendsWith, Person, drives, Car +// etc. 
+// +// It will stop at a preconfigured depth limit, to avoid infinite results, such +// as: +// - Person, friendsWith, Person, friendsWith, Person, ..., drives Car +type RefFinder struct { + schemaGetter schemaGetterForRefFinder + depthLimit int +} + +// NewRefFinder with SchemaGetter and depth limit +func NewRefFinder(getter schemaGetterForRefFinder, depthLimit int) *RefFinder { + return &RefFinder{ + schemaGetter: getter, + depthLimit: depthLimit, + } +} + +type schemaGetterForRefFinder interface { + GetSchemaSkipAuth() libschema.Schema +} + +func (r *RefFinder) Find(className libschema.ClassName) []filters.Path { + schema := r.schemaGetter.GetSchemaSkipAuth() + + var classes []*models.Class + if schema.Objects != nil { + classes = append(classes, schema.Objects.Classes...) + } + + return r.findInClassList(className, classes, schema) +} + +func (r *RefFinder) findInClassList(needle libschema.ClassName, classes []*models.Class, + schema libschema.Schema, +) []filters.Path { + var out []filters.Path + + for _, class := range classes { + path, ok := r.hasRefTo(needle, class, schema, 1) + if !ok { + continue + } + + out = append(out, path...) + } + + return r.sortByPathLen(out) +} + +func (r *RefFinder) hasRefTo(needle libschema.ClassName, class *models.Class, + schema libschema.Schema, depth int, +) ([]filters.Path, bool) { + var out []filters.Path + + if depth > r.depthLimit { + return nil, false + } + + for _, prop := range class.Properties { + dt, err := schema.FindPropertyDataType(prop.DataType) + if err != nil { + // silently ignore, maybe the property was deleted in the meantime + continue + } + + if !dt.IsReference() { + continue + } + + for _, haystack := range dt.Classes() { + refs := r.refsPerClass(needle, class, prop.Name, haystack, schema, depth) + out = append(out, refs...) 
+ } + } + + return out, len(out) > 0 +} + +func (r *RefFinder) refsPerClass(needle libschema.ClassName, class *models.Class, + propName string, haystack libschema.ClassName, schema libschema.Schema, + depth int, +) []filters.Path { + if haystack == needle { + // direct match + return []filters.Path{ + { + Class: libschema.ClassName(class.Class), + Property: libschema.PropertyName(propName), + Child: &filters.Path{ + Class: needle, + Property: "id", + }, + }, + } + } + + // could still be an indirect (recursive) match + innerClass := schema.FindClassByName(haystack) + if innerClass == nil { + return nil + } + paths, ok := r.hasRefTo(needle, innerClass, schema, depth+1) + if !ok { + return nil + } + + var out []filters.Path + for _, path := range paths { + out = append(out, filters.Path{ + Class: libschema.ClassName(class.Class), + Property: libschema.PropertyName(propName), + Child: &path, + }) + } + + return out +} + +func (r *RefFinder) sortByPathLen(in []filters.Path) []filters.Path { + sort.Slice(in, func(i, j int) bool { + return len(in[i].Slice()) < len(in[j].Slice()) + }) + + return in +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/ref_finder_test.go b/platform/dbops/binaries/weaviate-src/usecases/schema/ref_finder_test.go new file mode 100644 index 0000000000000000000000000000000000000000..2c32b1cd35d4d12b14c1af1dd6a3acabaac2ab7e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/ref_finder_test.go @@ -0,0 +1,242 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +func TestRefFinder(t *testing.T) { + t.Run("on an empty schema", func(t *testing.T) { + s := schema.Schema{ + Objects: &models.Schema{ + Classes: nil, + }, + } + + getter := &fakeSchemaGetterForRefFinder{s} + res := NewRefFinder(getter, 3).Find("Car") + + assert.Len(t, res, 0) + }) + + t.Run("on an schema containing only the target class and unrelated classes", func(t *testing.T) { + s := schema.Schema{ + Objects: &models.Schema{ + Classes: []*models.Class{ + { + Class: "Car", + Properties: []*models.Property{ + { + Name: "model", + DataType: schema.DataTypeText.PropString(), + }, + }, + }, + { + Class: "Tree", + Properties: []*models.Property{ + { + Name: "kind", + DataType: schema.DataTypeText.PropString(), + }, + }, + }, + }, + }, + } + + getter := &fakeSchemaGetterForRefFinder{s} + res := NewRefFinder(getter, 3).Find("Car") + + assert.Len(t, res, 0) + }) + + t.Run("on a schema containing a single level ref to the target", func(t *testing.T) { + s := schema.Schema{ + Objects: &models.Schema{ + Classes: []*models.Class{ + { + Class: "Car", + Properties: []*models.Property{ + { + Name: "model", + DataType: schema.DataTypeText.PropString(), + }, + }, + }, + { + Class: "Tree", + Properties: []*models.Property{ + { + Name: "kind", + DataType: schema.DataTypeText.PropString(), + }, + }, + }, + { + Class: "Drive", + Properties: []*models.Property{ + { + Name: "destination", + DataType: schema.DataTypeText.PropString(), + }, + }, + }, + { + Class: "Drive", + Properties: []*models.Property{ + { + Name: "destination", + DataType: schema.DataTypeText.PropString(), + }, + { + Name: "vehicle", + DataType: []string{"Car"}, + }, + }, + }, + }, + }, + } + + getter := &fakeSchemaGetterForRefFinder{s} + res 
:= NewRefFinder(getter, 3).Find("Car") + + assert.Equal(t, []filters.Path{ + { + Class: "Drive", + Property: "vehicle", + Child: &filters.Path{ + Class: "Car", + Property: "id", + }, + }, + }, res) + }) + + t.Run("on a schema containing a single level and a multi level ref to the target", func(t *testing.T) { + s := schema.Schema{ + Objects: &models.Schema{ + Classes: []*models.Class{ + { + Class: "Dog", + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + { + Name: "hasOwner", + DataType: []string{"Person"}, + }, + }, + }, + { + Class: "Car", + Properties: []*models.Property{ + { + Name: "model", + DataType: schema.DataTypeText.PropString(), + }, + }, + }, + { + Class: "Person", + Properties: []*models.Property{ + { + Name: "name", + DataType: schema.DataTypeText.PropString(), + }, + { + Name: "travels", + DataType: []string{"Drive"}, + }, + { + Name: "hasPets", + DataType: []string{"Dog"}, + }, + }, + }, + { + Class: "Drive", + Properties: []*models.Property{ + { + Name: "destination", + DataType: schema.DataTypeText.PropString(), + }, + { + Name: "vehicle", + DataType: []string{"Car"}, + }, + }, + }, + }, + }, + } + + getter := &fakeSchemaGetterForRefFinder{s} + res := NewRefFinder(getter, 3).Find("Car") + + assert.Equal(t, []filters.Path{ + { + Class: "Drive", + Property: "vehicle", + Child: &filters.Path{ + Class: "Car", + Property: "id", + }, + }, + { + Class: "Person", + Property: "travels", + Child: &filters.Path{ + Class: "Drive", + Property: "vehicle", + Child: &filters.Path{ + Class: "Car", + Property: "id", + }, + }, + }, + { + Class: "Dog", + Property: "hasOwner", + Child: &filters.Path{ + Class: "Person", + Property: "travels", + Child: &filters.Path{ + Class: "Drive", + Property: "vehicle", + Child: &filters.Path{ + Class: "Car", + Property: "id", + }, + }, + }, + }, + }, res) + }) +} + +type fakeSchemaGetterForRefFinder struct { + schema schema.Schema +} + +func (f *fakeSchemaGetterForRefFinder) 
package schema

import (
	"bytes"
	"encoding/json"
	"fmt"

	"github.com/weaviate/weaviate/entities/models"
	"github.com/weaviate/weaviate/usecases/sharding"
)

// Diff creates human-readable information about the difference in two schemas,
// returns a len=0 slice if schemas are identical.
//
// leftLabel/rightLabel are human-readable names (e.g. node names) used in the
// produced messages. Classes present on only one side are reported by name;
// classes present on both sides are compared in depth (properties, class
// config, sharding state) by the helper comparison types below.
func Diff(
	leftLabel string, left *State,
	rightLabel string, right *State,
) []string {
	var msgs []string

	if len(left.ObjectSchema.Classes) != len(right.ObjectSchema.Classes) {
		msg := fmt.Sprintf("%s has %d classes, but %s has %d classes",
			leftLabel, len(left.ObjectSchema.Classes),
			rightLabel, len(right.ObjectSchema.Classes))
		msgs = append(msgs, msg)
	}

	// index both sides by class name so lookups in either direction are O(1)
	leftClasses := map[string]*models.Class{}
	rightClasses := map[string]*models.Class{}

	for _, class := range right.ObjectSchema.Classes {
		rightClasses[class.Class] = class
	}

	for _, classLeft := range left.ObjectSchema.Classes {
		className := classLeft.Class
		leftClasses[className] = classLeft
		if classRight, ok := rightClasses[className]; !ok {
			msg := fmt.Sprintf("class %s exists in %s, but not in %s",
				className, leftLabel, rightLabel)
			msgs = append(msgs, msg)
		} else {
			// class exists on both sides: compare the class definition …
			cc := classComparison{
				left:       classLeft,
				right:      classRight,
				leftLabel:  leftLabel,
				rightLabel: rightLabel,
			}
			msgs = append(msgs, cc.diff()...)

			// … and its sharding state (either side may be missing)
			ssc := shardingStateComparison{
				left:       left.ShardingState[className],
				right:      right.ShardingState[className],
				leftLabel:  leftLabel,
				rightLabel: rightLabel,
				className:  className,
			}
			msgs = append(msgs, ssc.diff()...)
		}
	}

	// classes only present on the right side
	for className := range rightClasses {
		if _, ok := leftClasses[className]; !ok {
			msg := fmt.Sprintf("class %s exists in %s, but not in %s",
				className, rightLabel, leftLabel)
			msgs = append(msgs, msg)
		}
	}

	return msgs
}

// classComparison compares two versions of the same class and accumulates
// human-readable mismatch messages.
type classComparison struct {
	left, right           *models.Class
	leftLabel, rightLabel string
	msgs                  []string
}

// addMsg appends one or more messages to the accumulated result.
func (cc *classComparison) addMsg(msg ...string) {
	cc.msgs = append(cc.msgs, msg...)
}

// diff returns nil for identical classes; otherwise it reports the overall
// size mismatch, then delegates to the property and class-config comparisons
// for details. Equality is determined by comparing the JSON serialization of
// both classes.
func (cc *classComparison) diff() []string {
	// NOTE(review): marshal errors are deliberately ignored here; the
	// comparison then degrades to comparing nil byte slices.
	lj, _ := json.Marshal(cc.left)
	rj, _ := json.Marshal(cc.right)

	if bytes.Equal(lj, rj) {
		// classes are identical, we are done
		return nil
	}

	// classes are not identical, log this fact, then dig deeper to find the diff
	msg := fmt.Sprintf("class %s exists in both, but is not identical: "+
		"size %d vs %d", cc.left.Class, len(lj), len(rj))
	cc.addMsg(msg)

	pc := propsComparison{
		left:       cc.left.Properties,
		right:      cc.right.Properties,
		leftLabel:  cc.leftLabel,
		rightLabel: cc.rightLabel,
		className:  cc.left.Class,
	}
	cc.addMsg(pc.diff()...)

	ccc := classConfigComparison{
		left:       cc.left,
		right:      cc.right,
		leftLabel:  cc.leftLabel,
		rightLabel: cc.rightLabel,
		className:  cc.left.Class,
	}
	cc.addMsg(ccc.diff()...)

	return cc.msgs
}

// propsComparison compares the property lists of the same class on two sides.
type propsComparison struct {
	left, right           []*models.Property
	leftLabel, rightLabel string
	className             string
	msgs                  []string
}

// addMsg appends one or more messages to the accumulated result.
func (pc *propsComparison) addMsg(msg ...string) {
	pc.msgs = append(pc.msgs, msg...)
}

// diff reports properties missing on either side and, for properties present
// on both, any field-level mismatch (via compareProp).
func (pc *propsComparison) diff() []string {
	containedLeft := map[string]*models.Property{}
	containedRight := map[string]*models.Property{}

	for _, prop := range pc.left {
		containedLeft[prop.Name] = prop
	}
	// walk the right side: anything not on the left is missing there,
	// anything present on both is compared in detail
	for _, prop := range pc.right {
		if leftProp, ok := containedLeft[prop.Name]; !ok {
			msg := fmt.Sprintf("class %s: property %s exists in %s, but not in %s",
				pc.className, prop.Name, pc.rightLabel, pc.leftLabel)
			pc.addMsg(msg)
		} else {
			pc.compareProp(leftProp, prop)
		}
		containedRight[prop.Name] = prop
	}

	// second pass: properties only present on the left
	for _, prop := range pc.left {
		if _, ok := containedRight[prop.Name]; !ok {
			msg := fmt.Sprintf("class %s: property %s exists in %s, but not in %s",
				pc.className, prop.Name, pc.leftLabel, pc.rightLabel)
			pc.addMsg(msg)
		}
	}

	return pc.msgs
}

// compareProp emits a message with both JSON serializations when the two
// versions of one property differ; identical properties produce no output.
func (pc *propsComparison) compareProp(left, right *models.Property) {
	lj, _ := json.Marshal(left)
	rj, _ := json.Marshal(right)

	if bytes.Equal(lj, rj) {
		return
	}

	msg := fmt.Sprintf("class %s: property %s: mismatch: %s has %s, but %s has %s",
		pc.className, left.Name, pc.leftLabel, lj, pc.rightLabel, rj)
	pc.addMsg(msg)
}

// classConfigComparison compares the non-property configuration of a class
// field by field so each mismatch gets its own labeled message.
type classConfigComparison struct {
	left, right           *models.Class
	leftLabel, rightLabel string
	className             string
	msgs                  []string
}

// addMsg appends one or more messages to the accumulated result.
func (ccc *classConfigComparison) addMsg(msg ...string) {
	ccc.msgs = append(ccc.msgs, msg...)
}

// diff compares each config field of interest with a human-readable label.
func (ccc *classConfigComparison) diff() []string {
	ccc.compare(ccc.left.Description, ccc.right.Description, "description")
	ccc.compare(ccc.left.InvertedIndexConfig,
		ccc.right.InvertedIndexConfig, "inverted index config")
	ccc.compare(ccc.left.ModuleConfig,
		ccc.right.ModuleConfig, "module config")
	ccc.compare(ccc.left.ReplicationConfig,
		ccc.right.ReplicationConfig, "replication config")
	ccc.compare(ccc.left.ShardingConfig,
		ccc.right.ShardingConfig, "sharding config")
	ccc.compare(ccc.left.VectorIndexConfig,
		ccc.right.VectorIndexConfig, "vector index config")
	ccc.compare(ccc.left.VectorIndexType,
		ccc.right.VectorIndexType, "vector index type")
	ccc.compare(ccc.left.Vectorizer,
		ccc.right.Vectorizer, "vectorizer")
	ccc.compare(ccc.left.VectorConfig,
		ccc.right.VectorConfig, "vector config")
	return ccc.msgs
}

// compare emits one message when the JSON serializations of a single config
// value differ between the two sides; label names the field in the message.
func (ccc *classConfigComparison) compare(
	left, right any, label string,
) {
	lj, _ := json.Marshal(left)
	rj, _ := json.Marshal(right)

	if bytes.Equal(lj, rj) {
		return
	}

	msg := fmt.Sprintf("class %s: %s mismatch: %s has %s, but %s has %s",
		ccc.className, label, ccc.leftLabel, lj, ccc.rightLabel, rj)
	ccc.addMsg(msg)
}

// shardingStateComparison compares a class's sharding state between two
// sides, either of which may be missing (nil).
type shardingStateComparison struct {
	left, right           *sharding.State
	leftLabel, rightLabel string
	className             string
	msgs                  []string
}

// addMsg appends one or more messages to the accumulated result.
func (ssc *shardingStateComparison) addMsg(msg ...string) {
	ssc.msgs = append(ssc.msgs, msg...)
}

// diff reports a one-sided missing sharding state, or a full JSON dump of
// both states when they differ; identical states produce no message.
func (ssc *shardingStateComparison) diff() []string {
	if ssc.left == nil && ssc.right != nil {
		msg := fmt.Sprintf("class %s: missing sharding state in %s",
			ssc.className, ssc.leftLabel)
		ssc.addMsg(msg)
		return ssc.msgs
	}

	if ssc.left != nil && ssc.right == nil {
		msg := fmt.Sprintf("class %s: missing sharding state in %s",
			ssc.className, ssc.rightLabel)
		ssc.addMsg(msg)
		return ssc.msgs
	}

	lj, _ := json.Marshal(ssc.left)
	rj, _ := json.Marshal(ssc.right)

	if bytes.Equal(lj, rj) {
		return ssc.msgs
	}

	msg := fmt.Sprintf("class %s: sharding state mismatch: "+
		"%s has %s, but %s has %s",
		ssc.className, ssc.leftLabel, lj, ssc.rightLabel, rj)
	ssc.addMsg(msg)

	return ssc.msgs
}
+// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/sharding" +) + +func Test_SchemaComparison_Identical(t *testing.T) { + left := &State{ + ShardingState: map[string]*sharding.State{ + "Foo": { + IndexID: "Foo", + }, + }, + + ObjectSchema: &models.Schema{ + Classes: []*models.Class{ + { + Class: "Foo", + Properties: []*models.Property{ + { + DataType: []string{"int"}, + Name: "prop_1", + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + }, + }, + } + + right := &State{ + ShardingState: map[string]*sharding.State{ + "Foo": { + IndexID: "Foo", + }, + }, + + ObjectSchema: &models.Schema{ + Classes: []*models.Class{ + { + Class: "Foo", + Properties: []*models.Property{ + { + DataType: []string{"int"}, + Name: "prop_1", + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + }, + }, + } + + assert.Len(t, Diff("left schema", left, "right schema", right), 0) +} + +func Test_SchemaComparison_MismatchInClasses(t *testing.T) { + left := &State{ + ShardingState: map[string]*sharding.State{ + "Foo": { + IndexID: "Foo", + }, + }, + + ObjectSchema: &models.Schema{ + Classes: []*models.Class{ + { + Class: "Foo", + Properties: []*models.Property{ + { + DataType: []string{"int"}, + Name: "prop_1", + }, + }, + ReplicationConfig: &models.ReplicationConfig{Factor: 1}, + }, + }, + }, + } + + right := &State{ + ShardingState: map[string]*sharding.State{ + "Foo": { + IndexID: "Foo", + }, + }, + + ObjectSchema: &models.Schema{ + Classes: []*models.Class{}, + }, + } + + msgs := Diff("left schema", left, "right schema", right) + assert.Greater(t, len(msgs), 0) + assert.Contains(t, msgs, "class Foo exists in left schema, but not in right schema") +} + +func Test_SchemaComparison_VariousMismatches(t *testing.T) { + left := &State{ + 
ShardingState: map[string]*sharding.State{ + "Foo": { + IndexID: "Foo", + ReplicationFactor: 1, + }, + "Foo2": { + Physical: map[string]sharding.Physical{ + "abcd": { + OwnsVirtual: []string{"v1"}, + }, + }, + ReplicationFactor: 1, + }, + "Foo4": { + Physical: map[string]sharding.Physical{ + "abcd": {}, + }, + ReplicationFactor: 1, + }, + }, + + ObjectSchema: &models.Schema{ + Classes: []*models.Class{ + { + Class: "Foo", + Properties: []*models.Property{ + { + DataType: []string{"int"}, + Name: "prop_1", + }, + { + DataType: []string{"text"}, + Name: "prop_2", + }, + { + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + Name: "prop_4", + }, + }, + Description: "foo", + InvertedIndexConfig: &models.InvertedIndexConfig{ + IndexPropertyLength: true, + }, + ModuleConfig: "bar", + ReplicationConfig: &models.ReplicationConfig{ + Factor: 7, + }, + ShardingConfig: map[string]interface{}{ + "desiredCount": 7, + }, + VectorIndexConfig: map[string]interface{}{ + "ef": 1000, + }, + VectorIndexType: "age-n-ass-double-u", + }, + {Class: "Foo2"}, + {Class: "Foo3"}, + {Class: "Foo4"}, + }, + }, + } + + right := &State{ + ShardingState: map[string]*sharding.State{ + "Foo": { + IndexID: "Foo", + ReplicationFactor: 1, + }, + "Foo2": { + Physical: map[string]sharding.Physical{ + "xyz": { + BelongsToNodes: []string{"n1"}, + }, + }, + ReplicationFactor: 1, + }, + "Foo3": { + Physical: map[string]sharding.Physical{ + "abcd": {}, + }, + ReplicationFactor: 1, + }, + }, + + ObjectSchema: &models.Schema{ + Classes: []*models.Class{ + { + Class: "Foo", + Properties: []*models.Property{ + { + DataType: []string{"int"}, + Name: "prop_1", + }, + { + DataType: []string{"bool"}, + Name: "prop_3", + }, + { + DataType: []string{"text"}, + Name: "prop_4", + }, + }, + InvertedIndexConfig: &models.InvertedIndexConfig{ + IndexTimestamps: true, + }, + ReplicationConfig: &models.ReplicationConfig{ + Factor: 8, + }, + Vectorizer: "gpt-9", + }, + {Class: 
"Foo2"}, + {Class: "Foo3"}, + {Class: "Foo4"}, + }, + }, + } + + actual := Diff("L", left, "R", right) + + expected := []string{ + "class Foo: property prop_2 exists in L, but not in R", + "class Foo: property prop_3 exists in R, but not in L", + "class Foo: property prop_4: mismatch: " + + "L has {\"dataType\":[\"text\"],\"name\":\"prop_4\",\"tokenization\":\"whitespace\"}, but " + + "R has {\"dataType\":[\"text\"],\"name\":\"prop_4\"}", + "class Foo: description mismatch: " + + "L has \"foo\", but R has \"\"", + "class Foo: inverted index config mismatch: " + + "L has {\"indexPropertyLength\":true}, " + + "but R has {\"indexTimestamps\":true}", + "class Foo: module config mismatch: " + + "L has \"bar\", but R has null", + "class Foo: replication config mismatch: " + + "L has {\"asyncEnabled\":false,\"factor\":7}, but R has {\"asyncEnabled\":false,\"factor\":8}", + "class Foo: sharding config mismatch: " + + "L has {\"desiredCount\":7}, but R has null", + "class Foo: vector index config mismatch: " + + "L has {\"ef\":1000}, but R has null", + "class Foo: vector index type mismatch: " + + "L has \"age-n-ass-double-u\", but R has \"\"", + "class Foo: vectorizer mismatch: " + + "L has \"\", but R has \"gpt-9\"", + "class Foo3: missing sharding state in L", + "class Foo4: missing sharding state in R", + "class Foo2: sharding state mismatch: " + + "L has {\"indexID\":\"\",\"config\":{\"virtualPerPhysical\":0,\"desiredCount\":0,\"actualCount\":0,\"desiredVirtualCount\":0,\"actualVirtualCount\":0,\"key\":\"\",\"strategy\":\"\",\"function\":\"\"},\"physical\":{\"abcd\":{\"name\":\"\",\"ownsVirtual\":[\"v1\"],\"ownsPercentage\":0}},\"virtual\":null,\"partitioningEnabled\":false,\"replicationFactor\":1}, " + + "but R has 
{\"indexID\":\"\",\"config\":{\"virtualPerPhysical\":0,\"desiredCount\":0,\"actualCount\":0,\"desiredVirtualCount\":0,\"actualVirtualCount\":0,\"key\":\"\",\"strategy\":\"\",\"function\":\"\"},\"physical\":{\"xyz\":{\"name\":\"\",\"ownsPercentage\":0,\"belongsToNodes\":[\"n1\"]}},\"virtual\":null,\"partitioningEnabled\":false,\"replicationFactor\":1}", + } + + for _, exp := range expected { + assert.Contains(t, actual, exp) + } +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/tenant.go b/platform/dbops/binaries/weaviate-src/usecases/schema/tenant.go new file mode 100644 index 0000000000000000000000000000000000000000..c406449062ba78f4e0eda1a1b573d152f5552740 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/tenant.go @@ -0,0 +1,346 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "context" + "fmt" + "strings" + + "github.com/sirupsen/logrus" + + "github.com/weaviate/weaviate/cluster/proto/api" + clusterSchema "github.com/weaviate/weaviate/cluster/schema" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + modsloads3 "github.com/weaviate/weaviate/modules/offload-s3" + "github.com/weaviate/weaviate/usecases/auth/authorization" + "github.com/weaviate/weaviate/usecases/auth/authorization/filter" + uco "github.com/weaviate/weaviate/usecases/objects" + "github.com/weaviate/weaviate/usecases/sharding" +) + +const ( + ErrMsgMaxAllowedTenants = "maximum number of tenants allowed to be updated simultaneously is 100. 
Please reduce the number of tenants in your request and try again" +) + +// AddTenants is used to add new tenants to a class +// Class must exist and has partitioning enabled +func (h *Handler) AddTenants(ctx context.Context, + principal *models.Principal, + class string, + tenants []*models.Tenant, +) (uint64, error) { + tenantNames := make([]string, len(tenants)) + for i, tenant := range tenants { + tenantNames[i] = tenant.Name + } + if err := h.Authorizer.Authorize(ctx, principal, authorization.CREATE, authorization.ShardsMetadata(class, tenantNames...)...); err != nil { + return 0, err + } + + validated, err := validateTenants(tenants, true) + if err != nil { + return 0, err + } + + if err = h.validateActivityStatuses(ctx, validated, true, false); err != nil { + return 0, err + } + + request := api.AddTenantsRequest{ + ClusterNodes: h.schemaManager.StorageCandidates(), + Tenants: make([]*api.Tenant, 0, len(validated)), + } + for i, tenant := range validated { + request.Tenants = append(request.Tenants, &api.Tenant{ + Name: tenant.Name, + Status: schema.ActivityStatus(validated[i].ActivityStatus), + }) + } + + return h.schemaManager.AddTenants(ctx, class, &request) +} + +func validateTenants(tenants []*models.Tenant, allowOverHundred bool) (validated []*models.Tenant, err error) { + if !allowOverHundred && len(tenants) > 100 { + err = uco.NewErrInvalidUserInput(ErrMsgMaxAllowedTenants) + return + } + uniq := make(map[string]*models.Tenant) + for i, requested := range tenants { + if errMsg := schema.ValidateTenantName(requested.Name); errMsg != nil { + err = uco.NewErrInvalidUserInput("tenant name at index %d: %s", i, errMsg.Error()) + return + } + _, found := uniq[requested.Name] + if found { + err = uco.NewErrInvalidUserInput("tenant name %s existed multiple times", requested.Name) + return + } + uniq[requested.Name] = requested + } + validated = make([]*models.Tenant, len(uniq)) + i := 0 + for _, tenant := range uniq { + validated[i] = tenant + i++ + } + 
return +} + +func (h *Handler) validateActivityStatuses(ctx context.Context, tenants []*models.Tenant, + allowEmpty, allowFrozen bool, +) error { + msgs := make([]string, 0, len(tenants)) + + for _, tenant := range tenants { + tenant.ActivityStatus = convertNewTenantNames(tenant.ActivityStatus) + switch status := tenant.ActivityStatus; status { + case models.TenantActivityStatusHOT, models.TenantActivityStatusCOLD: + continue + case models.TenantActivityStatusFROZEN: + if mod := h.moduleConfig.GetByName(modsloads3.Name); mod == nil { + return fmt.Errorf( + "can't offload tenants, because offload-s3 module is not enabled") + } + + if allowFrozen && h.cloud != nil { + if err := h.cloud.VerifyBucket(ctx); err != nil { + return err + } + } + + if allowFrozen { + continue + } + + default: + if status == "" && allowEmpty { + continue + } + } + msgs = append(msgs, fmt.Sprintf( + "invalid activity status '%s' for tenant %q", tenant.ActivityStatus, tenant.Name)) + } + + if len(msgs) != 0 { + return uco.NewErrInvalidUserInput("%s", strings.Join(msgs, ", ")) + } + return nil +} + +// UpdateTenants is used to set activity status of tenants of a class. 
+// +// Class must exist and has partitioning enabled +func (h *Handler) UpdateTenants(ctx context.Context, principal *models.Principal, + class string, tenants []*models.Tenant, +) ([]*models.Tenant, error) { + shardNames := make([]string, len(tenants)) + for idx := range tenants { + shardNames[idx] = tenants[idx].Name + } + + if err := h.Authorizer.Authorize(ctx, principal, authorization.UPDATE, authorization.ShardsMetadata(class, shardNames...)...); err != nil { + return nil, err + } + + h.logger.WithFields(logrus.Fields{ + "class": class, + "tenants": tenants, + }).Debug("update tenants status") + + validated, err := validateTenants(tenants, false) + if err != nil { + return nil, err + } + if err := h.validateActivityStatuses(ctx, validated, false, true); err != nil { + return nil, err + } + + req := api.UpdateTenantsRequest{ + Tenants: make([]*api.Tenant, len(tenants)), + ClusterNodes: h.schemaManager.StorageCandidates(), + } + tNames := make([]string, len(tenants)) + for i, tenant := range tenants { + tNames[i] = tenant.Name + req.Tenants[i] = &api.Tenant{Name: tenant.Name, Status: tenant.ActivityStatus} + } + + if _, err = h.schemaManager.UpdateTenants(ctx, class, &req); err != nil { + return nil, err + } + + // we get the new state to return correct status + // specially in FREEZING and UNFREEZING + uTenants, _, err := h.schemaManager.QueryTenants(class, tNames) + if err != nil { + return nil, err + } + return uTenants, err +} + +// DeleteTenants is used to delete tenants of a class. 
+// +// Class must exist and has partitioning enabled +func (h *Handler) DeleteTenants(ctx context.Context, principal *models.Principal, class string, tenants []string) error { + if err := h.Authorizer.Authorize(ctx, principal, authorization.DELETE, authorization.ShardsMetadata(class, tenants...)...); err != nil { + return err + } + + for i, name := range tenants { + if name == "" { + return fmt.Errorf("empty tenant name at index %d", i) + } + } + + req := api.DeleteTenantsRequest{ + Tenants: tenants, + } + + if _, err := h.schemaManager.DeleteTenants(ctx, class, &req); err != nil { + return err + } + + return nil +} + +func (h *Handler) GetConsistentTenants(ctx context.Context, principal *models.Principal, class string, consistency bool, tenants []string) ([]*models.Tenant, error) { + var allTenants []*models.Tenant + var err error + + if consistency { + allTenants, _, err = h.schemaManager.QueryTenants(class, tenants) + } else { + // If non consistent, fallback to the default implementation + allTenants, err = h.getTenantsByNames(class, tenants) + } + if err != nil { + return nil, err + } + + resourceFilter := filter.New[*models.Tenant](h.Authorizer, h.config.Authorization.Rbac) + filteredTenants := resourceFilter.Filter( + ctx, + h.logger, + principal, + allTenants, + authorization.READ, + func(tenant *models.Tenant) string { + return authorization.ShardsMetadata(class, tenant.Name)[0] + }, + ) + + return filteredTenants, nil +} + +func (h *Handler) GetConsistentTenant(ctx context.Context, principal *models.Principal, class string, consistency bool, tenant string) (*models.Tenant, error) { + if err := h.Authorizer.Authorize(ctx, principal, authorization.READ, authorization.ShardsMetadata(class, tenant)...); err != nil { + return nil, err + } + + var allTenants []*models.Tenant + var err error + + tenants := []string{tenant} + if consistency { + allTenants, _, err = h.schemaManager.QueryTenants(class, tenants) + } else { + // If non consistent, fallback to the 
default implementation + allTenants, err = h.getTenantsByNames(class, tenants) + } + if err != nil { + return nil, err + } + if len(allTenants) == 0 { + return nil, ErrNotFound + } + if len(allTenants) > 1 { + return nil, ErrUnexpectedMultiple + } + return allTenants[0], nil +} + +func (h *Handler) multiTenancy(class string) (clusterSchema.ClassInfo, error) { + info := h.schemaReader.ClassInfo(class) + if !info.Exists { + return info, fmt.Errorf("class %q: %w", class, ErrNotFound) + } + if !info.MultiTenancy.Enabled { + return info, fmt.Errorf("multi-tenancy is not enabled for class %q", class) + } + return info, nil +} + +// ConsistentTenantExists is used to check whether a tenant of a class exists +// +// Class must exist and have partitioning enabled +func (h *Handler) ConsistentTenantExists(ctx context.Context, principal *models.Principal, class string, consistency bool, tenant string) error { + if err := h.Authorizer.Authorize(ctx, principal, authorization.READ, authorization.ShardsMetadata(class, tenant)...); err != nil { + return err + } + + var tenants []*models.Tenant + var err error + if consistency { + tenants, _, err = h.schemaManager.QueryTenants(class, []string{tenant}) + } else { + // If not consistent, fall back to the default implementation + tenants, err = h.getTenantsByNames(class, []string{tenant}) + } + if err != nil { + return err + } + if len(tenants) == 1 { + return nil + } + + return ErrNotFound +} + +func (h *Handler) getTenantsByNames(class string, names []string) ([]*models.Tenant, error) { + info, err := h.multiTenancy(class) + if err != nil || info.Tenants == 0 { + return nil, err + } + + ts := make([]*models.Tenant, 0, len(names)) + f := func(_ *models.Class, ss *sharding.State) error { + for _, name := range names { + if _, ok := ss.Physical[name]; !ok { + continue + } + physical := ss.Physical[name] + ts = append(ts, &models.Tenant{ + Name: name, + ActivityStatus: schema.ActivityStatus(physical.Status), + }) + } + return nil + } + return ts, 
h.schemaReader.Read(class, f) +} + +// convert the new tenant names (that are only used as input) to the old tenant names that are used throughout the code +func convertNewTenantNames(status string) string { + if status == models.TenantActivityStatusACTIVE { + return models.TenantActivityStatusHOT + } + if status == models.TenantActivityStatusINACTIVE { + return models.TenantActivityStatusCOLD + } + if status == models.TenantActivityStatusOFFLOADED { + return models.TenantActivityStatusFROZEN + } + return status +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/tenant_test.go b/platform/dbops/binaries/weaviate-src/usecases/schema/tenant_test.go new file mode 100644 index 0000000000000000000000000000000000000000..8370a3b6d38104cf3040bc3f82adc054c8ef7e07 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/tenant_test.go @@ -0,0 +1,456 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +func TestAddTenants(t *testing.T) { + var ( + ctx = context.Background() + tenants = []*models.Tenant{{Name: "USER1"}, {Name: "USER2"}} + properties = []*models.Property{ + { + Name: "uuid", + DataType: schema.DataTypeText.PropString(), + }, + } + repConfig = &models.ReplicationConfig{Factor: 1} + ) + + mtNilClass := &models.Class{ + Class: "MTnil", + MultiTenancyConfig: nil, + Properties: properties, + ReplicationConfig: repConfig, + Vectorizer: "none", + } + mtDisabledClass := &models.Class{ + Class: "MTdisabled", + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: false}, + Properties: properties, + ReplicationConfig: repConfig, + Vectorizer: "none", + } + mtEnabledClass := &models.Class{ + Class: "MTenabled", + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + Properties: properties, + ReplicationConfig: repConfig, + Vectorizer: "none", + } + type test struct { + name string + class string + tenants []*models.Tenant + errMsgs []string + mockCalls func(fakeSchemaManager *fakeSchemaManager) + } + + tests := []test{ + { + name: "MTIsNil", + class: mtNilClass.Class, + tenants: tenants, + errMsgs: nil, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) { + // MT validation is done leader side now + fakeSchemaManager.On("AddTenants", mock.Anything, mock.Anything).Return(nil) + }, + }, + { + name: "MTDisabled", + class: mtDisabledClass.Class, + tenants: tenants, + errMsgs: nil, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) { + // MT validation is done leader side now + fakeSchemaManager.On("AddTenants", mock.Anything, mock.Anything).Return(nil) + }, + }, + { + name: "UnknownClass", + class: "UnknownClass", + tenants: 
tenants, + errMsgs: nil, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) { + fakeSchemaManager.On("AddTenants", mock.Anything, mock.Anything).Return(nil) + }, + }, + { + name: "EmptyTenantValue", + class: mtEnabledClass.Class, + tenants: []*models.Tenant{ + {Name: "Aaaa"}, + {Name: ""}, + {Name: "Bbbb"}, + }, + errMsgs: []string{"tenant"}, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) {}, + }, + { + name: "InvalidActivityStatus", + class: mtEnabledClass.Class, + tenants: []*models.Tenant{ + {Name: "Aaaa", ActivityStatus: "DOES_NOT_EXIST_1"}, + {Name: "Bbbb", ActivityStatus: "DOES_NOT_EXIST_2"}, + {Name: "Bbbb2", ActivityStatus: "WARM"}, + }, + errMsgs: []string{ + "invalid activity status", + "DOES_NOT_EXIST_1", + "DOES_NOT_EXIST_2", + }, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) {}, + }, + { + name: "Success", + class: mtEnabledClass.Class, + tenants: []*models.Tenant{ + {Name: "Aaaa"}, + {Name: "Bbbb", ActivityStatus: models.TenantActivityStatusHOT}, + {Name: "Cccc", ActivityStatus: models.TenantActivityStatusCOLD}, + }, + errMsgs: []string{}, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) { + fakeSchemaManager.On("AddTenants", mock.Anything, mock.Anything).Return(nil) + }, + }, + // TODO test with replication factor >= 2 + } + + // AddTenants + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + // Isolate schema for each tests + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + + test.mockCalls(fakeSchemaManager) + + _, err := handler.AddTenants(ctx, nil, test.class, test.tenants) + + fakeSchemaManager.AssertExpectations(t) + + if len(test.errMsgs) == 0 { + require.NoError(t, err) + } else { + for _, msg := range test.errMsgs { + assert.ErrorContains(t, err, msg) + } + } + }) + } +} + +func TestUpdateTenants(t *testing.T) { + var ( + ctx = context.Background() + tenants = []*models.Tenant{ + {Name: "USER1", ActivityStatus: models.TenantActivityStatusHOT}, + {Name: "USER2", ActivityStatus: 
models.TenantActivityStatusACTIVE}, + } + properties = []*models.Property{ + { + Name: "uUID", + DataType: schema.DataTypeText.PropString(), + }, + } + repConfig = &models.ReplicationConfig{Factor: 1} + ) + + mtNilClass := &models.Class{ + Class: "MTnil", + MultiTenancyConfig: nil, + Properties: properties, + ReplicationConfig: repConfig, + Vectorizer: "none", + } + mtDisabledClass := &models.Class{ + Class: "MTdisabled", + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: false}, + Properties: properties, + ReplicationConfig: repConfig, + Vectorizer: "none", + } + mtEnabledClass := &models.Class{ + Class: "MTenabled", + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + Properties: properties, + ReplicationConfig: repConfig, + Vectorizer: "none", + } + + type test struct { + name string + class string + updateTenants []*models.Tenant + errMsgs []string + expectedTenants []*models.Tenant + mockCalls func(fakeSchemaManager *fakeSchemaManager) + } + + tests := []test{ + { + name: "MTIsNil", + class: mtNilClass.Class, + updateTenants: tenants, + errMsgs: nil, + expectedTenants: tenants, + mockCalls: func(fakeMetaHandler *fakeSchemaManager) { + fakeMetaHandler.On("UpdateTenants", mock.Anything, mock.Anything).Return(nil) + fakeMetaHandler.On("QueryTenants", mock.Anything, mock.Anything).Return([]*models.Tenant{ + {Name: tenants[0].Name, ActivityStatus: models.TenantActivityStatusCOLD}, + {Name: tenants[1].Name, ActivityStatus: models.TenantActivityStatusHOT}, + }, 0, nil) + }, + }, + { + name: "MTDisabled", + class: mtDisabledClass.Class, + updateTenants: tenants, + errMsgs: nil, + expectedTenants: tenants, + mockCalls: func(fakeMetaHandler *fakeSchemaManager) { + fakeMetaHandler.On("UpdateTenants", mock.Anything, mock.Anything).Return(nil) + fakeMetaHandler.On("QueryTenants", mock.Anything, mock.Anything).Return([]*models.Tenant{ + {Name: tenants[0].Name, ActivityStatus: models.TenantActivityStatusCOLD}, + {Name: tenants[1].Name, ActivityStatus: 
models.TenantActivityStatusHOT}, + }, 0, nil) + }, + }, + { + name: "UnknownClass", + class: "UnknownClass", + updateTenants: tenants, + errMsgs: nil, + expectedTenants: tenants, + mockCalls: func(fakeMetaHandler *fakeSchemaManager) { + fakeMetaHandler.On("UpdateTenants", mock.Anything, mock.Anything).Return(nil) + fakeMetaHandler.On("QueryTenants", mock.Anything, mock.Anything).Return([]*models.Tenant{ + {Name: tenants[0].Name, ActivityStatus: models.TenantActivityStatusCOLD}, + {Name: tenants[1].Name, ActivityStatus: models.TenantActivityStatusHOT}, + }, 0, nil) + }, + }, + { + name: "EmptyTenantValue", + class: mtEnabledClass.Class, + updateTenants: []*models.Tenant{ + {Name: "", ActivityStatus: models.TenantActivityStatusCOLD}, + }, + errMsgs: []string{"tenant"}, + expectedTenants: tenants, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) {}, + }, + { + name: "InvalidActivityStatus", + class: mtEnabledClass.Class, + updateTenants: []*models.Tenant{ + {Name: tenants[0].Name, ActivityStatus: "DOES_NOT_EXIST_1"}, + {Name: tenants[1].Name, ActivityStatus: "WARM"}, + }, + errMsgs: []string{ + "invalid activity status", + "DOES_NOT_EXIST_1", + "WARM", + }, + expectedTenants: tenants, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) {}, + }, + { + name: "EmptyActivityStatus", + class: mtEnabledClass.Class, + updateTenants: []*models.Tenant{ + {Name: tenants[0].Name}, + {Name: tenants[1].Name, ActivityStatus: ""}, + }, + errMsgs: []string{"invalid activity status"}, + expectedTenants: tenants, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) {}, + }, + { + name: "Success", + class: mtEnabledClass.Class, + updateTenants: []*models.Tenant{ + {Name: tenants[0].Name, ActivityStatus: models.TenantActivityStatusCOLD}, + {Name: tenants[1].Name, ActivityStatus: models.TenantActivityStatusHOT}, + }, + errMsgs: []string{}, + expectedTenants: []*models.Tenant{ + {Name: tenants[0].Name, ActivityStatus: models.TenantActivityStatusCOLD}, + {Name: 
tenants[1].Name, ActivityStatus: models.TenantActivityStatusHOT}, + }, + mockCalls: func(fakeMetaHandler *fakeSchemaManager) { + fakeMetaHandler.On("UpdateTenants", mock.Anything, mock.Anything).Return(nil) + fakeMetaHandler.On("QueryTenants", mock.Anything, mock.Anything).Return([]*models.Tenant{ + {Name: tenants[0].Name, ActivityStatus: models.TenantActivityStatusCOLD}, + {Name: tenants[1].Name, ActivityStatus: models.TenantActivityStatusHOT}, + }, 0, nil) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + // Isolate schema for each tests + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + test.mockCalls(fakeSchemaManager) + + _, err := handler.UpdateTenants(ctx, nil, test.class, test.updateTenants) + if len(test.errMsgs) == 0 { + require.NoError(t, err) + } else { + for i := range test.errMsgs { + assert.ErrorContains(t, err, test.errMsgs[i]) + } + } + + fakeSchemaManager.AssertExpectations(t) + }) + } +} + +func TestDeleteTenants(t *testing.T) { + var ( + ctx = context.Background() + tenants = []*models.Tenant{ + {Name: "USER1"}, + {Name: "USER2"}, + {Name: "USER3"}, + {Name: "USER4"}, + } + properties = []*models.Property{ + { + Name: "uuid", + DataType: schema.DataTypeText.PropString(), + }, + } + repConfig = &models.ReplicationConfig{Factor: 1} + ) + + mtNilClass := &models.Class{ + Class: "MTnil", + MultiTenancyConfig: nil, + Properties: properties, + ReplicationConfig: repConfig, + Vectorizer: "none", + } + mtDisabledClass := &models.Class{ + Class: "MTdisabled", + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: false}, + Properties: properties, + ReplicationConfig: repConfig, + Vectorizer: "none", + } + mtEnabledClass := &models.Class{ + Class: "MTenabled", + MultiTenancyConfig: &models.MultiTenancyConfig{Enabled: true}, + Properties: properties, + ReplicationConfig: repConfig, + Vectorizer: "none", + } + + type test struct { + name string + class string + tenants []*models.Tenant + errMsgs []string 
+ expectedTenants []*models.Tenant + mockCalls func(fakeSchemaManager *fakeSchemaManager) + } + + tests := []test{ + { + name: "MTIsNil", + class: mtNilClass.Class, + tenants: tenants, + errMsgs: nil, + expectedTenants: tenants, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) { + fakeSchemaManager.On("DeleteTenants", mock.Anything, mock.Anything).Return(nil) + }, + }, + { + name: "MTDisabled", + class: mtDisabledClass.Class, + tenants: tenants, + errMsgs: nil, + expectedTenants: tenants, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) { + fakeSchemaManager.On("DeleteTenants", mock.Anything, mock.Anything).Return(nil) + }, + }, + { + name: "UnknownClass", + class: "UnknownClass", + tenants: tenants, + errMsgs: nil, + expectedTenants: tenants, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) { + fakeSchemaManager.On("DeleteTenants", mock.Anything, mock.Anything).Return(nil) + }, + }, + { + name: "EmptyTenantValue", + class: mtEnabledClass.Class, + tenants: []*models.Tenant{ + {Name: "Aaaa"}, + {Name: ""}, + {Name: "Bbbb"}, + }, + errMsgs: []string{"empty tenant name at index 1"}, + expectedTenants: tenants, + mockCalls: func(fakeSchemaManager *fakeSchemaManager) {}, + }, + { + name: "Success", + class: mtEnabledClass.Class, + tenants: tenants[:2], + errMsgs: []string{}, + expectedTenants: tenants[2:], + mockCalls: func(fakeSchemaManager *fakeSchemaManager) { + fakeSchemaManager.On("DeleteTenants", mock.Anything, mock.Anything).Return(nil) + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + // Isolate schema for each tests + handler, fakeSchemaManager := newTestHandler(t, &fakeDB{}) + test.mockCalls(fakeSchemaManager) + + tenantNames := make([]string, len(test.tenants)) + for i := range test.tenants { + tenantNames[i] = test.tenants[i].Name + } + + err := handler.DeleteTenants(ctx, nil, test.class, tenantNames) + if len(test.errMsgs) == 0 { + require.NoError(t, err) + } else { + for i := range test.errMsgs { 
+ assert.ErrorContains(t, err, test.errMsgs[i]) + } + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/validation.go b/platform/dbops/binaries/weaviate-src/usecases/schema/validation.go new file mode 100644 index 0000000000000000000000000000000000000000..afa780b86014a2b5806b94398d508f3df4112b47 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/validation.go @@ -0,0 +1,199 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "fmt" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +type validatorNestedProperty func(property *models.NestedProperty, + primitiveDataType, nestedDataType schema.DataType, + isPrimitive, isNested bool, propNamePrefix string) error + +var validatorsNestedProperty = []validatorNestedProperty{ + validateNestedPropertyName, + validateNestedPropertyDataType, + validateNestedPropertyTokenization, + validateNestedPropertyIndexFilterable, + validateNestedPropertyIndexSearchable, + validateNestedPropertyIndexRangeFilters, +} + +func validateNestedProperties(properties []*models.NestedProperty, propNamePrefix string) error { + if len(properties) == 0 { + return fmt.Errorf("property '%s': At least one nested property is required for data type object/object[]", + propNamePrefix) + } + + for _, property := range properties { + primitiveDataType, isPrimitive := schema.AsPrimitive(property.DataType) + nestedDataType, isNested := schema.AsNested(property.DataType) + + for _, validator := range validatorsNestedProperty { + if err := validator(property, primitiveDataType, nestedDataType, isPrimitive, isNested, propNamePrefix); err != nil { + return err + } + } + if isNested { + if err := 
validateNestedProperties(property.NestedProperties, propNamePrefix+"."+property.Name); err != nil { + return err + } + } + } + return nil +} + +func validateNestedPropertyName(property *models.NestedProperty, + _, _ schema.DataType, + _, _ bool, propNamePrefix string, +) error { + return schema.ValidateNestedPropertyName(property.Name, propNamePrefix) +} + +func validateNestedPropertyDataType(property *models.NestedProperty, + primitiveDataType, _ schema.DataType, + isPrimitive, isNested bool, propNamePrefix string, +) error { + propName := propNamePrefix + "." + property.Name + + if isPrimitive { + // DataTypeString and DataTypeStringArray as deprecated since 1.19 are not allowed + switch primitiveDataType { + case schema.DataTypeString, schema.DataTypeStringArray: + return fmt.Errorf("property '%s': data type '%s' is deprecated and not allowed as nested property", propName, primitiveDataType) + case schema.DataTypeGeoCoordinates, schema.DataTypePhoneNumber: + return fmt.Errorf("property '%s': data type '%s' not allowed as nested property", propName, primitiveDataType) + default: + // do nothing + } + return nil + } + if isNested { + return nil + } + return fmt.Errorf("property '%s': reference data type not allowed", propName) +} + +// Tokenization allowed only for text/text[] data types +func validateNestedPropertyTokenization(property *models.NestedProperty, + primitiveDataType, _ schema.DataType, + isPrimitive, isNested bool, propNamePrefix string, +) error { + propName := propNamePrefix + "." 
+ property.Name + + if isPrimitive { + switch primitiveDataType { + case schema.DataTypeText, schema.DataTypeTextArray: + switch property.Tokenization { + case models.PropertyTokenizationField, models.PropertyTokenizationWord, + models.PropertyTokenizationWhitespace, models.PropertyTokenizationLowercase: + return nil + } + return fmt.Errorf("property '%s': Tokenization '%s' is not allowed for data type '%s'", + propName, property.Tokenization, primitiveDataType) + default: + if property.Tokenization == "" { + return nil + } + return fmt.Errorf("property '%s': Tokenization is not allowed for data type '%s'", + propName, primitiveDataType) + } + } + if property.Tokenization == "" { + return nil + } + if isNested { + return fmt.Errorf("property '%s': Tokenization is not allowed for object/object[] data types", propName) + } + return fmt.Errorf("property '%s': Tokenization is not allowed for reference data type", propName) +} + +// indexFilterable allowed for primitive & ref data types +func validateNestedPropertyIndexFilterable(property *models.NestedProperty, + primitiveDataType, _ schema.DataType, + isPrimitive, _ bool, propNamePrefix string, +) error { + propName := propNamePrefix + "." + property.Name + + // at this point indexFilterable should be set (either by user or by defaults) + if property.IndexFilterable == nil { + return fmt.Errorf("property '%s': `indexFilterable` not set", propName) + } + + if isPrimitive && primitiveDataType == schema.DataTypeBlob { + if *property.IndexFilterable { + return fmt.Errorf("property: '%s': indexFilterable is not allowed for blob data type", + propName) + } + } + + return nil +} + +// indexSearchable allowed for text/text[] data types +func validateNestedPropertyIndexSearchable(property *models.NestedProperty, + primitiveDataType, _ schema.DataType, + isPrimitive, _ bool, propNamePrefix string, +) error { + propName := propNamePrefix + "." 
+ property.Name + + // at this point indexSearchable should be set (either by user or by defaults) + if property.IndexSearchable == nil { + return fmt.Errorf("property '%s': `indexSearchable` not set", propName) + } + + if isPrimitive { + switch primitiveDataType { + case schema.DataTypeText, schema.DataTypeTextArray: + return nil + default: + // do nothing + } + } + if *property.IndexSearchable { + return fmt.Errorf("property '%s': `indexSearchable` is not allowed for other than text/text[] data types", + propName) + } + + return nil +} + +// indexRangeFilters allowed for number/int/date data types +func validateNestedPropertyIndexRangeFilters(property *models.NestedProperty, + primitiveDataType, _ schema.DataType, + isPrimitive, _ bool, propNamePrefix string, +) error { + propName := propNamePrefix + "." + property.Name + + // at this point indexRangeFilters should be set (either by user or by defaults) + if property.IndexRangeFilters == nil { + return fmt.Errorf("property '%s': `indexRangeFilters` not set", propName) + } + + if isPrimitive { + switch primitiveDataType { + case schema.DataTypeNumber, schema.DataTypeInt, schema.DataTypeDate: + return nil + default: + // do nothing + } + } + if *property.IndexRangeFilters { + return fmt.Errorf("property '%s': `indexRangeFilters` is not allowed for other than number/int/date data types", + propName) + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/schema/validation_test.go b/platform/dbops/binaries/weaviate-src/usecases/schema/validation_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ef4b356d46ff4f621666bb84372a7bbd341acc8c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/schema/validation_test.go @@ -0,0 +1,806 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. 
All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package schema + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +func Test_Validation_NestedProperties(t *testing.T) { + t.Parallel() + vFalse := false + vTrue := true + + t.Run("does not validate wrong names", func(t *testing.T) { + for _, name := range []string{"prop@1", "prop-2", "prop$3", "4prop"} { + t.Run(name, func(t *testing.T) { + nestedProperties := []*models.NestedProperty{ + { + Name: name, + DataType: schema.DataTypeInt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + }, + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propPrimitives := &models.Property{ + Name: "objectProp", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + } + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + }, + }, + } + + for _, prop := range []*models.Property{propPrimitives, propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.ErrorContains(t, err, prop.Name) + assert.ErrorContains(t, err, "is not a valid nested property name") + }) + } + }) + } + }) + } + }) + + t.Run("validates primitive data types", func(t *testing.T) { + nestedProperties := 
[]*models.NestedProperty{} + for _, pdt := range schema.PrimitiveDataTypes { + tokenization := "" + switch pdt { + case schema.DataTypeGeoCoordinates, schema.DataTypePhoneNumber: + // skip - not supported as nested + continue + case schema.DataTypeText, schema.DataTypeTextArray: + tokenization = models.PropertyTokenizationWord + default: + // do nothing + } + + nestedProperties = append(nestedProperties, &models.NestedProperty{ + Name: "nested_" + pdt.AsName(), + DataType: pdt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: tokenization, + }) + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propPrimitives := &models.Property{ + Name: "objectProp", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + } + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + }, + }, + } + + for _, prop := range []*models.Property{propPrimitives, propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.NoError(t, err) + }) + } + }) + } + }) + + t.Run("does not validate deprecated primitive types", func(t *testing.T) { + for _, pdt := range schema.DeprecatedPrimitiveDataTypes { + t.Run(pdt.String(), func(t *testing.T) { + nestedProperties := []*models.NestedProperty{ + { + Name: "nested_" + pdt.AsName(), + DataType: pdt.PropString(), + IndexFilterable: &vFalse, + 
IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + }, + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propPrimitives := &models.Property{ + Name: "objectProp", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + } + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + }, + }, + } + + for _, prop := range []*models.Property{propPrimitives, propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.ErrorContains(t, err, prop.Name) + assert.ErrorContains(t, err, fmt.Sprintf("data type '%s' is deprecated and not allowed as nested property", pdt.String())) + }) + } + }) + } + }) + } + }) + + t.Run("does not validate unsupported primitive types", func(t *testing.T) { + for _, pdt := range []schema.DataType{schema.DataTypeGeoCoordinates, schema.DataTypePhoneNumber} { + t.Run(pdt.String(), func(t *testing.T) { + nestedProperties := []*models.NestedProperty{ + { + Name: "nested_" + pdt.AsName(), + DataType: pdt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + }, + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propPrimitives := &models.Property{ + Name: "objectProp", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + 
Tokenization: "", + NestedProperties: nestedProperties, + } + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + }, + }, + } + + for _, prop := range []*models.Property{propPrimitives, propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.ErrorContains(t, err, prop.Name) + assert.ErrorContains(t, err, fmt.Sprintf("data type '%s' not allowed as nested property", pdt.String())) + }) + } + }) + } + }) + } + }) + + t.Run("does not validate ref types", func(t *testing.T) { + nestedProperties := []*models.NestedProperty{ + { + Name: "nested_ref", + DataType: []string{"SomeClass"}, + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + }, + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propPrimitives := &models.Property{ + Name: "objectProp", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + } + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + }, + }, + } + + for _, prop := 
range []*models.Property{propPrimitives, propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.ErrorContains(t, err, prop.Name) + assert.ErrorContains(t, err, "reference data type not allowed") + }) + } + }) + } + }) + + t.Run("does not validate empty nested properties", func(t *testing.T) { + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propPrimitives := &models.Property{ + Name: "objectProp", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + } + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + }, + }, + } + + for _, prop := range []*models.Property{propPrimitives, propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.ErrorContains(t, err, prop.Name) + assert.ErrorContains(t, err, "At least one nested property is required for data type object/object[]") + }) + } + }) + } + }) + + t.Run("does not validate tokenization on non text/text[] primitive data types", func(t *testing.T) { + for _, pdt := range schema.PrimitiveDataTypes { + switch pdt { + case schema.DataTypeText, schema.DataTypeTextArray: + continue + case schema.DataTypeGeoCoordinates, schema.DataTypePhoneNumber: + // skip - not supported as nested + continue + default: + // do nothing + } + + t.Run(pdt.String(), func(t *testing.T) { + nestedProperties := []*models.NestedProperty{ + { + Name: "nested_" + pdt.AsName(), + DataType: pdt.PropString(), + 
IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: models.PropertyTokenizationWord, + }, + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propPrimitives := &models.Property{ + Name: "objectProp", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + } + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + }, + }, + } + + for _, prop := range []*models.Property{propPrimitives, propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.ErrorContains(t, err, prop.Name) + assert.ErrorContains(t, err, fmt.Sprintf("Tokenization is not allowed for data type '%s'", pdt.String())) + }) + } + }) + } + }) + } + }) + + t.Run("does not validate tokenization on nested data types", func(t *testing.T) { + nestedProperties := []*models.NestedProperty{ + { + Name: "nested_int", + DataType: schema.DataTypeInt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + }, + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: 
"nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: models.PropertyTokenizationWord, + NestedProperties: nestedProperties, + }, + }, + } + + for _, prop := range []*models.Property{propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.ErrorContains(t, err, prop.Name) + assert.ErrorContains(t, err, "Tokenization is not allowed for object/object[] data types") + }) + } + }) + } + }) + + t.Run("validates indexFilterable on primitive data types", func(t *testing.T) { + nestedProperties := []*models.NestedProperty{} + for _, pdt := range schema.PrimitiveDataTypes { + tokenization := "" + switch pdt { + case schema.DataTypeBlob: + // skip - not indexable + continue + case schema.DataTypeGeoCoordinates, schema.DataTypePhoneNumber: + // skip - not supported as nested + continue + case schema.DataTypeText, schema.DataTypeTextArray: + tokenization = models.PropertyTokenizationWord + default: + // do nothing + } + + nestedProperties = append(nestedProperties, &models.NestedProperty{ + Name: "nested_" + pdt.AsName(), + DataType: pdt.PropString(), + IndexFilterable: &vTrue, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: tokenization, + }) + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propPrimitives := &models.Property{ + Name: "objectProp", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + } + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + 
IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + }, + }, + } + + for _, prop := range []*models.Property{propPrimitives, propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.NoError(t, err) + }) + } + }) + } + }) + + t.Run("does not validate indexFilterable on blob data type", func(t *testing.T) { + nestedProperties := []*models.NestedProperty{ + { + Name: "nested_blob", + DataType: schema.DataTypeBlob.PropString(), + IndexFilterable: &vTrue, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + }, + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propPrimitives := &models.Property{ + Name: "objectProp", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + } + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + }, + }, + } + + for _, prop := range []*models.Property{propPrimitives, propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.ErrorContains(t, err, prop.Name) + assert.ErrorContains(t, err, "indexFilterable is not allowed for blob data type") + }) + } + }) + } + }) + + t.Run("validates indexFilterable on nested data types", func(t *testing.T) { + nestedProperties := []*models.NestedProperty{ + { + Name: 
"nested_int", + DataType: schema.DataTypeInt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + }, + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vTrue, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + }, + }, + } + + for _, prop := range []*models.Property{propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.NoError(t, err) + }) + } + }) + } + }) + + t.Run("validates indexSearchable on text/text[] data types", func(t *testing.T) { + nestedProperties := []*models.NestedProperty{} + for _, pdt := range []schema.DataType{schema.DataTypeText, schema.DataTypeTextArray} { + nestedProperties = append(nestedProperties, &models.NestedProperty{ + Name: "nested_" + pdt.AsName(), + DataType: pdt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vTrue, + IndexRangeFilters: &vFalse, + Tokenization: models.PropertyTokenizationWord, + }) + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propPrimitives := &models.Property{ + Name: "objectProp", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + } + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + 
NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + }, + }, + } + + for _, prop := range []*models.Property{propPrimitives, propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.NoError(t, err) + }) + } + }) + } + }) + + t.Run("does not validate indexSearchable on primitive data types", func(t *testing.T) { + nestedProperties := []*models.NestedProperty{} + for _, pdt := range schema.PrimitiveDataTypes { + switch pdt { + case schema.DataTypeText, schema.DataTypeTextArray: + continue + case schema.DataTypeGeoCoordinates, schema.DataTypePhoneNumber: + // skip - not supported as nested + continue + default: + // do nothing + } + + t.Run(pdt.String(), func(t *testing.T) { + nestedProperties = append(nestedProperties, &models.NestedProperty{ + Name: "nested_" + pdt.AsName(), + DataType: pdt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vTrue, + IndexRangeFilters: &vFalse, + Tokenization: "", + }) + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propPrimitives := &models.Property{ + Name: "objectProp", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + } + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + }, + }, + } 
+ + for _, prop := range []*models.Property{propPrimitives, propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.ErrorContains(t, err, prop.Name) + assert.ErrorContains(t, err, "`indexSearchable` is not allowed for other than text/text[] data types") + }) + } + }) + } + }) + } + }) + + t.Run("does not validate indexSearchable on nested data types", func(t *testing.T) { + nestedProperties := []*models.NestedProperty{ + { + Name: "nested_int", + DataType: schema.DataTypeInt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + }, + } + + for _, ndt := range schema.NestedDataTypes { + t.Run(ndt.String(), func(t *testing.T) { + propLvl2Primitives := &models.Property{ + Name: "objectPropLvl2", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vFalse, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: []*models.NestedProperty{ + { + Name: "nested_object", + DataType: ndt.PropString(), + IndexFilterable: &vFalse, + IndexSearchable: &vTrue, + IndexRangeFilters: &vFalse, + Tokenization: "", + NestedProperties: nestedProperties, + }, + }, + } + + for _, prop := range []*models.Property{propLvl2Primitives} { + t.Run(prop.Name, func(t *testing.T) { + err := validateNestedProperties(prop.NestedProperties, prop.Name) + assert.ErrorContains(t, err, prop.Name) + assert.ErrorContains(t, err, "`indexSearchable` is not allowed for other than text/text[] data types") + }) + } + }) + } + }) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_index.go b/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_index.go new file mode 100644 index 0000000000000000000000000000000000000000..b42564afa51eafb860b6d1ba67e305323cb38438 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_index.go @@ -0,0 +1,545 @@ +// _ _ +// __ _____ __ 
___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package sharding + +import ( + "context" + "errors" + "fmt" + "io" + "math/rand" + "sync" + "sync/atomic" + "time" + + "github.com/weaviate/weaviate/entities/dto" + "github.com/weaviate/weaviate/entities/models" + + "github.com/go-openapi/strfmt" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/aggregation" + enterrors "github.com/weaviate/weaviate/entities/errors" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/searchparams" + "github.com/weaviate/weaviate/entities/storobj" + "github.com/weaviate/weaviate/usecases/file" + "github.com/weaviate/weaviate/usecases/objects" +) + +type RemoteIndex struct { + class string + stateGetter shardingStateGetter + client RemoteIndexClient + nodeResolver nodeResolver +} + +type shardingStateGetter interface { + // ShardOwner returns id of owner node + ShardOwner(class, shard string) (string, error) + ShardReplicas(class, shard string) ([]string, error) +} + +func NewRemoteIndex(className string, + stateGetter shardingStateGetter, nodeResolver nodeResolver, + client RemoteIndexClient, +) *RemoteIndex { + return &RemoteIndex{ + class: className, + stateGetter: stateGetter, + client: client, + nodeResolver: nodeResolver, + } +} + +type nodeResolver interface { + NodeHostname(nodeName string) (string, bool) +} + +type RemoteIndexClient interface { + PutObject(ctx context.Context, hostName, indexName, shardName string, + obj *storobj.Object, schemaVersion uint64) error + BatchPutObjects(ctx context.Context, hostName, indexName, shardName string, + objs []*storobj.Object, repl 
*additional.ReplicationProperties, schemaVersion uint64) []error + BatchAddReferences(ctx context.Context, hostName, indexName, shardName string, + refs objects.BatchReferences, schemaVersion uint64) []error + GetObject(ctx context.Context, hostname, indexName, shardName string, + id strfmt.UUID, props search.SelectProperties, + additional additional.Properties) (*storobj.Object, error) + Exists(ctx context.Context, hostname, indexName, shardName string, + id strfmt.UUID) (bool, error) + DeleteObject(ctx context.Context, hostname, indexName, shardName string, + id strfmt.UUID, deletionTime time.Time, schemaVersion uint64) error + MergeObject(ctx context.Context, hostname, indexName, shardName string, + mergeDoc objects.MergeDocument, schemaVersion uint64) error + MultiGetObjects(ctx context.Context, hostname, indexName, shardName string, + ids []strfmt.UUID) ([]*storobj.Object, error) + SearchShard(ctx context.Context, hostname, indexName, shardName string, + searchVector []models.Vector, targetVector []string, distance float32, limit int, filters *filters.LocalFilter, + keywordRanking *searchparams.KeywordRanking, sort []filters.Sort, + cursor *filters.Cursor, groupBy *searchparams.GroupBy, + additional additional.Properties, targetCombination *dto.TargetCombination, properties []string, + ) ([]*storobj.Object, []float32, error) + + Aggregate(ctx context.Context, hostname, indexName, shardName string, + params aggregation.Params) (*aggregation.Result, error) + FindUUIDs(ctx context.Context, hostName, indexName, shardName string, + filters *filters.LocalFilter) ([]strfmt.UUID, error) + DeleteObjectBatch(ctx context.Context, hostName, indexName, shardName string, + uuids []strfmt.UUID, deletionTime time.Time, dryRun bool, schemaVersion uint64) objects.BatchSimpleObjects + GetShardQueueSize(ctx context.Context, hostName, indexName, shardName string) (int64, error) + GetShardStatus(ctx context.Context, hostName, indexName, shardName string) (string, error) + 
UpdateShardStatus(ctx context.Context, hostName, indexName, shardName, targetStatus string, schemaVersion uint64) error + + PutFile(ctx context.Context, hostName, indexName, shardName, fileName string, + payload io.ReadSeekCloser) error + + // PauseFileActivity pauses the shard replica background processes on the specified node. + // You should explicitly resume the background processes once you're done. + PauseFileActivity(ctx context.Context, hostName, indexName, shardName string, schemaVersion uint64) error + // ResumeFileActivity resumes the shard replica background processes on the specified node. + ResumeFileActivity(ctx context.Context, hostName, indexName, shardName string) error + // ListFiles returns a list of files that can be used to get the shard data at the time the pause was + // requested. + ListFiles(ctx context.Context, hostName, indexName, shardName string) ([]string, error) + // GetFileMetadata returns file info at the given path in the shard's root directory. + GetFileMetadata(ctx context.Context, hostName, indexName, shardName, fileName string) (file.FileMetadata, error) + // GetFile returns a reader for the file at the given path in the shard's root directory. + // The caller must close the returned io.ReadCloser if no error is returned. + GetFile(ctx context.Context, hostName, indexName, shardName, fileName string) (io.ReadCloser, error) + // AddAsyncReplicationTargetNode adds the async replication target node for a shard. + AddAsyncReplicationTargetNode(ctx context.Context, hostName, indexName, shardName string, targetNodeOverride additional.AsyncReplicationTargetNodeOverride, schemaVersion uint64) error + // RemoveAsyncReplicationTargetNode removes the async replication target node for a shard. 
+ RemoveAsyncReplicationTargetNode(ctx context.Context, hostName, indexName, shardName string, targetNodeOverride additional.AsyncReplicationTargetNodeOverride) error +} + +func (ri *RemoteIndex) PutObject(ctx context.Context, shardName string, + obj *storobj.Object, schemaVersion uint64, +) error { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + return fmt.Errorf("class %s has no physical shard %q: %w", ri.class, shardName, err) + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + if !ok { + return fmt.Errorf("resolve node name %q to host", owner) + } + + return ri.client.PutObject(ctx, host, ri.class, shardName, obj, schemaVersion) +} + +// helper for single errors that affect the entire batch, assign the error to +// every single item in the batch +func duplicateErr(in error, count int) []error { + out := make([]error, count) + for i := range out { + out[i] = in + } + return out +} + +func (ri *RemoteIndex) BatchPutObjects(ctx context.Context, shardName string, + objs []*storobj.Object, schemaVersion uint64, +) []error { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + return duplicateErr(fmt.Errorf("class %s has no physical shard %q: %w", + ri.class, shardName, err), len(objs)) + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + if !ok { + return duplicateErr(fmt.Errorf("resolve node name %q to host", + owner), len(objs)) + } + + return ri.client.BatchPutObjects(ctx, host, ri.class, shardName, objs, nil, schemaVersion) +} + +func (ri *RemoteIndex) BatchAddReferences(ctx context.Context, shardName string, + refs objects.BatchReferences, schemaVersion uint64, +) []error { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + return duplicateErr(fmt.Errorf("class %s has no physical shard %q: %w", + ri.class, shardName, err), len(refs)) + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + if !ok { + return duplicateErr(fmt.Errorf("resolve node name %q 
to host", + owner), len(refs)) + } + + return ri.client.BatchAddReferences(ctx, host, ri.class, shardName, refs, schemaVersion) +} + +func (ri *RemoteIndex) Exists(ctx context.Context, shardName string, + id strfmt.UUID, +) (bool, error) { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + return false, fmt.Errorf("class %s has no physical shard %q: %w", ri.class, shardName, err) + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + if !ok { + return false, fmt.Errorf("resolve node name %q to host", owner) + } + + return ri.client.Exists(ctx, host, ri.class, shardName, id) +} + +func (ri *RemoteIndex) DeleteObject(ctx context.Context, shardName string, + id strfmt.UUID, deletionTime time.Time, schemaVersion uint64, +) error { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + return fmt.Errorf("class %s has no physical shard %q: %w", ri.class, shardName, err) + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + if !ok { + return fmt.Errorf("resolve node name %q to host", owner) + } + + return ri.client.DeleteObject(ctx, host, ri.class, shardName, id, deletionTime, schemaVersion) +} + +func (ri *RemoteIndex) MergeObject(ctx context.Context, shardName string, + mergeDoc objects.MergeDocument, schemaVersion uint64, +) error { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + return fmt.Errorf("class %s has no physical shard %q: %w", ri.class, shardName, err) + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + if !ok { + return fmt.Errorf("resolve node name %q to host", owner) + } + + return ri.client.MergeObject(ctx, host, ri.class, shardName, mergeDoc, schemaVersion) +} + +func (ri *RemoteIndex) GetObject(ctx context.Context, shardName string, + id strfmt.UUID, props search.SelectProperties, + additional additional.Properties, +) (*storobj.Object, error) { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + return nil, 
fmt.Errorf("class %s has no physical shard %q: %w", ri.class, shardName, err) + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + if !ok { + return nil, fmt.Errorf("resolve node name %q to host", owner) + } + + return ri.client.GetObject(ctx, host, ri.class, shardName, id, props, additional) +} + +func (ri *RemoteIndex) MultiGetObjects(ctx context.Context, shardName string, + ids []strfmt.UUID, +) ([]*storobj.Object, error) { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + return nil, fmt.Errorf("class %s has no physical shard %q: %w", ri.class, shardName, err) + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + if !ok { + return nil, fmt.Errorf("resolve node name %q to host", owner) + } + + return ri.client.MultiGetObjects(ctx, host, ri.class, shardName, ids) +} + +type ReplicasSearchResult struct { + Objects []*storobj.Object + Scores []float32 + Node string +} + +func (ri *RemoteIndex) SearchAllReplicas(ctx context.Context, + log logrus.FieldLogger, + shard string, + queryVec []models.Vector, + targetVector []string, + distance float32, + limit int, + filters *filters.LocalFilter, + keywordRanking *searchparams.KeywordRanking, + sort []filters.Sort, + cursor *filters.Cursor, + groupBy *searchparams.GroupBy, + adds additional.Properties, + localNode string, + targetCombination *dto.TargetCombination, + properties []string, +) ([]ReplicasSearchResult, error) { + remoteShardQuery := func(node, host string) (ReplicasSearchResult, error) { + objs, scores, err := ri.client.SearchShard(ctx, host, ri.class, shard, + queryVec, targetVector, distance, limit, filters, keywordRanking, sort, cursor, groupBy, adds, targetCombination, properties) + if err != nil { + return ReplicasSearchResult{}, err + } + return ReplicasSearchResult{Objects: objs, Scores: scores, Node: node}, nil + } + return ri.queryAllReplicas(ctx, log, shard, remoteShardQuery, localNode) +} + +func (ri *RemoteIndex) SearchShard(ctx context.Context, shard 
string, + queryVec []models.Vector, + targetVector []string, + distance float32, + limit int, + filters *filters.LocalFilter, + keywordRanking *searchparams.KeywordRanking, + sort []filters.Sort, + cursor *filters.Cursor, + groupBy *searchparams.GroupBy, + adds additional.Properties, + targetCombination *dto.TargetCombination, + properties []string, +) ([]*storobj.Object, []float32, string, error) { + type pair struct { + first []*storobj.Object + second []float32 + } + f := func(node, host string) (interface{}, error) { + objs, scores, err := ri.client.SearchShard(ctx, host, ri.class, shard, + queryVec, targetVector, distance, limit, filters, keywordRanking, sort, cursor, groupBy, adds, targetCombination, properties) + if err != nil { + return nil, err + } + return pair{objs, scores}, err + } + rr, node, err := ri.queryReplicas(ctx, shard, f) + if err != nil { + return nil, nil, node, err + } + r := rr.(pair) + return r.first, r.second, node, err +} + +func (ri *RemoteIndex) Aggregate( + ctx context.Context, + shard string, + params aggregation.Params, +) (*aggregation.Result, error) { + f := func(_, host string) (interface{}, error) { + r, err := ri.client.Aggregate(ctx, host, ri.class, shard, params) + if err != nil { + return nil, err + } + return r, nil + } + rr, _, err := ri.queryReplicas(ctx, shard, f) + if err != nil { + return nil, err + } + return rr.(*aggregation.Result), err +} + +func (ri *RemoteIndex) FindUUIDs(ctx context.Context, shardName string, + filters *filters.LocalFilter, +) ([]strfmt.UUID, error) { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + return nil, fmt.Errorf("class %s has no physical shard %q: %w", ri.class, shardName, err) + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + if !ok { + return nil, fmt.Errorf("resolve node name %q to host", owner) + } + + return ri.client.FindUUIDs(ctx, host, ri.class, shardName, filters) +} + +func (ri *RemoteIndex) DeleteObjectBatch(ctx context.Context, 
shardName string, + uuids []strfmt.UUID, deletionTime time.Time, dryRun bool, schemaVersion uint64, +) objects.BatchSimpleObjects { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + err := fmt.Errorf("class %s has no physical shard %q: %w", ri.class, shardName, err) + return objects.BatchSimpleObjects{objects.BatchSimpleObject{Err: err}} + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + if !ok { + err := fmt.Errorf("resolve node name %q to host", owner) + return objects.BatchSimpleObjects{objects.BatchSimpleObject{Err: err}} + } + + return ri.client.DeleteObjectBatch(ctx, host, ri.class, shardName, uuids, deletionTime, dryRun, schemaVersion) +} + +func (ri *RemoteIndex) GetShardQueueSize(ctx context.Context, shardName string) (int64, error) { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + return 0, fmt.Errorf("class %s has no physical shard %q: %w", ri.class, shardName, err) + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + if !ok { + return 0, fmt.Errorf("resolve node name %q to host", owner) + } + + return ri.client.GetShardQueueSize(ctx, host, ri.class, shardName) +} + +func (ri *RemoteIndex) GetShardStatus(ctx context.Context, shardName string) (string, error) { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + return "", fmt.Errorf("class %s has no physical shard %q: %w", ri.class, shardName, err) + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + if !ok { + return "", fmt.Errorf("resolve node name %q to host", owner) + } + + return ri.client.GetShardStatus(ctx, host, ri.class, shardName) +} + +func (ri *RemoteIndex) UpdateShardStatus(ctx context.Context, shardName, targetStatus string, schemaVersion uint64) error { + owner, err := ri.stateGetter.ShardOwner(ri.class, shardName) + if err != nil { + return fmt.Errorf("class %s has no physical shard %q: %w", ri.class, shardName, err) + } + + host, ok := ri.nodeResolver.NodeHostname(owner) + 
if !ok { + return fmt.Errorf("resolve node name %q to host", owner) + } + + return ri.client.UpdateShardStatus(ctx, host, ri.class, shardName, targetStatus, schemaVersion) +} + +func (ri *RemoteIndex) queryAllReplicas( + ctx context.Context, + log logrus.FieldLogger, + shard string, + do func(nodeName, host string) (ReplicasSearchResult, error), + localNode string, +) (resp []ReplicasSearchResult, err error) { + replicas, err := ri.stateGetter.ShardReplicas(ri.class, shard) + if err != nil || len(replicas) == 0 { + return nil, fmt.Errorf("class %q has no physical shard %q: %w", ri.class, shard, err) + } + + queryOne := func(replica string) (ReplicasSearchResult, error) { + host, ok := ri.nodeResolver.NodeHostname(replica) + if !ok || host == "" { + return ReplicasSearchResult{}, fmt.Errorf("unable to resolve node name %q to host", replica) + } + return do(replica, host) + } + + var queriesSent atomic.Int64 + + queryAll := func(replicas []string) (resp []ReplicasSearchResult, err error) { + var mu sync.Mutex // protect resp + errlist + var searchResult ReplicasSearchResult + var errList error + + wg := sync.WaitGroup{} + for _, node := range replicas { + node := node // prevent loop variable capture + if node == localNode { + // Skip local node to ensure we don't query again our local shard -> it is handled separately in the search + continue + } + + wg.Add(1) + enterrors.GoWrapper(func() { + defer wg.Done() + + if errC := ctx.Err(); errC != nil { + mu.Lock() + errList = errors.Join(errList, fmt.Errorf("error while searching shard=%s replica node=%s: %w", shard, node, errC)) + mu.Unlock() + return + } + + queriesSent.Add(1) + if searchResult, err = queryOne(node); err != nil { + mu.Lock() + errList = errors.Join(errList, fmt.Errorf("error while searching shard=%s replica node=%s: %w", shard, node, err)) + mu.Unlock() + return + } + + mu.Lock() + resp = append(resp, searchResult) + mu.Unlock() + }, log) + } + wg.Wait() + + if errList != nil { + // Simply log the 
errors but don't return them unless we have no valid result + log.Warnf("errors happened during full replicas search for shard '%s' errors: %s", shard, errList) + } + + if len(resp) == 0 { + return nil, errList + } + if len(resp) != int(queriesSent.Load()) { + log.Warnf("full replicas search response does not match replica count: response=%d replicas=%d", len(resp), len(replicas)) + } + return resp, nil + } + return queryAll(replicas) +} + +func (ri *RemoteIndex) queryReplicas( + ctx context.Context, + shard string, + do func(nodeName, host string) (interface{}, error), +) (resp interface{}, node string, err error) { + replicas, err := ri.stateGetter.ShardReplicas(ri.class, shard) + if err != nil || len(replicas) == 0 { + return nil, + "", + fmt.Errorf("class %q has no physical shard %q: %w", ri.class, shard, err) + } + + queryOne := func(replica string) (interface{}, error) { + host, ok := ri.nodeResolver.NodeHostname(replica) + if !ok || host == "" { + return nil, fmt.Errorf("resolve node name %q to host", replica) + } + return do(replica, host) + } + + queryUntil := func(replicas []string) (resp interface{}, node string, err error) { + for _, node = range replicas { + if errC := ctx.Err(); errC != nil { + return nil, node, errC + } + if resp, err = queryOne(node); err == nil { + return resp, node, nil + } + } + return + } + first := rand.Intn(len(replicas)) + if resp, node, err = queryUntil(replicas[first:]); err != nil && first != 0 { + return queryUntil(replicas[:first]) + } + return +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_index_incoming.go b/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_index_incoming.go new file mode 100644 index 0000000000000000000000000000000000000000..59850e2ec983531104b4a1fb96017da1d171607b --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_index_incoming.go @@ -0,0 +1,472 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ 
_` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package sharding + +import ( + "context" + "fmt" + "io" + "time" + + "github.com/weaviate/weaviate/entities/dto" + + "github.com/go-openapi/strfmt" + "github.com/pkg/errors" + "github.com/weaviate/weaviate/cluster/router/types" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/aggregation" + enterrors "github.com/weaviate/weaviate/entities/errors" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/searchparams" + "github.com/weaviate/weaviate/entities/storobj" + "github.com/weaviate/weaviate/usecases/file" + "github.com/weaviate/weaviate/usecases/objects" + "github.com/weaviate/weaviate/usecases/replica/hashtree" +) + +type RemoteIncomingRepo interface { + GetIndexForIncomingSharding(className schema.ClassName) RemoteIndexIncomingRepo +} + +type RemoteIncomingSchema interface { + ReadOnlyClassWithVersion(ctx context.Context, class string, version uint64) (*models.Class, error) +} + +type RemoteIndexIncomingRepo interface { + IncomingPutObject(ctx context.Context, shardName string, + obj *storobj.Object, schemaVersion uint64) error + IncomingBatchPutObjects(ctx context.Context, shardName string, + objs []*storobj.Object, schemaVersion uint64) []error + IncomingBatchAddReferences(ctx context.Context, shardName string, + refs objects.BatchReferences, schemaVersion uint64) []error + IncomingGetObject(ctx context.Context, shardName string, id strfmt.UUID, + selectProperties search.SelectProperties, + additional additional.Properties) (*storobj.Object, error) + IncomingExists(ctx context.Context, shardName string, + id strfmt.UUID) (bool, 
error) + IncomingDeleteObject(ctx context.Context, shardName string, + id strfmt.UUID, deletionTime time.Time, schemaVersion uint64) error + IncomingMergeObject(ctx context.Context, shardName string, + mergeDoc objects.MergeDocument, schemaVersion uint64) error + IncomingMultiGetObjects(ctx context.Context, shardName string, + ids []strfmt.UUID) ([]*storobj.Object, error) + IncomingSearch(ctx context.Context, shardName string, + vectors []models.Vector, targetVectors []string, distance float32, limit int, + filters *filters.LocalFilter, keywordRanking *searchparams.KeywordRanking, + sort []filters.Sort, cursor *filters.Cursor, groupBy *searchparams.GroupBy, + additional additional.Properties, targetCombination *dto.TargetCombination, properties []string, + ) ([]*storobj.Object, []float32, error) + IncomingAggregate(ctx context.Context, shardName string, + params aggregation.Params, modules interface{}) (*aggregation.Result, error) + + IncomingFindUUIDs(ctx context.Context, shardName string, + filters *filters.LocalFilter) ([]strfmt.UUID, error) + IncomingDeleteObjectBatch(ctx context.Context, shardName string, + uuids []strfmt.UUID, deletionTime time.Time, dryRun bool, schemaVersion uint64) objects.BatchSimpleObjects + IncomingGetShardQueueSize(ctx context.Context, shardName string) (int64, error) + IncomingGetShardStatus(ctx context.Context, shardName string) (string, error) + IncomingUpdateShardStatus(ctx context.Context, shardName, targetStatus string, schemaVersion uint64) error + IncomingOverwriteObjects(ctx context.Context, shard string, + vobjects []*objects.VObject) ([]types.RepairResponse, error) + IncomingDigestObjects(ctx context.Context, shardName string, + ids []strfmt.UUID) (result []types.RepairResponse, err error) + IncomingDigestObjectsInRange(ctx context.Context, shardName string, + initialUUID, finalUUID strfmt.UUID, limit int) (result []types.RepairResponse, err error) + IncomingHashTreeLevel(ctx context.Context, shardName string, + level int, 
discriminant *hashtree.Bitset) (digests []hashtree.Digest, err error) + + // Scale-Out Replication POC + IncomingFilePutter(ctx context.Context, shardName, + filePath string) (io.WriteCloser, error) + IncomingCreateShard(ctx context.Context, className string, shardName string) error + IncomingReinitShard(ctx context.Context, shardName string) error + // IncomingPauseFileActivity See adapters/clients.RemoteIndex.IncomingPauseFileActivity + IncomingPauseFileActivity(ctx context.Context, shardName string) error + // IncomingResumeFileActivity See adapters/clients.RemoteIndex.IncomingResumeFileActivity + IncomingResumeFileActivity(ctx context.Context, shardName string) error + // IncomingListFiles See adapters/clients.RemoteIndex.IncomingListFiles + IncomingListFiles(ctx context.Context, shardName string) ([]string, error) + // IncomingGetFileMetadata See adapters/clients.RemoteIndex.GetFileMetadata + IncomingGetFileMetadata(ctx context.Context, shardName, relativeFilePath string) (file.FileMetadata, error) + // IncomingGetFile See adapters/clients.RemoteIndex.GetFile + IncomingGetFile(ctx context.Context, shardName, relativeFilePath string) (io.ReadCloser, error) + // IncomingAddAsyncReplicationTargetNode See adapters/clients.RemoteIndex.AddAsyncReplicationTargetNode + IncomingAddAsyncReplicationTargetNode(ctx context.Context, shardName string, targetNodeOverride additional.AsyncReplicationTargetNodeOverride) error + // IncomingRemoveAsyncReplicationTargetNode See adapters/clients.RemoteIndex.RemoveAsyncReplicationTargetNode + IncomingRemoveAsyncReplicationTargetNode(ctx context.Context, shardName string, targetNodeOverride additional.AsyncReplicationTargetNodeOverride) error +} + +type RemoteIndexIncoming struct { + repo RemoteIncomingRepo + schema RemoteIncomingSchema + modules interface{} +} + +func NewRemoteIndexIncoming(repo RemoteIncomingRepo, schema RemoteIncomingSchema, modules interface{}) *RemoteIndexIncoming { + return &RemoteIndexIncoming{ + repo: repo, + 
schema: schema, + modules: modules, + } +} + +func (rii *RemoteIndexIncoming) PutObject(ctx context.Context, indexName, + shardName string, obj *storobj.Object, schemaVersion uint64, +) error { + index, err := rii.indexForIncomingWrite(ctx, indexName, schemaVersion) + if err != nil { + return err + } + + return index.IncomingPutObject(ctx, shardName, obj, schemaVersion) +} + +func (rii *RemoteIndexIncoming) BatchPutObjects(ctx context.Context, indexName, + shardName string, objs []*storobj.Object, schemaVersion uint64, +) []error { + index, err := rii.indexForIncomingWrite(ctx, indexName, schemaVersion) + if err != nil { + return duplicateErr(err, len(objs)) + } + + return index.IncomingBatchPutObjects(ctx, shardName, objs, schemaVersion) +} + +func (rii *RemoteIndexIncoming) BatchAddReferences(ctx context.Context, indexName, + shardName string, refs objects.BatchReferences, schemaVersion uint64, +) []error { + index, err := rii.indexForIncomingWrite(ctx, indexName, schemaVersion) + if err != nil { + return duplicateErr(err, len(refs)) + } + + return index.IncomingBatchAddReferences(ctx, shardName, refs, schemaVersion) +} + +func (rii *RemoteIndexIncoming) GetObject(ctx context.Context, indexName, + shardName string, id strfmt.UUID, selectProperties search.SelectProperties, + additional additional.Properties, +) (*storobj.Object, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return nil, enterrors.NewErrUnprocessable(errors.Errorf("local index %q not found", indexName)) + } + + return index.IncomingGetObject(ctx, shardName, id, selectProperties, additional) +} + +func (rii *RemoteIndexIncoming) Exists(ctx context.Context, indexName, + shardName string, id strfmt.UUID, +) (bool, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return false, enterrors.NewErrUnprocessable(errors.Errorf("local index %q not found", indexName)) + } + + return 
index.IncomingExists(ctx, shardName, id) +} + +func (rii *RemoteIndexIncoming) DeleteObject(ctx context.Context, indexName, + shardName string, id strfmt.UUID, deletionTime time.Time, schemaVersion uint64, +) error { + index, err := rii.indexForIncomingWrite(ctx, indexName, schemaVersion) + if err != nil { + return err + } + + return index.IncomingDeleteObject(ctx, shardName, id, deletionTime, schemaVersion) +} + +func (rii *RemoteIndexIncoming) MergeObject(ctx context.Context, indexName, + shardName string, mergeDoc objects.MergeDocument, schemaVersion uint64, +) error { + index, err := rii.indexForIncomingWrite(ctx, indexName, schemaVersion) + if err != nil { + return err + } + + return index.IncomingMergeObject(ctx, shardName, mergeDoc, schemaVersion) +} + +func (rii *RemoteIndexIncoming) MultiGetObjects(ctx context.Context, indexName, + shardName string, ids []strfmt.UUID, +) ([]*storobj.Object, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return nil, enterrors.NewErrUnprocessable(errors.Errorf("local index %q not found", indexName)) + } + + return index.IncomingMultiGetObjects(ctx, shardName, ids) +} + +func (rii *RemoteIndexIncoming) Search(ctx context.Context, indexName, shardName string, + vectors []models.Vector, targetVectors []string, distance float32, limit int, filters *filters.LocalFilter, + keywordRanking *searchparams.KeywordRanking, sort []filters.Sort, cursor *filters.Cursor, + groupBy *searchparams.GroupBy, additional additional.Properties, targetCombination *dto.TargetCombination, + properties []string, +) ([]*storobj.Object, []float32, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return nil, nil, enterrors.NewErrUnprocessable(errors.Errorf("local index %q not found", indexName)) + } + + return index.IncomingSearch( + ctx, shardName, vectors, targetVectors, distance, limit, filters, keywordRanking, sort, cursor, groupBy, 
additional, targetCombination, properties) +} + +func (rii *RemoteIndexIncoming) Aggregate(ctx context.Context, indexName, shardName string, + params aggregation.Params, +) (*aggregation.Result, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return nil, enterrors.NewErrUnprocessable(errors.Errorf("local index %q not found", indexName)) + } + + return index.IncomingAggregate(ctx, shardName, params, rii.modules) +} + +func (rii *RemoteIndexIncoming) FindUUIDs(ctx context.Context, indexName, shardName string, + filters *filters.LocalFilter, +) ([]strfmt.UUID, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return nil, enterrors.NewErrUnprocessable(errors.Errorf("local index %q not found", indexName)) + } + + return index.IncomingFindUUIDs(ctx, shardName, filters) +} + +func (rii *RemoteIndexIncoming) DeleteObjectBatch(ctx context.Context, indexName, shardName string, + uuids []strfmt.UUID, deletionTime time.Time, dryRun bool, schemaVersion uint64, +) objects.BatchSimpleObjects { + index, err := rii.indexForIncomingWrite(ctx, indexName, schemaVersion) + if err != nil { + return objects.BatchSimpleObjects{objects.BatchSimpleObject{Err: err}} + } + + return index.IncomingDeleteObjectBatch(ctx, shardName, uuids, deletionTime, dryRun, schemaVersion) +} + +func (rii *RemoteIndexIncoming) GetShardQueueSize(ctx context.Context, + indexName, shardName string, +) (int64, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return 0, enterrors.NewErrUnprocessable(errors.Errorf("local index %q not found", indexName)) + } + + return index.IncomingGetShardQueueSize(ctx, shardName) +} + +func (rii *RemoteIndexIncoming) GetShardStatus(ctx context.Context, + indexName, shardName string, +) (string, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return 
"", enterrors.NewErrUnprocessable(errors.Errorf("local index %q not found", indexName)) + } + + return index.IncomingGetShardStatus(ctx, shardName) +} + +func (rii *RemoteIndexIncoming) UpdateShardStatus(ctx context.Context, + indexName, shardName, targetStatus string, schemaVersion uint64, +) error { + index, err := rii.indexForIncomingWrite(ctx, indexName, schemaVersion) + if err != nil { + return err + } + + return index.IncomingUpdateShardStatus(ctx, shardName, targetStatus, schemaVersion) +} + +func (rii *RemoteIndexIncoming) FilePutter(ctx context.Context, + indexName, shardName, filePath string, +) (io.WriteCloser, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return nil, errors.Errorf("local index %q not found", indexName) + } + + return index.IncomingFilePutter(ctx, shardName, filePath) +} + +func (rii *RemoteIndexIncoming) CreateShard(ctx context.Context, + indexName, shardName string, +) error { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return errors.Errorf("local index %q not found", indexName) + } + + return index.IncomingCreateShard(ctx, indexName, shardName) +} + +func (rii *RemoteIndexIncoming) ReInitShard(ctx context.Context, + indexName, shardName string, +) error { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return errors.Errorf("local index %q not found", indexName) + } + + return index.IncomingReinitShard(ctx, shardName) +} + +// PauseFileActivity see adapters/clients.RemoteIndex.PauseFileActivity +func (rii *RemoteIndexIncoming) PauseFileActivity(ctx context.Context, + indexName, shardName string, schemaVersion uint64, +) error { + index, err := rii.indexForIncomingWrite(ctx, indexName, schemaVersion) + if err != nil { + return fmt.Errorf("local index %q not found: %w", indexName, err) + } + + return index.IncomingPauseFileActivity(ctx, shardName) +} + +// 
ResumeFileActivity see adapters/clients.RemoteIndex.ResumeFileActivity +func (rii *RemoteIndexIncoming) ResumeFileActivity(ctx context.Context, + indexName, shardName string, +) error { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return errors.Errorf("local index %q not found", indexName) + } + + return index.IncomingResumeFileActivity(ctx, shardName) +} + +// ListFiles see adapters/clients.RemoteIndex.ListFiles +func (rii *RemoteIndexIncoming) ListFiles(ctx context.Context, + indexName, shardName string, +) ([]string, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return nil, errors.Errorf("local index %q not found", indexName) + } + + return index.IncomingListFiles(ctx, shardName) +} + +// GetFileMetadata see adapters/clients.RemoteIndex.GetFileMetadata +func (rii *RemoteIndexIncoming) GetFileMetadata(ctx context.Context, + indexName, shardName, relativeFilePath string, +) (file.FileMetadata, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return file.FileMetadata{}, errors.Errorf("local index %q not found", indexName) + } + + return index.IncomingGetFileMetadata(ctx, shardName, relativeFilePath) +} + +// GetFile see adapters/clients.RemoteIndex.GetFile +func (rii *RemoteIndexIncoming) GetFile(ctx context.Context, + indexName, shardName, relativeFilePath string, +) (io.ReadCloser, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return nil, errors.Errorf("local index %q not found", indexName) + } + + return index.IncomingGetFile(ctx, shardName, relativeFilePath) +} + +func (rii *RemoteIndexIncoming) OverwriteObjects(ctx context.Context, + indexName, shardName string, vobjects []*objects.VObject, +) ([]types.RepairResponse, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + 
return nil, fmt.Errorf("local index %q not found", indexName) + } + + return index.IncomingOverwriteObjects(ctx, shardName, vobjects) +} + +func (rii *RemoteIndexIncoming) DigestObjects(ctx context.Context, + indexName, shardName string, ids []strfmt.UUID, +) ([]types.RepairResponse, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return nil, enterrors.NewErrUnprocessable(fmt.Errorf("local index %q not found", indexName)) + } + + return index.IncomingDigestObjects(ctx, shardName, ids) +} + +func (rii *RemoteIndexIncoming) indexForIncomingWrite(ctx context.Context, indexName string, + schemaVersion uint64, +) (RemoteIndexIncomingRepo, error) { + // wait for schema and store to reach version >= schemaVersion + if _, err := rii.schema.ReadOnlyClassWithVersion(ctx, indexName, schemaVersion); err != nil { + return nil, fmt.Errorf("local index %q not found: %w", indexName, err) + } + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return nil, fmt.Errorf("local index %q not found", indexName) + } + + return index, nil +} + +func (rii *RemoteIndexIncoming) DigestObjectsInRange(ctx context.Context, + indexName, shardName string, initialUUID, finalUUID strfmt.UUID, limit int, +) ([]types.RepairResponse, error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return nil, fmt.Errorf("local index %q not found", indexName) + } + + return index.IncomingDigestObjectsInRange(ctx, shardName, initialUUID, finalUUID, limit) +} + +func (rii *RemoteIndexIncoming) HashTreeLevel(ctx context.Context, + indexName, shardName string, level int, discriminant *hashtree.Bitset, +) (digests []hashtree.Digest, err error) { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return nil, fmt.Errorf("local index %q not found", indexName) + } + + return index.IncomingHashTreeLevel(ctx, shardName, 
level, discriminant) +} + +func (rii *RemoteIndexIncoming) AddAsyncReplicationTargetNode( + ctx context.Context, + indexName, shardName string, + targetNodeOverride additional.AsyncReplicationTargetNodeOverride, + schemaVersion uint64, +) error { + index, err := rii.indexForIncomingWrite(ctx, indexName, schemaVersion) + if err != nil { + return fmt.Errorf("local index %q not found: %w", indexName, err) + } + + return index.IncomingAddAsyncReplicationTargetNode(ctx, shardName, targetNodeOverride) +} + +func (rii *RemoteIndexIncoming) RemoveAsyncReplicationTargetNode( + ctx context.Context, + indexName, shardName string, + targetNodeOverride additional.AsyncReplicationTargetNodeOverride, +) error { + index := rii.repo.GetIndexForIncomingSharding(schema.ClassName(indexName)) + if index == nil { + return fmt.Errorf("local index %q not found", indexName) + } + + return index.IncomingRemoveAsyncReplicationTargetNode(ctx, shardName, targetNodeOverride) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_index_test.go b/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_index_test.go new file mode 100644 index 0000000000000000000000000000000000000000..90a6ccdd92cb9a25d50e3ae9ef2da4fecd53fc77 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_index_test.go @@ -0,0 +1,124 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package sharding + +import ( + "context" + "errors" + "fmt" + "testing" +) + +var errAny = errors.New("anyErr") + +func TestQueryReplica(t *testing.T) { + var ( + ctx = context.Background() + canceledCtx, cancledFunc = context.WithCancel(ctx) + ) + cancledFunc() + doIf := func(targetNode string) func(node, host string) (interface{}, error) { + return func(node, host string) (interface{}, error) { + if node != targetNode { + return nil, errAny + } + return node, nil + } + } + tests := []struct { + ctx context.Context + resolver fakeNodeResolver + schema fakeSchema + targetNode string + success bool + name string + }{ + { + ctx, newFakeResolver(0, 0), newFakeSchema(0, 0), "N0", false, "empty schema", + }, + { + ctx, newFakeResolver(0, 1), newFakeSchema(1, 2), "N2", false, "unresolved name", + }, + { + ctx, newFakeResolver(0, 1), newFakeSchema(0, 1), "N0", true, "one replica", + }, + { + ctx, newFakeResolver(0, 9), newFakeSchema(0, 9), "N2", true, "random selection", + }, + { + canceledCtx, newFakeResolver(0, 9), newFakeSchema(0, 9), "N2", false, "canceled", + }, + } + + for _, test := range tests { + rindex := RemoteIndex{"C", &test.schema, nil, &test.resolver} + got, lastNode, err := rindex.queryReplicas(test.ctx, "S", doIf(test.targetNode)) + if !test.success { + if got != nil { + t.Errorf("%s: want: nil, got: %v", test.name, got) + } else if err == nil { + t.Errorf("%s: must return an error", test.name) + } + continue + } + if lastNode != test.targetNode { + t.Errorf("%s: last responding node want:%s got:%s", test.name, test.targetNode, lastNode) + } + } +} + +func newFakeResolver(fromNode, toNode int) fakeNodeResolver { + m := make(map[string]string, toNode-fromNode) + for i := fromNode; i < toNode; i++ { + m[fmt.Sprintf("N%d", i)] = fmt.Sprintf("H%d", i) + } + return fakeNodeResolver{m} +} + +func newFakeSchema(fromNode, toNode int) fakeSchema { + nodes := make([]string, 0, toNode-fromNode) + for i := fromNode; i < 
toNode; i++ { + nodes = append(nodes, fmt.Sprintf("N%d", i)) + } + return fakeSchema{nodes} +} + +type fakeNodeResolver struct { + rTable map[string]string +} + +func (r *fakeNodeResolver) AllHostnames() []string { + hosts := make([]string, 0, len(r.rTable)) + + for _, h := range r.rTable { + hosts = append(hosts, h) + } + + return hosts +} + +func (f *fakeNodeResolver) NodeHostname(name string) (string, bool) { + host, ok := f.rTable[name] + return host, ok +} + +type fakeSchema struct { + nodes []string +} + +func (f *fakeSchema) ShardOwner(class, shard string) (string, error) { + return "", nil +} + +func (f *fakeSchema) ShardReplicas(class, shard string) ([]string, error) { + return f.nodes, nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_node.go b/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_node.go new file mode 100644 index 0000000000000000000000000000000000000000..57d3b7790dda681a5fbebff328cbe0844c2fd701 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_node.go @@ -0,0 +1,52 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
//
// CONTACT: hello@weaviate.io
//

package sharding

import (
	"context"
	"fmt"

	"github.com/weaviate/weaviate/entities/models"
)

// RemoteNodeClient is the transport used to reach another node in the
// cluster for node-level status and statistics queries.
type RemoteNodeClient interface {
	GetNodeStatus(ctx context.Context, hostName, className, shardName, output string) (*models.NodeStatus, error)
	GetStatistics(ctx context.Context, hostName string) (*models.Statistics, error)
}

// RemoteNode resolves node names to hostnames and forwards node-level
// requests to the resolved host via the client.
type RemoteNode struct {
	client       RemoteNodeClient
	nodeResolver nodeResolver
}

// NewRemoteNode builds a RemoteNode from a resolver and a client.
func NewRemoteNode(nodeResolver nodeResolver, client RemoteNodeClient) *RemoteNode {
	return &RemoteNode{
		client:       client,
		nodeResolver: nodeResolver,
	}
}

// GetNodeStatus resolves nodeName to a host and fetches that node's status.
// Returns an error if the name cannot be resolved.
func (rn *RemoteNode) GetNodeStatus(ctx context.Context, nodeName, className, shardName, output string) (*models.NodeStatus, error) {
	host, ok := rn.nodeResolver.NodeHostname(nodeName)
	if !ok {
		return nil, fmt.Errorf("resolve node name %q to host", nodeName)
	}
	return rn.client.GetNodeStatus(ctx, host, className, shardName, output)
}

// GetStatistics resolves nodeName to a host and fetches that node's
// statistics. Returns an error if the name cannot be resolved.
func (rn *RemoteNode) GetStatistics(ctx context.Context, nodeName string) (*models.Statistics, error) {
	host, ok := rn.nodeResolver.NodeHostname(nodeName)
	if !ok {
		return nil, fmt.Errorf("resolve node name %q to host", nodeName)
	}
	return rn.client.GetStatistics(ctx, host)
}
diff --git a/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_node_incoming.go b/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_node_incoming.go
new file mode 100644
index 0000000000000000000000000000000000000000..95a53a9696a39d9b480b32e79fdde80421277717
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/usecases/sharding/remote_node_incoming.go
@@ -0,0 +1,41 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
//
// CONTACT: hello@weaviate.io
//

package sharding

import (
	"context"

	"github.com/weaviate/weaviate/entities/models"
)

// RemoteNodeIncomingRepo is the local repository that incoming node-level
// requests from other cluster members are served from.
type RemoteNodeIncomingRepo interface {
	IncomingGetNodeStatus(ctx context.Context, className, shardName, output string) (*models.NodeStatus, error)
	IncomingGetNodeStatistics() (*models.Statistics, error)
}

// RemoteNodeIncoming receives node-level requests from remote nodes and
// delegates them to the local repo.
type RemoteNodeIncoming struct {
	repo RemoteNodeIncomingRepo
}

// NewRemoteNodeIncoming builds a RemoteNodeIncoming backed by the given repo.
func NewRemoteNodeIncoming(repo RemoteNodeIncomingRepo) *RemoteNodeIncoming {
	return &RemoteNodeIncoming{
		repo: repo,
	}
}

// GetNodeStatus serves a remote node's request for this node's status by
// delegating to the local repo.
func (rni *RemoteNodeIncoming) GetNodeStatus(ctx context.Context, className, shardName, output string) (*models.NodeStatus, error) {
	return rni.repo.IncomingGetNodeStatus(ctx, className, shardName, output)
}

// GetStatistics serves a remote node's request for this node's statistics.
// NOTE(review): ctx is accepted but not forwarded — IncomingGetNodeStatistics
// takes no context; confirm cancellation is intentionally not propagated.
func (rni *RemoteNodeIncoming) GetStatistics(ctx context.Context) (*models.Statistics, error) {
	return rni.repo.IncomingGetNodeStatistics()
}
diff --git a/platform/dbops/binaries/weaviate-src/usecases/sharding/state.go b/platform/dbops/binaries/weaviate-src/usecases/sharding/state.go
new file mode 100644
index 0000000000000000000000000000000000000000..1984c0095575b95cb8096355189477ffdf25fa36
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/usecases/sharding/state.go
@@ -0,0 +1,669 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+//  CONTACT: hello@weaviate.io
+//
+
+package sharding
+
+import (
+	"errors"
+	"fmt"
+	"math"
+	"math/rand"
+	"slices"
+	"sort"
+
+	"github.com/spaolacci/murmur3"
+
+	"github.com/weaviate/weaviate/entities/models"
+	"github.com/weaviate/weaviate/entities/schema"
+	"github.com/weaviate/weaviate/usecases/cluster"
+	"github.com/weaviate/weaviate/usecases/sharding/config"
+)
+
+// shardNameLength is the number of random characters in a generated
+// physical/virtual shard name (see generateShardName).
+const shardNameLength = 12
+
+// ErrReplicaAlreadyExists is returned when adding a replica to a shard that
+// already contains it.
+var ErrReplicaAlreadyExists = errors.New("replica already exists")
+
+// State describes how an index is split into physical and virtual shards and
+// which nodes hold which physical shard.
+type State struct {
+	IndexID             string              `json:"indexID"` // for monitoring, reporting purposes. Does not influence the shard-calculations
+	Config              config.Config       `json:"config"`
+	Physical            map[string]Physical `json:"physical"`
+	Virtual             []Virtual           `json:"virtual"`
+	PartitioningEnabled bool                `json:"partitioningEnabled"`
+	ReplicationFactor   int64               `json:"replicationFactor"`
+
+	// different for each node, not to be serialized
+	localNodeName string // TODO: localNodeName is static it is better to store just once
+}
+
+// MigrateFromOldFormat checks if the old (pre-v1.17) format was used and
+// migrates it into the new format for backward-compatibility with all classes
+// created before v1.17
+func (s *State) MigrateFromOldFormat() {
+	for shardName, shard := range s.Physical {
+		if shard.LegacyBelongsToNodeForBackwardCompat != "" && len(shard.BelongsToNodes) == 0 {
+			shard.BelongsToNodes = []string{
+				shard.LegacyBelongsToNodeForBackwardCompat,
+			}
+			shard.LegacyBelongsToNodeForBackwardCompat = ""
+		}
+		// shard is a value copy, so the modified struct must be written back
+		s.Physical[shardName] = shard
+	}
+}
+
+// MigrateShardingStateReplicationFactor sets the ReplicationFactor field if it is unset (zero).
+// For partitioned states, it defaults to 1. For non-partitioned states, it checks that all shards
+// have a consistent number of physical replicas and uses that as the replication factor.
+//
+// Returns:
+// - error: if physical shards are missing in a non-partitioned state or if shard replica counts are inconsistent
+func (s *State) MigrateShardingStateReplicationFactor() error {
+	if s.ReplicationFactor > 0 {
+		return nil
+	}
+
+	if s.PartitioningEnabled {
+		// If partitioning is enabled, the ReplicationFactor is not used,
+		// but we set it to 1 just to be on the safe side.
+		s.ReplicationFactor = 1
+		return nil
+	}
+
+	var firstShard string
+	var replicationFactor int64
+	isFirstShard := true
+
+	for shard := range s.Physical {
+		shardReplicationFactor, err := s.getShardReplicationFactor(shard)
+		if err != nil {
+			return err
+		}
+
+		if isFirstShard {
+			firstShard = shard
+			replicationFactor = shardReplicationFactor
+			isFirstShard = false
+		} else if shardReplicationFactor != replicationFactor {
+			// All physical replicas of a shard should have the same replication factor since it's a
+			// cluster-wide configuration. Inconsistencies in this value indicate a corrupted sharding
+			// state that could lead to data reliability issues if not addressed or detected.
+			// This validation ensures the state is consistent and helps detect potential issues
+			// that would otherwise manifest as unpredictable behavior during operation.
+			// NOTE(review): on this path s.ReplicationFactor < 1, and getShardReplicationFactor
+			// then returns the constant 1 for every existing shard — so this branch appears
+			// unreachable as written. Confirm whether it was meant to inspect
+			// len(Physical[shard].BelongsToNodes) instead.
+			return fmt.Errorf(
+				"inconsistent replication factors across shards: %s has %d, %s has %d",
+				firstShard, replicationFactor,
+				shard, shardReplicationFactor,
+			)
+		}
+	}
+
+	// NOTE: if Physical is empty, replicationFactor is still 0 here.
+	s.ReplicationFactor = replicationFactor
+	return nil
+}
+
+// getShardReplicationFactor determines the number of replicas assigned to a given shard.
+// It is used during migration to verify consistency across shards.
+//
+// If the state's ReplicationFactor is unset (<1), it assumes a default of 1 as we do not want
+// any shard to end-up having no replica at all. Otherwise, it returns the current ReplicationFactor.
+//
+// Returns an error if the specified shard does not exist.
+func (s *State) getShardReplicationFactor(shard string) (int64, error) {
+	_, ok := s.Physical[shard]
+	if !ok {
+		return 0, fmt.Errorf("could not find shard %s", shard)
+	}
+
+	if s.ReplicationFactor < 1 {
+		return 1, nil
+	}
+
+	return s.ReplicationFactor, nil
+}
+
+// Virtual is one segment of the hash ring: Upper is the highest token it
+// owns and AssignedToPhysical names the physical shard storing it.
+type Virtual struct {
+	Name               string  `json:"name"`
+	Upper              uint64  `json:"upper"`
+	OwnsPercentage     float64 `json:"ownsPercentage"`
+	AssignedToPhysical string  `json:"assignedToPhysical"`
+}
+
+// Physical is a physical shard — the unit of storage and replication. It
+// owns a set of virtual shards and is replicated across BelongsToNodes.
+type Physical struct {
+	Name           string   `json:"name"`
+	OwnsVirtual    []string `json:"ownsVirtual,omitempty"`
+	OwnsPercentage float64  `json:"ownsPercentage"`
+
+	LegacyBelongsToNodeForBackwardCompat string   `json:"belongsToNode,omitempty"`
+	BelongsToNodes                       []string `json:"belongsToNodes,omitempty"`
+
+	Status string `json:"status,omitempty"`
+}
+
+// BelongsToNode for backward-compatibility when there was no replication. It
+// always returns the first node of the list
+func (p Physical) BelongsToNode() string {
+	// note: indexing [0] panics if BelongsToNodes is empty
+	return p.BelongsToNodes[0]
+}
+
+// AddReplicaToShard registers replica as an additional node for the given
+// shard. Fails (wrapping ErrReplicaAlreadyExists) if the node already holds
+// the shard.
+func (s *State) AddReplicaToShard(shard string, replica string) error {
+	if err := s.MigrateShardingStateReplicationFactor(); err != nil {
+		return fmt.Errorf("error while migrating sharding state: %w", err)
+	}
+	phys, ok := s.Physical[shard]
+	if !ok {
+		return fmt.Errorf("could not find shard %s", shard)
+	}
+	if err := phys.AddReplica(replica); err != nil {
+		return err
+	}
+	// phys is a value copy: write the updated struct back into the map
+	s.Physical[shard] = phys
+	return nil
+}
+
+// DeleteReplicaFromShard removes replica from the given shard, refusing to
+// drop the shard below the state's minimum ReplicationFactor.
+func (s *State) DeleteReplicaFromShard(shard string, replica string) error {
+	if err := s.MigrateShardingStateReplicationFactor(); err != nil {
+		return fmt.Errorf("error while migrating sharding state: %w", err)
+	}
+	phys, ok := s.Physical[shard]
+	if !ok {
+		return fmt.Errorf("could not find shard %s", shard)
+	}
+
+	numberOfReplicas, err := s.NumberOfReplicas(shard)
+	if err != nil {
+		return fmt.Errorf("error while getting number of replicas for shard %s: %w", shard, err)
+	}
+
+	if numberOfReplicas <= s.ReplicationFactor {
+		return fmt.Errorf("unable to delete replica from shard, minimum replication factor %d", s.ReplicationFactor)
+	}
+
+	if err := phys.DeleteReplica(replica); err != nil {
+		return err
+	}
+	// phys is a value copy: write the updated struct back into the map
+	s.Physical[shard] = phys
+	return nil
+}
+
+// NumberOfReplicas returns how many nodes currently hold the given shard.
+func (s *State) NumberOfReplicas(shard string) (int64, error) {
+	phys, ok := s.Physical[shard]
+	if !ok {
+		return 0, fmt.Errorf("could not find shard %s", shard)
+	}
+
+	return int64(len(phys.BelongsToNodes)), nil
+}
+
+// AddReplica appends replica to the shard's node list; fails if it is
+// already a member.
+func (p *Physical) AddReplica(replica string) error {
+	if slices.Contains(p.BelongsToNodes, replica) {
+		return fmt.Errorf("%w: %s", ErrReplicaAlreadyExists, replica)
+	}
+	p.BelongsToNodes = append(p.BelongsToNodes, replica)
+	return nil
+}
+
+// DeleteReplica removes replica from the shard's node list; removing a
+// non-member is a no-op.
+func (p *Physical) DeleteReplica(replica string) error {
+	if !slices.Contains(p.BelongsToNodes, replica) {
+		return nil // replica not found, nothing to do
+	}
+	idx := slices.Index(p.BelongsToNodes, replica)
+	p.BelongsToNodes = slices.Delete(p.BelongsToNodes, idx, idx+1)
+	return nil
+}
+
+// ActivityStatus normalizes the shard's stored status via the schema helper.
+func (p *Physical) ActivityStatus() string {
+	return schema.ActivityStatus(p.Status)
+}
+
+// IsLocalShard reports whether nodeName is one of this shard's replicas.
+func (p *Physical) IsLocalShard(nodeName string) bool {
+	for _, node := range p.BelongsToNodes {
+		if node == nodeName {
+			return true
+		}
+	}
+
+	return false
+}
+
+// InitState creates the initial sharding state for a new index. With
+// partitioning enabled only an empty Physical map is prepared (tenant
+// partitions are added later); otherwise physical and virtual shards are
+// created and distributed across the given nodes.
+func InitState(id string, config config.Config, nodeLocalName string, names []string, replFactor int64, partitioningEnabled bool) (*State, error) {
+	if replFactor < 1 {
+		return nil, fmt.Errorf("replication factor must be at least 1, got %d", replFactor)
+	}
+
+	if f, n := replFactor, len(names); f > int64(n) {
+		return nil, fmt.Errorf("could not find enough weaviate nodes for replication: %d available, %d requested", len(names), f)
+	}
+
+	out := &State{
+		Config:              config,
+		IndexID:             id,
+		localNodeName:       nodeLocalName,
+		PartitioningEnabled: partitioningEnabled,
+		ReplicationFactor:   replFactor,
+	}
+
+	if partitioningEnabled {
+		out.Physical = make(map[string]Physical, 128)
+		return out, nil
+	}
+
+	if err := out.initPhysical(names, replFactor); err != nil {
+		return nil, err
+	}
+	out.initVirtual()
+	out.distributeVirtualAmongPhysical()
+	return out, nil
+}
+
+// Shard returns the shard name if it exits and empty string otherwise
+func (s *State) Shard(partitionKey, objectID string) string {
+	if s.PartitioningEnabled {
+		if _, ok := s.Physical[partitionKey]; ok {
+			return partitionKey // will change in the future
+		}
+		return ""
+	}
+	return s.PhysicalShard([]byte(objectID))
+}
+
+// PhysicalShard maps the given key (e.g. an object id) onto a physical
+// shard by hashing it onto the virtual-shard ring.
+func (s *State) PhysicalShard(in []byte) string {
+	if len(s.Physical) == 0 {
+		panic("no physical shards present")
+	}
+
+	if len(s.Virtual) == 0 {
+		panic("no virtual shards present")
+	}
+
+	h := murmur3.New64()
+	h.Write(in)
+	token := h.Sum64()
+
+	virtual := s.virtualByToken(token)
+
+	return virtual.AssignedToPhysical
+}
+
+// CountPhysicalShards return a count of physical shards
+func (s *State) CountPhysicalShards() int {
+	return len(s.Physical)
+}
+
+// AllPhysicalShards returns the names of all physical shards, sorted.
+func (s *State) AllPhysicalShards() []string {
+	var names []string
+	for _, physical := range s.Physical {
+		names = append(names, physical.Name)
+	}
+
+	sort.Slice(names, func(a, b int) bool {
+		return names[a] < names[b]
+	})
+
+	return names
+}
+
+// AllPhysicalShardsAndReplicas maps each shard name to its replica nodes.
+// The returned slices alias the state's own BelongsToNodes slices.
+func (s *State) AllPhysicalShardsAndReplicas() map[string][]string {
+	shardsToReplicas := make(map[string][]string, len(s.Physical))
+	for _, physical := range s.Physical {
+		shardsToReplicas[physical.Name] = physical.BelongsToNodes
+	}
+	return shardsToReplicas
+}
+
+// AllLocalPhysicalShards returns the sorted names of the physical shards
+// replicated on this node.
+func (s *State) AllLocalPhysicalShards() []string {
+	var names []string
+	for _, physical := range s.Physical {
+		if s.IsLocalShard(physical.Name) {
+			names = append(names, physical.Name)
+		}
+	}
+
+	sort.Slice(names, func(a, b int) bool {
+		return names[a] < names[b]
+	})
+
+	return names
+}
+
+// SetLocalName sets the per-node (non-serialized) local node name.
+func (s *State) SetLocalName(name string) {
+	s.localNodeName = name
+}
+
+// IsLocalShard reports whether the named shard has a replica on this node.
+func (s *State) IsLocalShard(name string) bool {
+	for _, node := range s.Physical[name].BelongsToNodes {
+		if node == s.localNodeName {
+			return true
+		}
+	}
+
+	return false
+}
+
+// initPhysical assigns shards to nodes according to the following rules:
+//
+// - The starting point of the ring is random
+// - Shard N+1's first node is the right neighbor of shard N's first node
+// - If a shard has multiple nodes (replication) they are always the right
+// neighbors of the first node of that shard
+//
+// Example with 3 nodes, 2 shards, replicationFactor=2:
+//
+// Shard 1: Node1, Node2
+// Shard 2: Node2, Node3
+//
+// Example with 3 nodes, 3 shards, replicationFactor=3:
+//
+// Shard 1: Node1, Node2, Node3
+// Shard 2: Node2, Node3, Node1
+// Shard 3: Node3, Node1, Node2
+//
+// Example with 12 nodes, 3 shards, replicationFactor=5:
+//
+// Shard 1: Node7, Node8, Node9, Node10, Node 11
+// Shard 2: Node8, Node9, Node10, Node 11, Node 12
+// Shard 3: Node9, Node10, Node11, Node 12, Node 1
+func (s *State) initPhysical(nodes []string, replFactor int64) error {
+	if len(nodes) == 0 {
+		return fmt.Errorf("there is no nodes provided, can't initiate state for empty node list")
+	}
+	it, err := cluster.NewNodeIterator(nodes, cluster.StartAfter)
+	if err != nil {
+		return err
+	}
+	it.SetStartNode(nodes[len(nodes)-1])
+
+	s.Physical = map[string]Physical{}
+
+	// nodeSet tracks which nodes already received a shard's first replica in
+	// the current round, so first replicas spread evenly across nodes
+	nodeSet := make(map[string]bool)
+	for i := 0; i < s.Config.DesiredCount; i++ {
+		name := generateShardName()
+		shard := Physical{Name: name}
+		shard.BelongsToNodes = make([]string, 0, replFactor)
+		for { // select shard
+			node := it.Next()
+			if len(nodeSet) == len(nodes) { // this is a new round
+				for k := range nodeSet {
+					delete(nodeSet, k)
+				}
+			}
+			if !nodeSet[node] {
+				nodeSet[node] = true
+				shard.BelongsToNodes = append(shard.BelongsToNodes, node)
+				break
+			}
+		}
+
+		// remaining replicas are the right neighbors of the first node
+		for i := replFactor; i > 1; i-- {
+			shard.BelongsToNodes = append(shard.BelongsToNodes, it.Next())
+		}
+
+		s.Physical[name] = shard
+	}
+
+	return nil
+}
+
+// GetPartitions based on the specified shards, available nodes, and replFactor
+// It doesn't change the internal state
+// TODO-RAFT: Ensure this function is higherorder, if the repartition result is changed, this will result in
+// inconsistency when applying old log entry for add tenants
+func (s State) GetPartitions(nodes []string, shards []string, replFactor int64) (map[string][]string, error) {
+	if len(nodes) == 0 {
+		return nil, fmt.Errorf("list of storage nodes is empty")
+	}
+	if replFactor > int64(len(nodes)) {
+		return nil, fmt.Errorf("not enough replicas: found %d want %d", len(nodes), replFactor)
+	}
+	it, err := cluster.NewNodeIterator(nodes, cluster.StartAfter)
+	if err != nil {
+		return nil, err
+	}
+	it.SetStartNode(nodes[len(nodes)-1])
+	partitions := make(map[string][]string, len(shards))
+	nodeSet := make(map[string]bool)
+	for _, name := range shards {
+		// existing shards keep their owners, unless they are frozen/freezing
+		// (those get fresh owners assigned below)
+		if existedShard, exists := s.Physical[name]; exists &&
+			existedShard.Status != models.TenantActivityStatusFROZEN &&
+			existedShard.Status != models.TenantActivityStatusFREEZING {
+			continue
+		}
+		owners := make([]string, 0, replFactor)
+		for { // select shard
+			node := it.Next()
+			if len(nodeSet) == len(nodes) { // this is a new round
+				for k := range nodeSet {
+					delete(nodeSet, k)
+				}
+			}
+			if !nodeSet[node] {
+				nodeSet[node] = true
+				owners = append(owners, node)
+				break
+			}
+		}
+
+		for i := replFactor; i > 1; i-- {
+			owners = append(owners, it.Next())
+		}
+
+		partitions[name] = owners
+	}
+
+	return partitions, nil
+}
+
+// AddPartition to physical shards
+func (s *State) AddPartition(name string, nodes []string, status string) (Physical, error) {
+	if err := s.MigrateShardingStateReplicationFactor(); err != nil {
+		return Physical{}, fmt.Errorf("error while migrating sharding state: %w", err)
+	}
+	p := Physical{
+		Name:           name,
+		BelongsToNodes: nodes,
+		OwnsPercentage: 1.0,
+		Status:         status,
+	}
+	s.Physical[name] = p
+	return p, nil
+}
+
+// DeletePartition to physical shards. Return `true` if given partition is
+// actually deleted.
+func (s *State) DeletePartition(name string) (string, bool, error) {
+	if err := s.MigrateShardingStateReplicationFactor(); err != nil {
+		return "", false, fmt.Errorf("error while migrating sharding state: %w", err)
+	}
+	t, ok := s.Physical[name]
+	if !ok {
+		return "", false, nil
+	}
+	status := t.Status
+	delete(s.Physical, name)
+	return status, true, nil
+}
+
+// ApplyNodeMapping replaces node names with their new value form nodeMapping in s.
+// If s.LegacyBelongsToNodeForBackwardCompat is non empty, it will also perform node name replacement if present in nodeMapping.
+func (s *State) ApplyNodeMapping(nodeMapping map[string]string) {
+	if len(nodeMapping) == 0 {
+		return
+	}
+
+	for k, v := range s.Physical {
+		if v.LegacyBelongsToNodeForBackwardCompat != "" {
+			if newNodeName, ok := nodeMapping[v.LegacyBelongsToNodeForBackwardCompat]; ok {
+				v.LegacyBelongsToNodeForBackwardCompat = newNodeName
+			}
+		}
+
+		for i, nodeName := range v.BelongsToNodes {
+			if newNodeName, ok := nodeMapping[nodeName]; ok {
+				v.BelongsToNodes[i] = newNodeName
+			}
+		}
+
+		// v is a value copy: write the updated struct back into the map
+		s.Physical[k] = v
+	}
+}
+
+// initVirtual creates the virtual shards, places them on the hash ring and
+// computes each one's share of the token space.
+func (s *State) initVirtual() {
+	count := s.Config.DesiredVirtualCount
+	s.Virtual = make([]Virtual, count)
+
+	for i := range s.Virtual {
+		name := generateShardName()
+		h := murmur3.New64()
+		h.Write([]byte(name))
+		s.Virtual[i] = Virtual{Name: name, Upper: h.Sum64()}
+	}
+
+	sort.Slice(s.Virtual, func(a, b int) bool {
+		return s.Virtual[a].Upper < s.Virtual[b].Upper
+	})
+
+	for i := range s.Virtual {
+		var tokenCount uint64
+		if i == 0 {
+			// the first segment also owns the wrap-around range above the
+			// last virtual shard's Upper bound
+			tokenCount = s.Virtual[0].Upper + (math.MaxUint64 - s.Virtual[len(s.Virtual)-1].Upper)
+		} else {
+			tokenCount = s.Virtual[i].Upper - s.Virtual[i-1].Upper
+		}
+		s.Virtual[i].OwnsPercentage = float64(tokenCount) / float64(math.MaxUint64)
+
+	}
+}
+
+// this is a primitive distribution that only works for initializing. Once we
+// want to support dynamic sharding, we need to come up with something better
+// than this
+func (s *State) distributeVirtualAmongPhysical() {
+	ids := make([]string, len(s.Virtual))
+	for i, v := range s.Virtual {
+		ids[i] = v.Name
+	}
+
+	rand.Shuffle(len(s.Virtual), func(a, b int) {
+		ids[a], ids[b] = ids[b], ids[a]
+	})
+
+	physicalIDs := make([]string, 0, len(s.Physical))
+	for name := range s.Physical {
+		physicalIDs = append(physicalIDs, name)
+	}
+
+	// round-robin the shuffled virtual shards over the physical ones
+	for i, vid := range ids {
+		pickedPhysical := physicalIDs[i%len(physicalIDs)]
+
+		virtual := s.VirtualByName(vid)
+		virtual.AssignedToPhysical = pickedPhysical
+		physical := s.Physical[pickedPhysical]
+		physical.OwnsVirtual = append(physical.OwnsVirtual, vid)
+		physical.OwnsPercentage += virtual.OwnsPercentage
+		s.Physical[pickedPhysical] = physical
+	}
+}
+
+// VirtualByName returns a pointer into s.Virtual for the named virtual
+// shard, or nil if absent. Uses linear search, but should only be used
+// during shard init and update operations, not in regular request handling.
+func (s *State) VirtualByName(name string) *Virtual {
+	for i := range s.Virtual {
+		if s.Virtual[i].Name == name {
+			return &s.Virtual[i]
+		}
+	}
+
+	return nil
+}
+
+// virtualByToken returns the first virtual shard (in Upper-sorted order)
+// whose Upper bound covers the token, wrapping around to the first shard.
+func (s *State) virtualByToken(token uint64) *Virtual {
+	for i := range s.Virtual {
+		if token > s.Virtual[i].Upper {
+			continue
+		}
+
+		return &s.Virtual[i]
+	}
+
+	return &s.Virtual[0]
+}
+
+const shardNameChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
+
+// generateShardName returns a random alphanumeric name of shardNameLength
+// characters. Uses math/rand: names need uniqueness, not secrecy.
+func generateShardName() string {
+	b := make([]byte, shardNameLength)
+	for i := range b {
+		b[i] = shardNameChars[rand.Intn(len(shardNameChars))]
+	}
+
+	return string(b)
+}
+
+// DeepCopy returns an independent copy of the state: the Physical map and
+// Virtual slice are cloned, not shared.
+func (s State) DeepCopy() State {
+	var virtualCopy []Virtual
+
+	physicalCopy := make(map[string]Physical, len(s.Physical))
+	for name, shard := range s.Physical {
+		physicalCopy[name] = shard.DeepCopy()
+	}
+
+	if len(s.Virtual) > 0 {
+		virtualCopy = make([]Virtual, len(s.Virtual))
+	}
+	for i, virtual := range s.Virtual {
+		virtualCopy[i] = virtual.DeepCopy()
+	}
+
+	state := State{
+		localNodeName:       s.localNodeName,
+		IndexID:             s.IndexID,
+		Config:              s.Config.DeepCopy(),
+		Physical:            physicalCopy,
+		Virtual:             virtualCopy,
+		PartitioningEnabled: s.PartitioningEnabled,
+		ReplicationFactor:   s.ReplicationFactor,
+	}
+
+	// TODO: in case of error we return an empty sharding state temporarily. The plan is to remove this
+	// DeepCopy method in a followup PR.
+	err := state.MigrateShardingStateReplicationFactor()
+	if err != nil {
+		return State{}
+	}
+
+	return state
+}
+
+// DeepCopy clones the physical shard, including its slices.
+func (p Physical) DeepCopy() Physical {
+	var ownsVirtualCopy []string
+	if len(p.OwnsVirtual) > 0 {
+		ownsVirtualCopy = make([]string, len(p.OwnsVirtual))
+		copy(ownsVirtualCopy, p.OwnsVirtual)
+	}
+
+	belongsCopy := make([]string, len(p.BelongsToNodes))
+	copy(belongsCopy, p.BelongsToNodes)
+
+	return Physical{
+		Name:           p.Name,
+		OwnsVirtual:    ownsVirtualCopy,
+		OwnsPercentage: p.OwnsPercentage,
+		BelongsToNodes: belongsCopy,
+		Status:         p.Status,
+	}
+}
+
+// DeepCopy returns a copy of the virtual shard (all fields are plain values).
+func (v Virtual) DeepCopy() Virtual {
+	return Virtual{
+		Name:               v.Name,
+		Upper:              v.Upper,
+		OwnsPercentage:     v.OwnsPercentage,
+		AssignedToPhysical: v.AssignedToPhysical,
+	}
+}
diff --git a/platform/dbops/binaries/weaviate-src/usecases/sharding/state_serialization.go b/platform/dbops/binaries/weaviate-src/usecases/sharding/state_serialization.go
new file mode 100644
index 0000000000000000000000000000000000000000..6c34f7da361d864d4aa7b438b7429823859fd09c
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/usecases/sharding/state_serialization.go
@@ -0,0 +1,34 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+//
+//  CONTACT: hello@weaviate.io
+//
+
+package sharding
+
+import (
+	"encoding/json"
+
+	"github.com/weaviate/weaviate/usecases/cluster"
+)
+
+// JSON serializes the sharding state. The unexported localNodeName is not
+// part of the output (it is per-node; see the State struct comment).
+func (s *State) JSON() ([]byte, error) {
+	return json.Marshal(s)
+}
+
+// StateFromJSON deserializes a sharding state and re-attaches the local
+// node name from the given node selector.
+func StateFromJSON(in []byte, nodes cluster.NodeSelector) (*State, error) {
+	s := State{}
+
+	if err := json.Unmarshal(in, &s); err != nil {
+		return nil, err
+	}
+
+	s.localNodeName = nodes.LocalName()
+
+	return &s, nil
+}
diff --git a/platform/dbops/binaries/weaviate-src/usecases/sharding/state_test.go b/platform/dbops/binaries/weaviate-src/usecases/sharding/state_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..994e85ac9678bf95410febbe094d9c2bc7bb149c
--- /dev/null
+++ b/platform/dbops/binaries/weaviate-src/usecases/sharding/state_test.go
@@ -0,0 +1,651 @@
+//                           _       _
+// __      _____  __ ___   ___  __ _| |_ ___
+// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \
+//  \ V  V /  __/ (_| |\ V /| | (_| | ||  __/
+//   \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___|
+//
+//  Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+// +// CONTACT: hello@weaviate.io +// + +package sharding + +import ( + "crypto/rand" + "encoding/json" + "fmt" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/usecases/cluster/mocks" + "github.com/weaviate/weaviate/usecases/sharding/config" +) + +func TestState(t *testing.T) { + size := 1000 + + cfg, err := config.ParseConfig(map[string]interface{}{"desiredCount": float64(4)}, 14) + require.Nil(t, err) + + nodes := mocks.NewMockNodeSelector("node1", "node2") + state, err := InitState("my-index", cfg, nodes.LocalName(), nodes.StorageCandidates(), 1, false) + require.Nil(t, err) + + physicalCount := map[string]int{} + var names [][]byte + + for i := 0; i < size; i++ { + name := make([]byte, 16) + rand.Read(name) + names = append(names, name) + + phid := state.PhysicalShard(name) + physicalCount[phid]++ + } + + // verify each shard contains at least 15% of data. 
The expected value would + // be 25%, but since this is random, we should take a lower value to reduce + // flakyness + + for name, count := range physicalCount { + if owns := float64(count) / float64(size); owns < 0.15 { + t.Errorf("expected shard %q to own at least 15%%, but it only owns %f", name, owns) + } + } + + // Marshal and recreate, verify results + bytes, err := state.JSON() + require.Nil(t, err) + + // destroy old version + state = nil + + stateReloaded, err := StateFromJSON(bytes, nodes) + require.Nil(t, err) + + physicalCountReloaded := map[string]int{} + + // hash the same values again and verify the counts are exactly the same + for _, name := range names { + phid := stateReloaded.PhysicalShard(name) + physicalCountReloaded[phid]++ + } + + assert.Equal(t, physicalCount, physicalCountReloaded) +} + +func TestInitState(t *testing.T) { + type test struct { + nodes []string + replicationFactor int + shards int + ok bool + } + + // this tests asserts that nodes are assigned evenly with various + // combinations. 
+ + tests := []test{ + { + nodes: []string{"node1", "node2", "node3"}, + replicationFactor: 1, + shards: 3, + ok: true, + }, + { + nodes: []string{"node1", "node2", "node3"}, + replicationFactor: 2, + shards: 3, + ok: true, + }, + { + nodes: []string{"node1", "node2", "node3"}, + replicationFactor: 3, + shards: 1, + ok: true, + }, + { + nodes: []string{"node1", "node2", "node3"}, + replicationFactor: 3, + shards: 3, + ok: true, + }, + { + nodes: []string{"node1", "node2", "node3"}, + replicationFactor: 3, + shards: 2, + ok: true, + }, + { + nodes: []string{"node1", "node2", "node3", "node4", "node5", "node6"}, + replicationFactor: 4, + shards: 6, + ok: true, + }, + { + nodes: []string{"node1", "node2"}, + replicationFactor: 4, + shards: 4, + ok: false, + }, + { + nodes: []string{"node1", "node2", "node3", "node4", "node5", "node6", "node7", "node8", "node9", "node10", "node11", "node12"}, + replicationFactor: 3, + shards: 4, + ok: true, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf("Shards=%d_RF=%d", test.shards, test.replicationFactor), + func(t *testing.T) { + nodes := mocks.NewMockNodeSelector(test.nodes...) 
+ cfg, err := config.ParseConfig(map[string]interface{}{ + "desiredCount": float64(test.shards), + "replicas": float64(test.replicationFactor), + }, 3) + require.Nil(t, err) + + state, err := InitState("my-index", cfg, nodes.LocalName(), nodes.StorageCandidates(), int64(test.replicationFactor), false) + if !test.ok { + require.NotNil(t, err) + return + } + require.Nil(t, err) + + nodeCounter := map[string]int{} + actual := 0 + for _, shard := range state.Physical { + for _, node := range shard.BelongsToNodes { + nodeCounter[node]++ + actual++ + } + } + + assert.Equal(t, len(nodeCounter), len(test.nodes)) + + // assert that total no of associations is correct + desired := test.shards * test.replicationFactor + assert.Equal(t, desired, actual, "correct number of node associations") + + // assert that shards are hit evenly + expectedAssociations := test.shards * test.replicationFactor / len(test.nodes) + for _, count := range nodeCounter { + assert.Equal(t, expectedAssociations, count) + } + }) + } +} + +func TestInitStateWithZeroReplicationFactor(t *testing.T) { + nodes := mocks.NewMockNodeSelector("node1", "node2", "node3") + cfg, err := config.ParseConfig(map[string]interface{}{"desiredCount": float64(3)}, 3) + require.NoError(t, err) + + _, err = InitState("index-zero", cfg, nodes.LocalName(), nodes.StorageCandidates(), 0, false) + require.Errorf(t, err, "replication factor zero is not allowed") +} + +func TestGetPartitions(t *testing.T) { + t.Run("EmptyCandidatesList", func(t *testing.T) { + shards := []string{"H1"} + state := State{} + partitions, err := state.GetPartitions(mocks.NewMockNodeSelector().StorageCandidates(), shards, 1) + require.Nil(t, partitions) + require.ErrorContains(t, err, "empty") + }) + t.Run("NotEnoughReplicas", func(t *testing.T) { + shards := []string{"H1"} + state := State{} + partitions, err := state.GetPartitions(mocks.NewMockNodeSelector("N1").StorageCandidates(), shards, 2) + require.Nil(t, partitions) + require.ErrorContains(t, 
err, "not enough replicas") + }) + t.Run("Success/RF3", func(t *testing.T) { + nodes := mocks.NewMockNodeSelector("N1", "N2", "N3") + shards := []string{"H1", "H2", "H3", "H4", "H5"} + state := State{} + got, err := state.GetPartitions(nodes.StorageCandidates(), shards, 3) + require.Nil(t, err) + want := map[string][]string{ + "H1": {"N1", "N2", "N3"}, + "H2": {"N2", "N3", "N1"}, + "H3": {"N3", "N1", "N2"}, + "H4": {"N3", "N1", "N2"}, + "H5": {"N1", "N2", "N3"}, + } + require.Equal(t, want, got) + }) + + t.Run("Success/RF2", func(t *testing.T) { + nodes := mocks.NewMockNodeSelector("N1", "N2", "N3", "N4", "N5", "N6", "N7") + shards := []string{"H1", "H2", "H3", "H4", "H5"} + state := State{} + got, err := state.GetPartitions(nodes.StorageCandidates(), shards, 2) + require.Nil(t, err) + want := map[string][]string{ + "H1": {"N1", "N2"}, + "H2": {"N3", "N4"}, + "H3": {"N5", "N6"}, + "H4": {"N7", "N1"}, + "H5": {"N2", "N3"}, + } + require.Equal(t, want, got) + }) +} + +func TestAddPartition(t *testing.T) { + var ( + nodes1 = []string{"N", "M"} + nodes2 = []string{"L", "M", "O"} + ) + cfg, err := config.ParseConfig(map[string]interface{}{"desiredCount": float64(4)}, 14) + require.Nil(t, err) + + nodes := mocks.NewMockNodeSelector("node1", "node2") + s, err := InitState("my-index", cfg, nodes.LocalName(), nodes.StorageCandidates(), 1, true) + require.Nil(t, err) + + _, err = s.AddPartition("A", nodes1, models.TenantActivityStatusHOT) + require.NoErrorf(t, err, "unexpect error while adding partition for tenant A") + _, err = s.AddPartition("B", nodes2, models.TenantActivityStatusCOLD) + require.NoErrorf(t, err, "unexpect error while adding partition for tenant B") + + want := map[string]Physical{ + "A": {Name: "A", BelongsToNodes: nodes1, OwnsPercentage: 1, Status: models.TenantActivityStatusHOT}, + "B": {Name: "B", BelongsToNodes: nodes2, OwnsPercentage: 1, Status: models.TenantActivityStatusCOLD}, + } + require.Equal(t, want, s.Physical) +} + +func TestStateDeepCopy(t 
*testing.T) { + original := State{ + IndexID: "original", + Config: config.Config{ + VirtualPerPhysical: 1, + DesiredCount: 2, + ActualCount: 3, + DesiredVirtualCount: 4, + ActualVirtualCount: 5, + Key: "original", + Strategy: "original", + Function: "original", + }, + localNodeName: "original", + Physical: map[string]Physical{ + "physical1": { + Name: "original", + OwnsVirtual: []string{"original"}, + OwnsPercentage: 7, + BelongsToNodes: []string{"original"}, + Status: models.TenantActivityStatusHOT, + }, + }, + Virtual: []Virtual{ + { + Name: "original", + Upper: 8, + OwnsPercentage: 9, + AssignedToPhysical: "original", + }, + }, + ReplicationFactor: 3, + } + + control := State{ + IndexID: "original", + Config: config.Config{ + VirtualPerPhysical: 1, + DesiredCount: 2, + ActualCount: 3, + DesiredVirtualCount: 4, + ActualVirtualCount: 5, + Key: "original", + Strategy: "original", + Function: "original", + }, + localNodeName: "original", + Physical: map[string]Physical{ + "physical1": { + Name: "original", + OwnsVirtual: []string{"original"}, + OwnsPercentage: 7, + BelongsToNodes: []string{"original"}, + Status: models.TenantActivityStatusHOT, + }, + }, + Virtual: []Virtual{ + { + Name: "original", + Upper: 8, + OwnsPercentage: 9, + AssignedToPhysical: "original", + }, + }, + ReplicationFactor: 3, + } + + assert.Equal(t, control, original, "control matches initially") + + copied := original.DeepCopy() + assert.Equal(t, control, copied, "copy matches original") + + // modify literally every field + copied.localNodeName = "changed" + copied.IndexID = "changed" + copied.Config.VirtualPerPhysical = 11 + copied.Config.DesiredCount = 22 + copied.Config.ActualCount = 33 + copied.Config.DesiredVirtualCount = 44 + copied.Config.ActualVirtualCount = 55 + copied.Config.Key = "changed" + copied.Config.Strategy = "changed" + copied.Config.Function = "changed" + physical1 := copied.Physical["physical1"] + physical1.Name = "changed" + physical1.BelongsToNodes = 
append(physical1.BelongsToNodes, "changed") + physical1.OwnsPercentage = 100 + physical1.OwnsVirtual = append(physical1.OwnsVirtual, "changed") + physical1.Status = models.TenantActivityStatusCOLD + copied.Physical["physical1"] = physical1 + copied.Physical["physical2"] = Physical{} + copied.Virtual[0].Name = "original" + copied.Virtual[0].Upper = 8 + copied.Virtual[0].OwnsPercentage = 9 + copied.Virtual[0].AssignedToPhysical = "original" + copied.Virtual = append(copied.Virtual, Virtual{}) + + assert.Equal(t, control, original, "original still matches control even with changes in copy") +} + +func TestBackwardCompatibilityBefore1_17(t *testing.T) { + // As part of v1.17, replication is introduced and the structure of the + // physical shard is slightly changed. Instead of `belongsToNode string`, the + // association is now `belongsToNodes []string`. A migration helper was + // introduced to make sure we're backward compatible. + + oldVersion := State{ + Physical: map[string]Physical{ + "hello-replication": { + Name: "hello-replication", + LegacyBelongsToNodeForBackwardCompat: "the-best-node", + }, + }, + } + oldVersionJSON, err := json.Marshal(oldVersion) + require.Nil(t, err) + + var newVersion State + err = json.Unmarshal(oldVersionJSON, &newVersion) + require.Nil(t, err) + + newVersion.MigrateFromOldFormat() + + assert.Equal(t, []string{"the-best-node"}, + newVersion.Physical["hello-replication"].BelongsToNodes) +} + +func TestApplyNodeMapping(t *testing.T) { + type test struct { + name string + state State + control State + nodeMapping map[string]string + } + + tests := []test{ + { + name: "no mapping", + state: State{ + Physical: map[string]Physical{ + "hello-node-mapping": { + Name: "hello-node-mapping", + LegacyBelongsToNodeForBackwardCompat: "node1", + BelongsToNodes: []string{"node1"}, + }, + }, + }, + control: State{ + Physical: map[string]Physical{ + "hello-node-mapping": { + Name: "hello-node-mapping", + LegacyBelongsToNodeForBackwardCompat: "node1", + 
BelongsToNodes: []string{"node1"}, + }, + }, + }, + }, + { + name: "map one node", + state: State{ + Physical: map[string]Physical{ + "hello-node-mapping": { + Name: "hello-node-mapping", + LegacyBelongsToNodeForBackwardCompat: "node1", + BelongsToNodes: []string{"node1"}, + }, + }, + }, + control: State{ + Physical: map[string]Physical{ + "hello-node-mapping": { + Name: "hello-node-mapping", + LegacyBelongsToNodeForBackwardCompat: "new-node1", + BelongsToNodes: []string{"new-node1"}, + }, + }, + }, + nodeMapping: map[string]string{"node1": "new-node1"}, + }, + { + name: "map multiple nodes", + state: State{ + Physical: map[string]Physical{ + "hello-node-mapping": { + Name: "hello-node-mapping", + LegacyBelongsToNodeForBackwardCompat: "node1", + BelongsToNodes: []string{"node1", "node2"}, + }, + }, + }, + control: State{ + Physical: map[string]Physical{ + "hello-node-mapping": { + Name: "hello-node-mapping", + LegacyBelongsToNodeForBackwardCompat: "new-node1", + BelongsToNodes: []string{"new-node1", "new-node2"}, + }, + }, + }, + nodeMapping: map[string]string{"node1": "new-node1", "node2": "new-node2"}, + }, + { + name: "map multiple nodes with exceptions", + state: State{ + Physical: map[string]Physical{ + "hello-node-mapping": { + Name: "hello-node-mapping", + LegacyBelongsToNodeForBackwardCompat: "node1", + BelongsToNodes: []string{"node1", "node2", "node3"}, + }, + }, + }, + control: State{ + Physical: map[string]Physical{ + "hello-node-mapping": { + Name: "hello-node-mapping", + LegacyBelongsToNodeForBackwardCompat: "new-node1", + BelongsToNodes: []string{"new-node1", "new-node2", "node3"}, + }, + }, + }, + nodeMapping: map[string]string{"node1": "new-node1", "node2": "new-node2"}, + }, + { + name: "map multiple nodes with legacy exception", + state: State{ + Physical: map[string]Physical{ + "hello-node-mapping": { + Name: "hello-node-mapping", + LegacyBelongsToNodeForBackwardCompat: "node3", + BelongsToNodes: []string{"node1", "node2", "node3"}, + }, + }, + 
}, + control: State{ + Physical: map[string]Physical{ + "hello-node-mapping": { + Name: "hello-node-mapping", + LegacyBelongsToNodeForBackwardCompat: "node3", + BelongsToNodes: []string{"new-node1", "new-node2", "node3"}, + }, + }, + }, + nodeMapping: map[string]string{"node1": "new-node1", "node2": "new-node2"}, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + tc.state.ApplyNodeMapping(tc.nodeMapping) + assert.Equal(t, tc.control, tc.state) + }) + } +} + +func TestShardReplicationFactor(t *testing.T) { + t.Run("add replica", func(t *testing.T) { + nodes := mocks.NewMockNodeSelector("N1", "N2", "N3", "N4", "N5") + cfg, err := config.ParseConfig(map[string]interface{}{"desiredCount": float64(3)}, 3) + require.NoErrorf(t, err, "unexpected error while parsing config") + + state, err := InitState("my-index", cfg, nodes.LocalName(), nodes.StorageCandidates(), 2, false) + require.NoErrorf(t, err, "unexpected error while initializing state") + + shardName := state.AllPhysicalShards()[0] + numberOfReplicas, err := state.NumberOfReplicas(shardName) + require.NoErrorf(t, err, "error while getting number of replicas for shard %s", shardName) + require.Equal(t, int64(2), numberOfReplicas, "unexpected replication factor") + require.Equal(t, int64(2), state.ReplicationFactor, "unexpected minimum replication factor") + + initialReplicaCount := len(state.Physical[shardName].BelongsToNodes) + require.Equal(t, 2, initialReplicaCount) + + err = state.AddReplicaToShard(shardName, "test-replica") + require.NoErrorf(t, err, "unexpected error while adding a replica") + + numberOfReplicas, err = state.NumberOfReplicas(shardName) + require.NoErrorf(t, err, "error while getting number of replicas for shard %s", shardName) + require.Equal(t, int64(3), numberOfReplicas) + require.Equal(t, 3, len(state.Physical[shardName].BelongsToNodes)) + }) + + t.Run("add existing replica", func(t *testing.T) { + nodes := mocks.NewMockNodeSelector("N1", "N2", "N3", "N4", "N5") 
+ cfg, err := config.ParseConfig(map[string]interface{}{"desiredCount": float64(3)}, 3) + require.NoErrorf(t, err, "unexpected error while parsing config") + + state, err := InitState("my-index", cfg, nodes.LocalName(), nodes.StorageCandidates(), 2, false) + require.NoErrorf(t, err, "unexpected error while initializing state") + + shardName := state.AllPhysicalShards()[0] + numberOfReplicas, err := state.NumberOfReplicas(shardName) + require.NoErrorf(t, err, "error while getting number of replicas for shard %s", shardName) + require.Equal(t, int64(2), numberOfReplicas, "unexpected replication factor") + require.Equal(t, int64(2), state.ReplicationFactor, "unexpected minimum replication factor") + + initialReplicaCount := len(state.Physical[shardName].BelongsToNodes) + require.Equal(t, 2, initialReplicaCount) + + err = state.AddReplicaToShard(shardName, "test-replica") + require.NoErrorf(t, err, "unexpected error while adding a replica") + + numberOfReplicas, err = state.NumberOfReplicas(shardName) + require.NoErrorf(t, err, "error while getting number of replicas for shard %s", shardName) + require.Equal(t, int64(3), numberOfReplicas) + require.Equal(t, 3, len(state.Physical[shardName].BelongsToNodes)) + + err = state.AddReplicaToShard(shardName, "test-replica") + require.Errorf(t, err, "expected a failure while adding exisiting shard") + + require.Equal(t, 3, len(state.Physical[shardName].BelongsToNodes)) + require.Truef(t, strings.Contains(err.Error(), "already exists"), "expected already existing replica error") + + numberOfReplicas, err = state.NumberOfReplicas(shardName) + require.NoErrorf(t, err, "error while getting number of replicas for shard %s", shardName) + require.Equal(t, int64(3), numberOfReplicas) + }) + + t.Run("delete replica", func(t *testing.T) { + nodes := mocks.NewMockNodeSelector("N1", "N2", "N3", "N4", "N5") + cfg, err := config.ParseConfig(map[string]interface{}{"desiredCount": float64(3)}, 3) + require.NoErrorf(t, err, "unexpected error 
while parsing config") + + state, err := InitState("my-index", cfg, nodes.LocalName(), nodes.StorageCandidates(), 2, false) + require.NoErrorf(t, err, "unexpected error while initializing state") + + shardName := state.AllPhysicalShards()[0] + initialReplicaCount := len(state.Physical[shardName].BelongsToNodes) + require.Equal(t, 2, initialReplicaCount) + + err = state.AddReplicaToShard(shardName, "test-replica1") + require.NoError(t, err, "unexpected error while adding replica") + + // 3 replicas with a minimum of 2, deleting a replica is exepcted to work + replicaToDelete := state.Physical[shardName].BelongsToNodes[0] + err = state.DeleteReplicaFromShard(shardName, replicaToDelete) + require.NoErrorf(t, err, "unexpected error while deleting replica from shard") + }) + + t.Run("delete replica failure", func(t *testing.T) { + nodes := mocks.NewMockNodeSelector("N1", "N2", "N3", "N4", "N5") + cfg, err := config.ParseConfig(map[string]interface{}{"desiredCount": float64(3)}, 3) + require.NoErrorf(t, err, "unexpected error while parsing config") + + state, err := InitState("my-index", cfg, nodes.LocalName(), nodes.StorageCandidates(), 2, false) + require.NoErrorf(t, err, "unexpected error while initializing state") + + shardName := state.AllPhysicalShards()[0] + initialReplicaCount := len(state.Physical[shardName].BelongsToNodes) + require.Equal(t, 2, initialReplicaCount) + + // 2 replicas with a minimum of 2ca, deleting a replica is expected to fail + replicaToDelete := state.Physical[shardName].BelongsToNodes[0] + err = state.DeleteReplicaFromShard(shardName, replicaToDelete) + require.Errorf(t, err, "expected a failure while removing a replica") + }) + + t.Run("delete non-existing replica", func(t *testing.T) { + nodes := mocks.NewMockNodeSelector("N1", "N2", "N3", "N4", "N5") + cfg, err := config.ParseConfig(map[string]interface{}{"desiredCount": float64(3)}, 3) + require.NoErrorf(t, err, "unexpected error while parsing config") + + state, err := 
InitState("my-index", cfg, nodes.LocalName(), nodes.StorageCandidates(), 2, false) + require.NoErrorf(t, err, "unexpected error while initializing state") + + shardName := state.AllPhysicalShards()[0] + initialReplicaCount := len(state.Physical[shardName].BelongsToNodes) + require.Equal(t, 2, initialReplicaCount) + + // 3 replicas with a minimum of 3, deleting a replica is expected to fail + replicaToDelete := state.Physical[shardName].BelongsToNodes[0] + "-dummy" + err = state.DeleteReplicaFromShard(shardName, replicaToDelete) + require.Errorf(t, err, "expected a failure while removing a replica") + }) + + t.Run("deep copy", func(t *testing.T) { + original := State{ + ReplicationFactor: 2, + } + + copied := original.DeepCopy() + require.Equal(t, int64(2), copied.ReplicationFactor) + + copied.ReplicationFactor = 4 + require.Equal(t, int64(2), original.ReplicationFactor) + }) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/telemetry/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/usecases/telemetry/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..51ff63cad4efe7ab59d62b393c42911f2dfcd215 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/telemetry/fakes_for_test.go @@ -0,0 +1,48 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package telemetry + +import ( + "context" + + "github.com/stretchr/testify/mock" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +type fakeNodesStatusGetter struct { + mock.Mock +} + +func (n *fakeNodesStatusGetter) LocalNodeStatus(ctx context.Context, + className, shardName, verbosity string, +) *models.NodeStatus { + args := n.Called(ctx, className, shardName, verbosity) + if args.Get(0) != nil { + return args.Get(0).(*models.NodeStatus) + } + return nil +} + +type fakeSchemaManager struct { + mock.Mock +} + +func (f *fakeSchemaManager) GetSchemaSkipAuth() schema.Schema { + if len(f.ExpectedCalls) > 0 { + args := f.Called() + if args.Get(0) != nil { + return args.Get(0).(schema.Schema) + } + } + return schema.Schema{} +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/telemetry/payload.go b/platform/dbops/binaries/weaviate-src/usecases/telemetry/payload.go new file mode 100644 index 0000000000000000000000000000000000000000..94697ba6aecb4a48688f65915c10569551870569 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/telemetry/payload.go @@ -0,0 +1,39 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package telemetry + +import ( + "github.com/go-openapi/strfmt" +) + +// PayloadType is the discrete set of statuses which indicate the type of payload sent +var PayloadType = struct { + Init string + Update string + Terminate string +}{ + Init: "INIT", + Update: "UPDATE", + Terminate: "TERMINATE", +} + +// Payload is the object transmitted for telemetry purposes +type Payload struct { + MachineID strfmt.UUID `json:"machineId"` + Type string `json:"type"` + Version string `json:"version"` + ObjectsCount int64 `json:"objs"` + OS string `json:"os"` + Arch string `json:"arch"` + UsedModules []string `json:"usedModules,omitempty"` + CollectionsCount int `json:"collectionsCount"` +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/telemetry/telemetry.go b/platform/dbops/binaries/weaviate-src/usecases/telemetry/telemetry.go new file mode 100644 index 0000000000000000000000000000000000000000..36c63cb498e55b16249536cbf8df471b4d3806d8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/telemetry/telemetry.go @@ -0,0 +1,265 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package telemetry + +import ( + "bytes" + "context" + "encoding/base64" + "encoding/json" + "fmt" + "io" + "net/http" + "runtime" + "sort" + "strings" + "time" + + "github.com/go-openapi/strfmt" + "github.com/google/uuid" + "github.com/sirupsen/logrus" + enterrors "github.com/weaviate/weaviate/entities/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/verbosity" + "github.com/weaviate/weaviate/usecases/config" +) + +const ( + defaultConsumer = "aHR0cHM6Ly90ZWxlbWV0cnkud2Vhdmlh" + + "dGUuaW8vd2VhdmlhdGUtdGVsZW1ldHJ5" + defaultPushInterval = 24 * time.Hour +) + +type nodesStatusGetter interface { + LocalNodeStatus(ctx context.Context, className, shardName, output string) *models.NodeStatus +} + +type schemaManager interface { + GetSchemaSkipAuth() schema.Schema +} + +// Telemeter is responsible for managing the transmission of telemetry data +type Telemeter struct { + machineID strfmt.UUID + nodesStatusGetter nodesStatusGetter + schemaManager schemaManager + logger logrus.FieldLogger + shutdown chan struct{} + failedToStart bool + consumer string + pushInterval time.Duration +} + +// New creates a new Telemeter instance +func New(nodesStatusGetter nodesStatusGetter, schemaManager schemaManager, + logger logrus.FieldLogger, +) *Telemeter { + tel := &Telemeter{ + machineID: strfmt.UUID(uuid.NewString()), + nodesStatusGetter: nodesStatusGetter, + schemaManager: schemaManager, + logger: logger, + shutdown: make(chan struct{}), + consumer: defaultConsumer, + pushInterval: defaultPushInterval, + } + return tel +} + +// Start begins telemetry for the node +func (tel *Telemeter) Start(ctx context.Context) error { + payload, err := tel.push(ctx, PayloadType.Init) + if err != nil { + tel.failedToStart = true + return fmt.Errorf("push: %w", err) + } + f := func() { + t := time.NewTicker(tel.pushInterval) + defer t.Stop() + for { + select { + 
case <-tel.shutdown: + return + case <-t.C: + payload, err = tel.push(ctx, PayloadType.Update) + if err != nil { + tel.logger. + WithField("action", "telemetry_push"). + WithField("payload", fmt.Sprintf("%+v", payload)). + WithField("retry_at", time.Now().Add(tel.pushInterval).Format(time.RFC3339)). + Error(err.Error()) + continue + } + tel.logger. + WithField("action", "telemetry_push"). + WithField("payload", fmt.Sprintf("%+v", payload)). + Info("telemetry update") + } + } + } + enterrors.GoWrapper(f, tel.logger) + + tel.logger. + WithField("action", "telemetry_push"). + WithField("payload", fmt.Sprintf("%+v", payload)). + Info("telemetry started") + return nil +} + +// Stop shuts down the telemeter +func (tel *Telemeter) Stop(ctx context.Context) error { + if tel.failedToStart { + return nil + } + + select { + case <-ctx.Done(): + return fmt.Errorf("shutdown telemetry: %w", ctx.Err()) + case tel.shutdown <- struct{}{}: + payload, err := tel.push(ctx, PayloadType.Terminate) + if err != nil { + tel.logger. + WithField("action", "telemetry_push"). + WithField("payload", fmt.Sprintf("%+v", payload)). + Error(err.Error()) + return err + } + tel.logger. + WithField("action", "telemetry_push"). + WithField("payload", fmt.Sprintf("%+v", payload)). 
+ Info("telemetry terminated") + return nil + } +} + +// push sends telemetry data to the consumer url +func (tel *Telemeter) push(ctx context.Context, payloadType string) (*Payload, error) { + payload, err := tel.buildPayload(ctx, payloadType) + if err != nil { + return nil, fmt.Errorf("build payload: %w", err) + } + + b, err := json.Marshal(payload) + if err != nil { + return nil, fmt.Errorf("marshal payload: %w", err) + } + + url, err := base64.StdEncoding.DecodeString(tel.consumer) + if err != nil { + return nil, fmt.Errorf("decode url: %w", err) + } + + resp, err := http.Post(string(url), "application/json", bytes.NewReader(b)) + if err != nil { + return nil, fmt.Errorf("failed to send request: %w", err) + } + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(resp.Body) + return nil, fmt.Errorf("request unsuccessful, status code: %d, body: %s", resp.StatusCode, string(body)) + } + return payload, nil +} + +func (tel *Telemeter) buildPayload(ctx context.Context, payloadType string) (*Payload, error) { + usedMods, err := tel.getUsedModules() + if err != nil { + return nil, fmt.Errorf("get used modules: %w", err) + } + + var objs int64 + // The first payload should not include object count, + // because all the shards may not be loaded yet. 
We + // don't want to force load for telemetry alone + if payloadType != PayloadType.Init { + objs, err = tel.getObjectCount(ctx) + if err != nil { + return nil, fmt.Errorf("get object count: %w", err) + } + } + + cols, err := tel.getCollectionsCount(ctx) + if err != nil { + return nil, fmt.Errorf("get collections count: %w", err) + } + + return &Payload{ + MachineID: tel.machineID, + Type: payloadType, + Version: config.ServerVersion, + ObjectsCount: objs, + OS: runtime.GOOS, + Arch: runtime.GOARCH, + UsedModules: usedMods, + CollectionsCount: cols, + }, nil +} + +func (tel *Telemeter) getUsedModules() ([]string, error) { + sch := tel.schemaManager.GetSchemaSkipAuth() + usedModulesMap := map[string]struct{}{} + + if sch.Objects != nil { + for _, class := range sch.Objects.Classes { + if modCfg, ok := class.ModuleConfig.(map[string]interface{}); ok { + for name, cfg := range modCfg { + usedModulesMap[tel.determineModule(name, cfg)] = struct{}{} + } + } + for _, vectorConfig := range class.VectorConfig { + if modCfg, ok := vectorConfig.Vectorizer.(map[string]interface{}); ok { + for name, cfg := range modCfg { + usedModulesMap[tel.determineModule(name, cfg)] = struct{}{} + } + } + } + } + } + + var usedModules []string + for modName := range usedModulesMap { + usedModules = append(usedModules, modName) + } + sort.Strings(usedModules) + return usedModules, nil +} + +func (tel *Telemeter) determineModule(name string, cfg interface{}) string { + if strings.Contains(name, "palm") || strings.Contains(name, "google") { + if settings, ok := cfg.(map[string]interface{}); ok { + if apiEndpoint, ok := settings["apiEndpoint"]; ok { + if apiEndpointStr, ok := apiEndpoint.(string); ok && apiEndpointStr == "generativelanguage.googleapis.com" { + return fmt.Sprintf("%s-ai-studio", strings.Replace(name, "palm", "google", 1)) + } + } + } + return fmt.Sprintf("%s-vertex-ai", strings.Replace(name, "palm", "google", 1)) + } + return name +} + +func (tel *Telemeter) getObjectCount(ctx 
context.Context) (int64, error) { + status := tel.nodesStatusGetter.LocalNodeStatus(ctx, "", "", verbosity.OutputVerbose) + if status == nil || status.Stats == nil { + return 0, fmt.Errorf("received nil node stats") + } + return status.Stats.ObjectCount, nil +} + +func (tel *Telemeter) getCollectionsCount(context.Context) (int, error) { + sch := tel.schemaManager.GetSchemaSkipAuth() + if sch.Objects == nil { + return 0, nil + } + return len(sch.Objects.Classes), nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/telemetry/telemetry_test.go b/platform/dbops/binaries/weaviate-src/usecases/telemetry/telemetry_test.go new file mode 100644 index 0000000000000000000000000000000000000000..af9a03e585eb9619f81895e0c8ae6722eb535c4c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/telemetry/telemetry_test.go @@ -0,0 +1,373 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package telemetry + +import ( + "context" + "encoding/base64" + "encoding/json" + "fmt" + "io" + "net/http" + "net/http/httptest" + "runtime" + "testing" + "time" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/verbosity" + "github.com/weaviate/weaviate/usecases/config" +) + +func TestTelemetry_BuildPayload(t *testing.T) { + t.Run("happy path", func(t *testing.T) { + t.Run("on init", func(t *testing.T) { + tel, sg, sm := newTestTelemeter() + sg.On("LocalNodeStatus", context.Background(), "", "", verbosity.OutputVerbose).Return( + &models.NodeStatus{ + Stats: &models.NodeStats{ + ObjectCount: 100, + }, + }) + sm.On("GetSchemaSkipAuth").Return( + schema.Schema{ + Objects: &models.Schema{Classes: []*models.Class{ + { + Class: "GoogleModuleWithGoogleAIStudioEmptyConfig", + ModuleConfig: map[string]interface{}{ + "text2vec-google": nil, + }, + }, + { + Class: "LegacyConfiguration", + ModuleConfig: map[string]interface{}{ + "text2vec-google": map[string]interface{}{ + "modelId": "text-embedding-004", + "apiEndpoint": "generativelanguage.googleapis.com", + }, + "generative-openai": map[string]interface{}{}, + }, + }, + { + Class: "NamedVector", + VectorConfig: map[string]models.VectorConfig{ + "description": { + Vectorizer: map[string]interface{}{ + "text2vec-openai": map[string]interface{}{ + "properties": []interface{}{"description"}, + "vectorizeClassName": false, + }, + }, + VectorIndexType: "flat", + }, + }, + }, + { + Class: "NamedVectorWithNilVectorizer", + VectorConfig: map[string]models.VectorConfig{ + "description": { + Vectorizer: nil, + VectorIndexType: "flat", + }, + }, + }, + { + Class: "BothNamedVectorAndLegacyConfiguration", + ModuleConfig: map[string]interface{}{ + "generative-google": 
map[string]interface{}{ + "apiEndpoint": "generativelanguage.googleapis.com", + }, + }, + VectorConfig: map[string]models.VectorConfig{ + "description_google": { + Vectorizer: map[string]interface{}{ + "text2vec-google": map[string]interface{}{ + "properties": []interface{}{"description"}, + "vectorizeClassName": false, + }, + }, + VectorIndexType: "flat", + }, + "description_aws": { + Vectorizer: map[string]interface{}{ + "text2vec-aws": map[string]interface{}{ + "properties": []interface{}{"description"}, + "vectorizeClassName": false, + }, + }, + VectorIndexType: "flat", + }, + "description_openai": { + Vectorizer: map[string]interface{}{ + "text2vec-openai": map[string]interface{}{}, + }, + VectorIndexType: "flat", + }, + }, + }, + }}, + }) + payload, err := tel.buildPayload(context.Background(), PayloadType.Init) + assert.Nil(t, err) + assert.Equal(t, tel.machineID, payload.MachineID) + assert.Equal(t, PayloadType.Init, payload.Type) + assert.Equal(t, config.ServerVersion, payload.Version) + assert.Equal(t, int64(0), payload.ObjectsCount) + assert.Equal(t, 5, payload.CollectionsCount) + assert.Equal(t, runtime.GOOS, payload.OS) + assert.Equal(t, runtime.GOARCH, payload.Arch) + assert.NotEmpty(t, payload.UsedModules) + assert.Len(t, payload.UsedModules, 6) + assert.Contains(t, payload.UsedModules, "text2vec-aws") + assert.Contains(t, payload.UsedModules, "text2vec-openai") + assert.Contains(t, payload.UsedModules, "text2vec-google-vertex-ai") + assert.Contains(t, payload.UsedModules, "text2vec-google-ai-studio") + assert.Contains(t, payload.UsedModules, "generative-google-ai-studio") + assert.Contains(t, payload.UsedModules, "generative-openai") + }) + + t.Run("on update", func(t *testing.T) { + tel, sg, sm := newTestTelemeter() + sg.On("LocalNodeStatus", context.Background(), "", "", verbosity.OutputVerbose).Return( + &models.NodeStatus{ + Stats: &models.NodeStats{ + ObjectCount: 1000, + }, + }) + sm.On("GetSchemaSkipAuth").Return( + schema.Schema{ + Objects: 
&models.Schema{Classes: []*models.Class{ + { + Class: "Class", + ModuleConfig: map[string]interface{}{ + "generative-openai": map[string]interface{}{}, + }, + VectorConfig: map[string]models.VectorConfig{ + "description_google": { + Vectorizer: map[string]interface{}{ + "text2vec-google": map[string]interface{}{ + "properties": []interface{}{"description"}, + "vectorizeClassName": false, + }, + }, + VectorIndexType: "flat", + }, + "description_aws": { + Vectorizer: map[string]interface{}{ + "text2vec-aws": map[string]interface{}{ + "properties": []interface{}{"description"}, + "vectorizeClassName": false, + }, + }, + VectorIndexType: "flat", + }, + }, + }, + }}, + }) + payload, err := tel.buildPayload(context.Background(), PayloadType.Update) + assert.Nil(t, err) + assert.Equal(t, tel.machineID, payload.MachineID) + assert.Equal(t, PayloadType.Update, payload.Type) + assert.Equal(t, config.ServerVersion, payload.Version) + assert.Equal(t, int64(1000), payload.ObjectsCount) + assert.Equal(t, runtime.GOOS, payload.OS) + assert.Equal(t, runtime.GOARCH, payload.Arch) + assert.NotEmpty(t, payload.UsedModules) + assert.Len(t, payload.UsedModules, 3) + assert.Contains(t, payload.UsedModules, "text2vec-google-vertex-ai") + assert.Contains(t, payload.UsedModules, "text2vec-aws") + assert.Contains(t, payload.UsedModules, "generative-openai") + }) + + t.Run("on terminate", func(t *testing.T) { + tel, sg, _ := newTestTelemeter() + sg.On("LocalNodeStatus", context.Background(), "", "", verbosity.OutputVerbose).Return( + &models.NodeStatus{ + Stats: &models.NodeStats{ + ObjectCount: 300_000_000_000, + }, + }) + payload, err := tel.buildPayload(context.Background(), PayloadType.Terminate) + assert.Nil(t, err) + assert.Equal(t, tel.machineID, payload.MachineID) + assert.Equal(t, PayloadType.Terminate, payload.Type) + assert.Equal(t, config.ServerVersion, payload.Version) + assert.Equal(t, int64(300_000_000_000), payload.ObjectsCount) + assert.Equal(t, runtime.GOOS, payload.OS) + 
assert.Equal(t, runtime.GOARCH, payload.Arch) + assert.Empty(t, payload.UsedModules) + }) + }) + + t.Run("failure path", func(t *testing.T) { + t.Run("fail to get node status", func(t *testing.T) { + tel, sg, _ := newTestTelemeter() + sg.On("LocalNodeStatus", context.Background(), "", "", verbosity.OutputVerbose).Return(nil) + payload, err := tel.buildPayload(context.Background(), PayloadType.Terminate) + assert.Nil(t, payload) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "get object count") + }) + + t.Run("fail to get node status stats", func(t *testing.T) { + tel, sg, _ := newTestTelemeter() + sg.On("LocalNodeStatus", context.Background(), "", "", verbosity.OutputVerbose).Return(&models.NodeStatus{}) + payload, err := tel.buildPayload(context.Background(), PayloadType.Terminate) + assert.Nil(t, payload) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "get object count") + }) + }) +} + +func TestTelemetry_WithConsumer(t *testing.T) { + config.ServerVersion = "X.X.X" + server := httptest.NewServer(&testConsumer{t}) + defer server.Close() + + consumerURL := fmt.Sprintf("%s/weaviate-telemetry", server.URL) + opts := []telemetryOpt{ + withConsumerURL(consumerURL), + withPushInterval(100 * time.Millisecond), + } + tel, sg, sm := newTestTelemeter(opts...) 
+ + sg.On("LocalNodeStatus", context.Background(), "", "", verbosity.OutputVerbose).Return( + &models.NodeStatus{ + Stats: &models.NodeStats{ + ObjectCount: 100, + }, + }) + + sm.On("GetSchemaSkipAuth").Return( + schema.Schema{ + Objects: &models.Schema{Classes: []*models.Class{ + { + Class: "Class", + ModuleConfig: map[string]interface{}{ + "generative-openai": map[string]interface{}{}, + }, + VectorConfig: map[string]models.VectorConfig{ + "description_google": { + Vectorizer: map[string]interface{}{ + "text2vec-google": map[string]interface{}{ + "properties": []interface{}{"description"}, + "vectorizeClassName": false, + "apiEndpoint": "generativelanguage.googleapis.com", + }, + }, + VectorIndexType: "flat", + }, + "description_aws": { + Vectorizer: map[string]interface{}{ + "text2vec-aws": map[string]interface{}{ + "properties": []interface{}{"description"}, + "vectorizeClassName": false, + }, + }, + VectorIndexType: "flat", + }, + }, + }, + }}, + }) + + err := tel.Start(context.Background()) + require.Nil(t, err) + + ticker := time.NewTicker(100 * time.Millisecond) + start := time.Now() + wait := make(chan struct{}) + go func() { + for range ticker.C { + if time.Since(start) > time.Second { + err = tel.Stop(context.Background()) + assert.Nil(t, err) + wait <- struct{}{} + } + } + }() + <-wait +} + +type telemetryOpt func(*Telemeter) + +func withConsumerURL(url string) telemetryOpt { + encoded := base64.StdEncoding.EncodeToString([]byte(url)) + return func(tel *Telemeter) { + tel.consumer = encoded + } +} + +func withPushInterval(interval time.Duration) telemetryOpt { + return func(tel *Telemeter) { + tel.pushInterval = interval + } +} + +func newTestTelemeter(opts ...telemetryOpt, +) (*Telemeter, *fakeNodesStatusGetter, *fakeSchemaManager, +) { + sg := &fakeNodesStatusGetter{} + sm := &fakeSchemaManager{} + logger, _ := test.NewNullLogger() + tel := New(sg, sm, logger) + for _, opt := range opts { + opt(tel) + } + return tel, sg, sm +} + +type testConsumer 
struct { + t *testing.T +} + +func (h *testConsumer) ServeHTTP(w http.ResponseWriter, r *http.Request) { + assert.Equal(h.t, "/weaviate-telemetry", r.URL.String()) + assert.Equal(h.t, http.MethodPost, r.Method) + b, err := io.ReadAll(r.Body) + defer r.Body.Close() + require.Nil(h.t, err) + + var payload Payload + err = json.Unmarshal(b, &payload) + require.Nil(h.t, err) + + assert.NotEmpty(h.t, payload.MachineID) + assert.Contains(h.t, []string{ + PayloadType.Init, + PayloadType.Update, + PayloadType.Terminate, + }, payload.Type) + assert.Equal(h.t, config.ServerVersion, payload.Version) + if payload.Type == PayloadType.Init { + assert.Zero(h.t, payload.ObjectsCount) + } else { + assert.NotZero(h.t, payload.ObjectsCount) + } + assert.Equal(h.t, runtime.GOOS, payload.OS) + assert.Equal(h.t, runtime.GOARCH, payload.Arch) + assert.NotEmpty(h.t, payload.CollectionsCount) + assert.NotEmpty(h.t, payload.UsedModules) + assert.Len(h.t, payload.UsedModules, 3) + assert.Contains(h.t, payload.UsedModules, "text2vec-google-ai-studio") + assert.Contains(h.t, payload.UsedModules, "text2vec-aws") + assert.Contains(h.t, payload.UsedModules, "generative-openai") + + h.t.Logf("request body: %s", string(b)) + w.WriteHeader(http.StatusOK) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/aggregate_type_inspector.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/aggregate_type_inspector.go new file mode 100644 index 0000000000000000000000000000000000000000..e313d96e5e03512dba008f075cb656c797f5331a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/aggregate_type_inspector.go @@ -0,0 +1,102 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "fmt" + + "github.com/weaviate/weaviate/entities/aggregation" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" +) + +type typeInspector struct { + getClass func(string) *models.Class +} + +func newTypeInspector(getClass func(string) *models.Class) *typeInspector { + return &typeInspector{ + getClass: getClass, + } +} + +func (i *typeInspector) WithTypes(res *aggregation.Result, params aggregation.Params) (*aggregation.Result, error) { + if res == nil { + return nil, nil + } + + for _, prop := range params.Properties { + if !i.hasTypeAggregator(prop.Aggregators) { + // nothing to do for us + continue + } + + class := i.getClass(params.ClassName.String()) + if class == nil { + return nil, fmt.Errorf("could not find class %s in schema", params.ClassName) + } + + schemaProp, err := schema.GetPropertyByName(class, prop.Name.String()) + if err != nil { + return nil, err + } + + err = i.extendResWithType(res, prop.Name.String(), schemaProp.DataType) + if err != nil { + return nil, fmt.Errorf("with types: prop %s: %w", prop.Name, err) + } + } + + return res, nil +} + +func (i *typeInspector) hasTypeAggregator(aggs []aggregation.Aggregator) bool { + for _, agg := range aggs { + if agg == aggregation.TypeAggregator { + return true + } + } + + return false +} + +func (i *typeInspector) extendResWithType(res *aggregation.Result, propName string, dataType []string) error { + for groupIndex, group := range res.Groups { + prop, ok := group.Properties[propName] + if !ok { + prop = aggregation.Property{} + } + + propType, err := schema.FindPropertyDataTypeWithRefs(i.getClass, dataType, false, "") + if err != nil { + return err + } + + if propType.IsPrimitive() { + prop.SchemaType = string(propType.AsPrimitive()) + } else if propType.IsNested() { // TODO nested -> check if sufficient just schematype + prop.SchemaType = string(propType.AsNested()) + } else { + 
prop.Type = aggregation.PropertyTypeReference + prop.SchemaType = string(schema.DataTypeCRef) + prop.ReferenceAggregation.PointingTo = dataType + if res.Groups[groupIndex].Properties == nil { + // prevent nil pointer panic + res.Groups[groupIndex].Properties = map[string]aggregation.Property{} + } + } + + res.Groups[groupIndex].Properties[propName] = prop + } + + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer.go new file mode 100644 index 0000000000000000000000000000000000000000..f9d0a2b1c11f07b91a0be0984a254d60ca691636 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer.go @@ -0,0 +1,935 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + "fmt" + "runtime" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema/configvalidation" + + enterrors "github.com/weaviate/weaviate/entities/errors" + + "github.com/go-openapi/strfmt" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/autocut" + "github.com/weaviate/weaviate/entities/dto" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/inverted" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/schema/crossref" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/searchparams" + "github.com/weaviate/weaviate/entities/storobj" + "github.com/weaviate/weaviate/usecases/config" + 
"github.com/weaviate/weaviate/usecases/floatcomp" + "github.com/weaviate/weaviate/usecases/modulecomponents/generictypes" + uc "github.com/weaviate/weaviate/usecases/schema" + "github.com/weaviate/weaviate/usecases/traverser/grouper" +) + +var _NUMCPU = runtime.GOMAXPROCS(0) + +// Explorer is a helper construct to perform vector-based searches. It does not +// contain monitoring or authorization checks. It should thus never be directly +// used by an API, but through a Traverser. +type Explorer struct { + searcher objectsSearcher + logger logrus.FieldLogger + modulesProvider ModulesProvider + schemaGetter uc.SchemaGetter + nearParamsVector *nearParamsVector + targetParamHelper *TargetVectorParamHelper + metrics explorerMetrics + config config.Config +} + +type explorerMetrics interface { + AddUsageDimensions(className, queryType, operation string, dims int) +} + +type ModulesProvider interface { + ValidateSearchParam(name string, value interface{}, className string) error + CrossClassValidateSearchParam(name string, value interface{}) error + IsTargetVectorMultiVector(className, targetVector string) (bool, error) + VectorFromSearchParam(ctx context.Context, className, targetVector, tenant, param string, params interface{}, + findVectorFn modulecapabilities.FindVectorFn[[]float32]) ([]float32, error) + MultiVectorFromSearchParam(ctx context.Context, className, targetVector, tenant, param string, params interface{}, + findVectorFn modulecapabilities.FindVectorFn[[][]float32]) ([][]float32, error) + TargetsFromSearchParam(className string, params interface{}) ([]string, error) + CrossClassVectorFromSearchParam(ctx context.Context, param string, + params interface{}, findVectorFn modulecapabilities.FindVectorFn[[]float32]) ([]float32, string, error) + MultiCrossClassVectorFromSearchParam(ctx context.Context, param string, + params interface{}, findVectorFn modulecapabilities.FindVectorFn[[][]float32]) ([][]float32, string, error) + GetExploreAdditionalExtend(ctx 
context.Context, in []search.Result, + moduleParams map[string]interface{}, searchVector models.Vector, + argumentModuleParams map[string]interface{}) ([]search.Result, error) + ListExploreAdditionalExtend(ctx context.Context, in []search.Result, + moduleParams map[string]interface{}, + argumentModuleParams map[string]interface{}) ([]search.Result, error) + VectorFromInput(ctx context.Context, className, input, targetVector string) ([]float32, error) + MultiVectorFromInput(ctx context.Context, className, input, targetVector string) ([][]float32, error) +} + +type objectsSearcher interface { + hybridSearcher + + // GraphQL Get{} queries + Search(ctx context.Context, params dto.GetParams) ([]search.Result, error) + VectorSearch(ctx context.Context, params dto.GetParams, targetVectors []string, searchVectors []models.Vector) ([]search.Result, error) + + // GraphQL Explore{} queries + CrossClassVectorSearch(ctx context.Context, vector models.Vector, targetVector string, offset, limit int, + filters *filters.LocalFilter) ([]search.Result, error) + + // Near-params searcher + Object(ctx context.Context, className string, id strfmt.UUID, + props search.SelectProperties, additional additional.Properties, + properties *additional.ReplicationProperties, tenant string) (*search.Result, error) + ObjectsByID(ctx context.Context, id strfmt.UUID, props search.SelectProperties, additional additional.Properties, tenant string) (search.Results, error) +} + +type hybridSearcher interface { + SparseObjectSearch(ctx context.Context, params dto.GetParams) ([]*storobj.Object, []float32, error) + ResolveReferences(ctx context.Context, objs search.Results, props search.SelectProperties, + groupBy *searchparams.GroupBy, additional additional.Properties, tenant string) (search.Results, error) +} + +// NewExplorer with search and connector repo +func NewExplorer(searcher objectsSearcher, logger logrus.FieldLogger, modulesProvider ModulesProvider, metrics explorerMetrics, conf config.Config) 
*Explorer { + return &Explorer{ + searcher: searcher, + logger: logger, + modulesProvider: modulesProvider, + metrics: metrics, + schemaGetter: nil, // schemaGetter is set later + nearParamsVector: newNearParamsVector(modulesProvider, searcher), + targetParamHelper: NewTargetParamHelper(), + config: conf, + } +} + +func (e *Explorer) SetSchemaGetter(sg uc.SchemaGetter) { + e.schemaGetter = sg +} + +// GetClass from search and connector repo +func (e *Explorer) GetClass(ctx context.Context, + params dto.GetParams, +) ([]interface{}, error) { + if params.Pagination == nil { + params.Pagination = &filters.Pagination{ + Offset: 0, + Limit: int(e.config.QueryDefaults.LimitGraphQL), + } + } + + if err := e.validateSort(params.ClassName, params.Sort); err != nil { + return nil, errors.Wrap(err, "invalid 'sort' parameter") + } + + if err := e.validateCursor(params); err != nil { + return nil, errors.Wrap(err, "cursor api: invalid 'after' parameter") + } + + if params.KeywordRanking != nil { + res, err := e.getClassKeywordBased(ctx, params) + if err != nil { + return nil, err + } + return e.searchResultsToGetResponse(ctx, res, nil, params) + } + + if params.NearVector != nil || params.NearObject != nil || len(params.ModuleParams) > 0 { + res, searchVector, err := e.getClassVectorSearch(ctx, params) + if err != nil { + return nil, err + } + return e.searchResultsToGetResponse(ctx, res, searchVector, params) + } + + res, err := e.getClassList(ctx, params) + if err != nil { + return nil, err + } + return e.searchResultsToGetResponse(ctx, res, nil, params) +} + +func (e *Explorer) getClassKeywordBased(ctx context.Context, params dto.GetParams) ([]search.Result, error) { + if params.NearVector != nil || params.NearObject != nil || len(params.ModuleParams) > 0 { + return nil, errors.Errorf("conflict: both near and keyword-based (bm25) arguments present, choose one") + } + + if len(params.KeywordRanking.Query) == 0 { + return nil, errors.Errorf("keyword search (bm25) must have 
query set") + } + + if len(params.AdditionalProperties.ModuleParams) > 0 { + // if a module-specific additional prop is set, assume it needs the vector + // present for backward-compatibility. This could be improved by actually + // asking the module based on specific conditions + params.AdditionalProperties.Vector = true + } + + res, err := e.searcher.Search(ctx, params) + if err != nil { + var e inverted.MissingIndexError + if errors.As(err, &e) { + return nil, e + } + return nil, errors.Errorf("explorer: get class: vector search: %v", err) + } + + if e.modulesProvider != nil { + res, err = e.modulesProvider.GetExploreAdditionalExtend(ctx, res, params.AdditionalProperties.ModuleParams, nil, params.ModuleParams) + if err != nil { + return nil, errors.Errorf("explorer: get class: extend: %v", err) + } + } + + if params.GroupBy != nil { + groupedResults, err := e.groupSearchResults(ctx, res, params.GroupBy) + if err != nil { + return nil, err + } + return groupedResults, nil + } + return res, nil +} + +func (e *Explorer) getClassVectorSearch(ctx context.Context, + params dto.GetParams, +) ([]search.Result, models.Vector, error) { + targetVectors, err := e.targetFromParams(ctx, params) + if err != nil { + return nil, nil, errors.Errorf("explorer: get class: vectorize params: %v", err) + } + + targetVectors, err = e.targetParamHelper.GetTargetVectorOrDefault(e.schemaGetter.GetSchemaSkipAuth(), + params.ClassName, targetVectors) + if err != nil { + return nil, nil, errors.Errorf("explorer: get class: validate target vector: %v", err) + } + + res, searchVectors, err := e.searchForTargets(ctx, params, targetVectors, nil) + if err != nil { + return nil, nil, errors.Wrap(err, "explorer: get class: concurrentTargetVectorSearch)") + } + + if len(searchVectors) > 0 { + return res, searchVectors[0], nil + } + return res, []float32{}, nil +} + +func (e *Explorer) searchForTargets(ctx context.Context, params dto.GetParams, targetVectors []string, searchVectorParams 
*searchparams.NearVector) ([]search.Result, []models.Vector, error) { + var err error + searchVectors := make([]models.Vector, len(targetVectors)) + eg := enterrors.NewErrorGroupWrapper(e.logger) + eg.SetLimit(2 * _NUMCPU) + for i := range targetVectors { + i := i + eg.Go(func() error { + var searchVectorParam *searchparams.NearVector + if params.NearVector != nil { + searchVectorParam = params.NearVector + } else if searchVectorParams != nil { + searchVectorParam = searchVectorParams + } + + vec, err := e.vectorFromParamsForTarget(ctx, searchVectorParam, params.NearObject, params.ModuleParams, params.ClassName, params.Tenant, targetVectors[i], i) + if err != nil { + return errors.Errorf("explorer: get class: vectorize search vector: %v", err) + } + searchVectors[i] = vec + return nil + }) + } + + if err := eg.Wait(); err != nil { + return nil, nil, err + } + + if len(params.AdditionalProperties.ModuleParams) > 0 || params.Group != nil { + // if a module-specific additional prop is set, assume it needs the vector + // present for backward-compatibility. 
This could be improved by actually + // asking the module based on specific conditions + // if a group is set, vectors are needed + params.AdditionalProperties.Vector = true + } + + res, err := e.searcher.VectorSearch(ctx, params, targetVectors, searchVectors) + if err != nil { + return nil, nil, errors.Errorf("explorer: get class: vector search: %v", err) + } + + if params.Pagination.Autocut > 0 { + scores := make([]float32, len(res)) + for i := range res { + scores[i] = res[i].Dist + } + cutOff := autocut.Autocut(scores, params.Pagination.Autocut) + res = res[:cutOff] + } + + if params.Group != nil { + grouped, err := grouper.New(e.logger).Group(res, params.Group.Strategy, params.Group.Force) + if err != nil { + return nil, nil, errors.Errorf("grouper: %v", err) + } + + res = grouped + } + + if e.modulesProvider != nil { + res, err = e.modulesProvider.GetExploreAdditionalExtend(ctx, res, + params.AdditionalProperties.ModuleParams, searchVectors[0], params.ModuleParams) + if err != nil { + return nil, nil, errors.Errorf("explorer: get class: extend: %v", err) + } + } + e.trackUsageGet(res, params) + + return res, searchVectors, nil +} + +func MinInt(ints ...int) int { + min := ints[0] + for _, i := range ints { + if i < min { + min = i + } + } + return min +} + +func MaxInt(ints ...int) int { + max := ints[0] + for _, i := range ints { + if i > max { + max = i + } + } + return max +} + +func (e *Explorer) CalculateTotalLimit(pagination *filters.Pagination) (int, error) { + if pagination == nil { + return 0, fmt.Errorf("invalid params, pagination object is nil") + } + + if pagination.Limit == -1 { + return int(e.config.QueryDefaults.Limit + int64(pagination.Offset)), nil + } + + totalLimit := pagination.Offset + pagination.Limit + + return MinInt(totalLimit, int(e.config.QueryMaximumResults)), nil +} + +func (e *Explorer) getClassList(ctx context.Context, + params dto.GetParams, +) ([]search.Result, error) { + // we will modify the params because of the workaround 
outlined below, + // however, we only want to track what the user actually set for the usage + // metrics, not our own workaround, so here's a copy of the original user + // input + userSetAdditionalVector := params.AdditionalProperties.Vector + + // if both grouping and whereFilter/sort are present, the below + // class search will eventually call storobj.FromBinaryOptional + // to unmarshal the record. in this case, we must manually set + // the vector addl prop to unmarshal the result vector into each + // result payload. if we skip this step, the grouper will attempt + // to compute the distance with a `nil` vector, resulting in NaN. + // this was the cause of [github issue 1958] + // (https://github.com/weaviate/weaviate/issues/1958) + if params.Group != nil && (params.Filters != nil || params.Sort != nil) { + params.AdditionalProperties.Vector = true + } + var res []search.Result + var err error + if params.HybridSearch != nil { + res, err = e.Hybrid(ctx, params) + if err != nil { + return nil, err + } + } else { + res, err = e.searcher.Search(ctx, params) + if err != nil { + var e inverted.MissingIndexError + if errors.As(err, &e) { + return nil, e + } + return nil, fmt.Errorf("explorer: list class: search: %w", err) + } + } + + if params.Group != nil { + grouped, err := grouper.New(e.logger).Group(res, params.Group.Strategy, params.Group.Force) + if err != nil { + return nil, errors.Errorf("grouper: %v", err) + } + + res = grouped + } + + if e.modulesProvider != nil { + + res, err = e.modulesProvider.ListExploreAdditionalExtend(ctx, res, + params.AdditionalProperties.ModuleParams, params.ModuleParams) + if err != nil { + return nil, errors.Errorf("explorer: list class: extend: %v", err) + } + } + + if userSetAdditionalVector { + e.trackUsageGetExplicitVector(res, params) + } + + return res, nil +} + +func (e *Explorer) searchResultsToGetResponse(ctx context.Context, input []search.Result, searchVector models.Vector, params dto.GetParams) ([]interface{}, 
error) { + output := make([]interface{}, 0, len(input)) + results, err := e.searchResultsToGetResponseWithType(ctx, input, searchVector, params) + if err != nil { + return nil, err + } + + if params.GroupBy != nil { + for _, result := range results { + wrapper := map[string]interface{}{} + wrapper["_additional"] = result.AdditionalProperties + output = append(output, wrapper) + } + } else { + for _, result := range results { + output = append(output, result.Schema) + } + } + return output, nil +} + +func (e *Explorer) searchResultsToGetResponseWithType(ctx context.Context, input []search.Result, searchVector models.Vector, params dto.GetParams) ([]search.Result, error) { + var output []search.Result + replEnabled, err := e.replicationEnabled(params) + if err != nil { + return nil, fmt.Errorf("search results to get response: %w", err) + } + for _, res := range input { + if ctx.Err() != nil { + return nil, ctx.Err() + } + additionalProperties := make(map[string]interface{}) + + if res.AdditionalProperties != nil { + for additionalProperty, value := range res.AdditionalProperties { + if value != nil { + additionalProperties[additionalProperty] = value + } + } + } + + if searchVector != nil { + // Dist is between 0..2, we need to reduce to the user space of 0..1 + normalizedResultDist := res.Dist / 2 + + certainty := ExtractCertaintyFromParams(params) + if 1-(normalizedResultDist) < float32(certainty) && 1-normalizedResultDist >= 0 { + // TODO: Clean this up. The >= check is so that this logic does not run + // non-cosine distance. 
+ continue + } + + if certainty == 0 { + distance, withDistance := ExtractDistanceFromParams(params) + if withDistance && (!floatcomp.InDelta(float64(res.Dist), distance, 1e-6) && + float64(res.Dist) > distance) { + continue + } + } + + if params.AdditionalProperties.Certainty { + targetVectors := e.targetParamHelper.GetTargetVectorsFromParams(params) + class := e.schemaGetter.ReadOnlyClass(params.ClassName) + if err := configvalidation.CheckCertaintyCompatibility(class, targetVectors); err != nil { + return nil, errors.Errorf("additional: %s for class: %v", err, params.ClassName) + } + + additionalProperties["certainty"] = additional.DistToCertainty(float64(res.Dist)) + } + + if params.AdditionalProperties.Distance { + additionalProperties["distance"] = res.Dist + } + } + + if params.AdditionalProperties.ID { + additionalProperties["id"] = res.ID + } + + if params.AdditionalProperties.Score { + additionalProperties["score"] = res.Score + } + + if params.AdditionalProperties.ExplainScore { + additionalProperties["explainScore"] = res.ExplainScore + } + + if params.AdditionalProperties.Vector { + additionalProperties["vector"] = res.Vector + } + + if len(params.AdditionalProperties.Vectors) > 0 { + vectors := make(map[string]models.Vector) + for _, targetVector := range params.AdditionalProperties.Vectors { + vectors[targetVector] = res.Vectors[targetVector] + } + additionalProperties["vectors"] = vectors + } + + if params.AdditionalProperties.CreationTimeUnix { + additionalProperties["creationTimeUnix"] = res.Created + } + + if params.AdditionalProperties.LastUpdateTimeUnix { + additionalProperties["lastUpdateTimeUnix"] = res.Updated + } + + if replEnabled { + additionalProperties["isConsistent"] = res.IsConsistent + } + + if len(additionalProperties) > 0 { + if additionalProperties["group"] != nil { + e.extractAdditionalPropertiesFromGroupRefs(additionalProperties["group"], params.GroupBy.Properties) + } + res.Schema.(map[string]interface{})["_additional"] = 
additionalProperties + } + + e.extractAdditionalPropertiesFromRefs(res.Schema, params.Properties) + + output = append(output, res) + } + + return output, nil +} + +func (e *Explorer) extractAdditionalPropertiesFromGroupRefs( + additionalGroup interface{}, + props search.SelectProperties, +) { + if group, ok := additionalGroup.(*additional.Group); ok { + if len(group.Hits) > 0 { + for _, hit := range group.Hits { + e.extractAdditionalPropertiesFromRefs(hit, props) + } + } + } +} + +func (e *Explorer) extractAdditionalPropertiesFromRefs(propertySchema interface{}, params search.SelectProperties) { + for _, selectProp := range params { + for _, refClass := range selectProp.Refs { + propertySchemaMap, ok := propertySchema.(map[string]interface{}) + if ok { + refProperty := propertySchemaMap[selectProp.Name] + if refProperty != nil { + e.extractAdditionalPropertiesFromRef(refProperty, refClass) + } + } + if refClass.RefProperties != nil { + propertySchemaMap, ok := propertySchema.(map[string]interface{}) + if ok { + innerPropertySchema := propertySchemaMap[selectProp.Name] + if innerPropertySchema != nil { + innerRef, ok := innerPropertySchema.([]interface{}) + if ok { + for _, props := range innerRef { + innerRefSchema, ok := props.(search.LocalRef) + if ok { + e.extractAdditionalPropertiesFromRefs(innerRefSchema.Fields, refClass.RefProperties) + } + } + } + } + } + } + } + } +} + +func (e *Explorer) extractAdditionalPropertiesFromRef(ref interface{}, + refClass search.SelectClass, +) { + innerRefClass, ok := ref.([]interface{}) + if ok { + for _, innerRefProp := range innerRefClass { + innerRef, ok := innerRefProp.(search.LocalRef) + if !ok { + continue + } + if innerRef.Class == refClass.ClassName { + additionalProperties := make(map[string]interface{}) + if refClass.AdditionalProperties.ID { + additionalProperties["id"] = innerRef.Fields["id"] + } + if refClass.AdditionalProperties.Vector { + additionalProperties["vector"] = innerRef.Fields["vector"] + } + if 
len(refClass.AdditionalProperties.Vectors) > 0 { + additionalProperties["vectors"] = innerRef.Fields["vectors"] + } + if refClass.AdditionalProperties.CreationTimeUnix { + additionalProperties["creationTimeUnix"] = innerRef.Fields["creationTimeUnix"] + } + if refClass.AdditionalProperties.LastUpdateTimeUnix { + additionalProperties["lastUpdateTimeUnix"] = innerRef.Fields["lastUpdateTimeUnix"] + } + if len(additionalProperties) > 0 { + innerRef.Fields["_additional"] = additionalProperties + } + } + } + } +} + +func (e *Explorer) CrossClassVectorSearch(ctx context.Context, + params ExploreParams, +) ([]search.Result, error) { + if err := e.validateExploreParams(params); err != nil { + return nil, errors.Wrap(err, "invalid params") + } + + vector, targetVector, err := e.vectorFromExploreParams(ctx, params) + if err != nil { + return nil, errors.Errorf("vectorize params: %v", err) + } + + res, err := e.searcher.CrossClassVectorSearch(ctx, vector, targetVector, params.Offset, params.Limit, nil) + if err != nil { + return nil, errors.Errorf("vector search: %v", err) + } + + e.trackUsageExplore(res, params) + + results := []search.Result{} + for _, item := range res { + item.Beacon = crossref.NewLocalhost(item.ClassName, item.ID).String() + err = e.appendResultsIfSimilarityThresholdMet(item, &results, params) + if err != nil { + return nil, errors.Errorf("append results based on similarity: %s", err) + } + } + + return results, nil +} + +func (e *Explorer) appendResultsIfSimilarityThresholdMet(item search.Result, + results *[]search.Result, params ExploreParams, +) error { + distance, withDistance := extractDistanceFromExploreParams(params) + certainty := extractCertaintyFromExploreParams(params) + + if withDistance && (floatcomp.InDelta(float64(item.Dist), distance, 1e-6) || + item.Dist <= float32(distance)) { + *results = append(*results, item) + } else if certainty != 0 && item.Certainty >= float32(certainty) { + *results = append(*results, item) + } else if 
!withDistance && certainty == 0 { + *results = append(*results, item) + } + + return nil +} + +func (e *Explorer) validateExploreParams(params ExploreParams) error { + if params.NearVector == nil && params.NearObject == nil && len(params.ModuleParams) == 0 { + return errors.Errorf("received no search params, one of [nearVector, nearObject] " + + "or module search params is required for an exploration") + } + + return nil +} + +func (e *Explorer) targetFromParams(ctx context.Context, + params dto.GetParams, +) ([]string, error) { + return e.nearParamsVector.targetFromParams(ctx, params.NearVector, + params.NearObject, params.ModuleParams, params.ClassName, params.Tenant) +} + +func (e *Explorer) vectorFromParamsForTarget(ctx context.Context, + nv *searchparams.NearVector, no *searchparams.NearObject, moduleParams map[string]interface{}, className, tenant, target string, index int, +) (models.Vector, error) { + return e.nearParamsVector.vectorFromParams(ctx, nv, no, moduleParams, className, tenant, target, index) +} + +func (e *Explorer) vectorFromExploreParams(ctx context.Context, + params ExploreParams, +) (models.Vector, string, error) { + err := e.nearParamsVector.validateNearParams(params.NearVector, params.NearObject, params.ModuleParams) + if err != nil { + return nil, "", err + } + + if len(params.ModuleParams) == 1 { + for name, value := range params.ModuleParams { + return e.crossClassVectorFromModules(ctx, name, value) + } + } + + if params.NearVector != nil { + targetVector := "" + if len(params.NearVector.TargetVectors) == 1 { + targetVector = params.NearVector.TargetVectors[0] + } + return params.NearVector.Vectors[0], targetVector, nil + } + + if params.NearObject != nil { + // TODO: cross class + vector, targetVector, err := e.nearParamsVector.crossClassVectorFromNearObjectParams(ctx, params.NearObject) + if err != nil { + return nil, "", errors.Errorf("nearObject params: %v", err) + } + + return vector, targetVector, nil + } + + // either nearObject 
or nearVector or module search param has to be set,
	// so if we land here, something has gone very wrong
	panic("vectorFromExploreParams was called without any known params present")
}

// crossClassVectorFromModules vectorizes a module search param without being
// bound to a single class; similar to vectorFromModules, but cross-class.
func (e *Explorer) crossClassVectorFromModules(ctx context.Context,
	paramName string, paramValue interface{},
) ([]float32, string, error) {
	if e.modulesProvider == nil {
		return nil, "", errors.New("no modules defined")
	}

	vector, targetVector, err := e.modulesProvider.CrossClassVectorFromSearchParam(ctx,
		paramName, paramValue, generictypes.FindVectorFn(e.nearParamsVector.findVector),
	)
	if err != nil {
		return nil, "", errors.Errorf("vectorize params: %v", err)
	}

	return vector, targetVector, nil
}

// GetSchema returns the current schema without performing an auth check.
func (e *Explorer) GetSchema() schema.Schema {
	return e.schemaGetter.GetSchemaSkipAuth()
}

// replicationEnabled reports whether the class addressed by params is
// configured with a replication factor greater than one.
func (e *Explorer) replicationEnabled(params dto.GetParams) (bool, error) {
	if e.schemaGetter == nil {
		return false, fmt.Errorf("schemaGetter not set")
	}

	class := e.schemaGetter.ReadOnlyClass(params.ClassName)
	if class == nil {
		return false, fmt.Errorf("class not found in schema: %q", params.ClassName)
	}

	cfg := class.ReplicationConfig
	return cfg != nil && cfg.Factor > 1, nil
}

// ExtractDistanceFromParams returns the distance threshold of whichever
// near-param is present on a Get{} query, and whether one was set at all.
func ExtractDistanceFromParams(params dto.GetParams) (distance float64, withDistance bool) {
	if nv := params.NearVector; nv != nil {
		return nv.Distance, nv.WithDistance
	}
	if no := params.NearObject; no != nil {
		return no.Distance, no.WithDistance
	}
	if hs := params.HybridSearch; hs != nil {
		if nt := hs.NearTextParams; nt != nil {
			return nt.Distance, nt.WithDistance
		}
		if nv := hs.NearVectorParams; nv != nil {
			return nv.Distance, nv.WithDistance
		}
	}
	if len(params.ModuleParams) == 1 {
		return extractDistanceFromModuleParams(params.ModuleParams)
	}
	return 0, false
}

// ExtractCertaintyFromParams returns the certainty threshold of whichever
// near-param is present on a Get{} query (0 when none is set).
func ExtractCertaintyFromParams(params dto.GetParams) (certainty float64) {
	switch {
	case params.NearVector != nil:
		return params.NearVector.Certainty
	case params.NearObject != nil:
		return params.NearObject.Certainty
	case len(params.ModuleParams) == 1:
		return extractCertaintyFromModuleParams(params.ModuleParams)
	}
	return 0
}

// extractCertaintyFromExploreParams is the Explore{} counterpart of
// ExtractCertaintyFromParams.
func extractCertaintyFromExploreParams(params ExploreParams) (certainty float64) {
	switch {
	case params.NearVector != nil:
		return params.NearVector.Certainty
	case params.NearObject != nil:
		return params.NearObject.Certainty
	case len(params.ModuleParams) == 1:
		return extractCertaintyFromModuleParams(params.ModuleParams)
	}
	return 0
}

// extractDistanceFromExploreParams is the Explore{} counterpart of
// ExtractDistanceFromParams.
func extractDistanceFromExploreParams(params ExploreParams) (distance float64, withDistance bool) {
	if nv := params.NearVector; nv != nil {
		return nv.Distance, nv.WithDistance
	}
	if no := params.NearObject; no != nil {
		return no.Distance, no.WithDistance
	}
	if len(params.ModuleParams) == 1 {
		return extractDistanceFromModuleParams(params.ModuleParams)
	}
	return 0, false
}

// extractCertaintyFromModuleParams returns the first non-zero certainty found
// on a module near-param, or 0 when no module param provides one.
func extractCertaintyFromModuleParams(moduleParams map[string]interface{}) float64 {
	for _, param := range moduleParams {
		nearParam, ok := param.(modulecapabilities.NearParam)
		if !ok || !nearParam.SimilarityMetricProvided() {
			continue
		}
		if certainty := nearParam.GetCertainty(); certainty != 0 {
			return certainty
		}
	}

	return 0
}

// extractDistanceFromModuleParams returns the distance of the first module
// near-param that provides a similarity metric; when that param carries a
// certainty instead, no distance is reported.
func extractDistanceFromModuleParams(moduleParams map[string]interface{}) (distance float64, withDistance bool) {
	for _, param := range
moduleParams { + if nearParam, ok := param.(modulecapabilities.NearParam); ok { + if nearParam.SimilarityMetricProvided() { + if certainty := nearParam.GetCertainty(); certainty != 0 { + distance, withDistance = 0, false + return + } + distance, withDistance = nearParam.GetDistance(), true + return + } + } + } + + return +} + +func (e *Explorer) trackUsageGet(res search.Results, params dto.GetParams) { + if len(res) == 0 { + return + } + + op := e.usageOperationFromGetParams(params) + if e.metrics != nil { + e.metrics.AddUsageDimensions(params.ClassName, "get_graphql", op, res[0].Dims) + } +} + +func (e *Explorer) trackUsageGetExplicitVector(res search.Results, params dto.GetParams) { + if len(res) == 0 { + return + } + + e.metrics.AddUsageDimensions(params.ClassName, "get_graphql", "_additional.vector", + res[0].Dims) +} + +func (e *Explorer) usageOperationFromGetParams(params dto.GetParams) string { + if params.NearObject != nil { + return "nearObject" + } + + if params.NearVector != nil { + return "nearVector" + } + + // there is at most one module param, so we can return the first we find + for param := range params.ModuleParams { + return param + } + + return "n/a" +} + +func (e *Explorer) trackUsageExplore(res search.Results, params ExploreParams) { + if len(res) == 0 { + return + } + + op := e.usageOperationFromExploreParams(params) + e.metrics.AddUsageDimensions("n/a", "explore_graphql", op, res[0].Dims) +} + +func (e *Explorer) usageOperationFromExploreParams(params ExploreParams) string { + if params.NearObject != nil { + return "nearObject" + } + + if params.NearVector != nil { + return "nearVector" + } + + // there is at most one module param, so we can return the first we find + for param := range params.ModuleParams { + return param + } + + return "n/a" +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_hybrid.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_hybrid.go new file mode 100644 index 
0000000000000000000000000000000000000000..f4c6805dcba04527203672a07fc6843e47f61eca --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_hybrid.go @@ -0,0 +1,449 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + "fmt" + "math" + + "github.com/go-openapi/strfmt" + + enterrors "github.com/weaviate/weaviate/entities/errors" + "github.com/weaviate/weaviate/entities/models" + + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/dto" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/searchparams" + nearText2 "github.com/weaviate/weaviate/usecases/modulecomponents/arguments/nearText" + "github.com/weaviate/weaviate/usecases/traverser/hybrid" +) + +// Do a bm25 search. 
The results will be used in the hybrid algorithm +func sparseSearch(ctx context.Context, e *Explorer, params dto.GetParams) ([]*search.Result, string, error) { + params.KeywordRanking = &searchparams.KeywordRanking{ + Query: params.HybridSearch.Query, + Type: "bm25", + Properties: params.HybridSearch.Properties, + } + + params.Group = nil + params.GroupBy = nil + + if params.Pagination == nil { + return nil, "", fmt.Errorf("invalid params, pagination object is nil") + } + + if params.HybridSearch.SearchOperator != "" { + params.KeywordRanking.SearchOperator = params.HybridSearch.SearchOperator + } + + if params.HybridSearch.MinimumOrTokensMatch != 0 { + params.KeywordRanking.MinimumOrTokensMatch = params.HybridSearch.MinimumOrTokensMatch + } + + totalLimit, err := e.CalculateTotalLimit(params.Pagination) + if err != nil { + return nil, "", err + } + + enforcedMin := MaxInt(params.Pagination.Offset+int(e.config.QueryHybridMaximumResults), totalLimit) + + oldLimit := params.Pagination.Limit + params.Pagination.Limit = enforcedMin - params.Pagination.Offset + + results, scores, err := e.searcher.SparseObjectSearch(ctx, params) + if err != nil { + return nil, "", err + } + params.Pagination.Limit = oldLimit + + out := make([]*search.Result, len(results)) + for i, obj := range results { + sr := obj.SearchResultWithScore(additional.Properties{}, scores[i]) + sr.SecondarySortValue = sr.Score + out[i] = &sr + } + + return out, "keyword,bm25", nil +} + +// Do a nearvector search. 
The results will be used in the hybrid algorithm +func denseSearch(ctx context.Context, e *Explorer, params dto.GetParams, searchname string, targetVectors []string, searchVector *searchparams.NearVector) ([]*search.Result, string, error) { + params.Pagination.Offset = 0 + if params.Pagination.Limit < int(e.config.QueryHybridMaximumResults) { + params.Pagination.Limit = int(e.config.QueryHybridMaximumResults) + } + params.Group = nil + params.GroupBy = nil + + partialResults, searchVectors, err := e.searchForTargets(ctx, params, targetVectors, searchVector) + if err != nil { + return nil, "", err + } + var vector models.Vector + if len(searchVectors) > 0 { + vector = searchVectors[0] + } + + results, err := e.searchResultsToGetResponseWithType(ctx, partialResults, vector, params) + if err != nil { + return nil, "", err + } + + out := make([]*search.Result, 0, len(results)) + for _, sr := range results { + out_sr := sr + out_sr.SecondarySortValue = 1 - sr.Dist + out = append(out, &out_sr) + } + + return out, "vector," + searchname, nil +} + +/* +type NearTextParams struct { + Values []string + Limit int + MoveTo ExploreMove + MoveAwayFrom ExploreMove + Certainty float64 + Distance float64 + WithDistance bool + Network bool + Autocorrect bool + TargetVectors []string +} +*/ +// Do a nearText search. 
The results will be used in the hybrid algorithm
// together with the keyword sub-search results.
func nearTextSubSearch(ctx context.Context, e *Explorer, params dto.GetParams, targetVectors []string) ([]*search.Result, string, error) {
	var subSearchParams nearText2.NearTextParams

	subSearchParams.Values = params.HybridSearch.NearTextParams.Values
	subSearchParams.Limit = params.HybridSearch.NearTextParams.Limit

	subSearchParams.Certainty = params.HybridSearch.NearTextParams.Certainty
	subSearchParams.Distance = params.HybridSearch.NearTextParams.Distance
	subSearchParams.MoveTo.Force = params.HybridSearch.NearTextParams.MoveTo.Force
	subSearchParams.MoveTo.Values = params.HybridSearch.NearTextParams.MoveTo.Values

	// TODO objects

	subSearchParams.MoveAwayFrom.Force = params.HybridSearch.NearTextParams.MoveAwayFrom.Force
	subSearchParams.MoveAwayFrom.Values = params.HybridSearch.NearTextParams.MoveAwayFrom.Values
	// TODO objects

	subSearchParams.Network = params.HybridSearch.NearTextParams.Network

	subSearchParams.WithDistance = params.HybridSearch.NearTextParams.WithDistance

	subSearchParams.TargetVectors = targetVectors // TODO support multiple target vectors

	// Wrap the original params, replacing the hybrid search with a plain
	// nearText module param; grouping is applied after fusion.
	subsearchWrap := params
	if subsearchWrap.ModuleParams == nil {
		subsearchWrap.ModuleParams = map[string]interface{}{}
	}

	subsearchWrap.ModuleParams["nearText"] = &subSearchParams

	subsearchWrap.HybridSearch = nil
	subsearchWrap.Group = nil
	subsearchWrap.GroupBy = nil
	partialResults, vectors, err := e.searchForTargets(ctx, subsearchWrap, targetVectors, nil)
	if err != nil {
		return nil, "", err
	}

	var vector models.Vector
	if len(vectors) > 0 {
		vector = vectors[0]
	}

	results, err := e.searchResultsToGetResponseWithType(ctx, partialResults, vector, params)
	if err != nil {
		return nil, "", err
	}

	var out []*search.Result
	for _, res := range results {
		sr := res
		// Rank secondarily by similarity (1 - distance).
		sr.SecondarySortValue = 1 - sr.Dist
		out = append(out, &sr)
	}

	return out, "vector,nearText", nil
}

// Hybrid search. This is the main entry point to the hybrid search algorithm.
// It runs the vector and keyword sub-searches (weighted by alpha), fuses the
// two result lists, then applies pagination and optional grouping.
func (e *Explorer) Hybrid(ctx context.Context, params dto.GetParams) ([]search.Result, error) {
	var err error
	var results [][]*search.Result
	var weights []float64
	var names []string
	var targetVectors []string

	if params.HybridSearch.NearTextParams != nil && params.HybridSearch.NearVectorParams != nil {
		return nil, fmt.Errorf("hybrid search cannot have both nearText and nearVector parameters")
	}

	origParams := params
	params.Pagination = &filters.Pagination{
		Limit:   params.Pagination.Limit,
		Offset:  params.Pagination.Offset,
		Autocut: params.Pagination.Autocut,
	}

	// pagination is handled after combining results
	vectorParams := params
	vectorParams.Pagination = &filters.Pagination{
		Limit:   int(math.Max(float64(e.config.QueryHybridMaximumResults), float64(params.Pagination.Limit))),
		Offset:  0,
		Autocut: -1,
	}

	keywordParams := params
	keywordParams.Pagination = &filters.Pagination{
		Limit:   int(math.Max(float64(e.config.QueryHybridMaximumResults), float64(params.Pagination.Limit))),
		Offset:  0,
		Autocut: -1,
	}

	targetVectors, err = e.targetParamHelper.GetTargetVectorOrDefault(e.schemaGetter.GetSchemaSkipAuth(), params.ClassName, params.HybridSearch.TargetVectors)
	if err != nil {
		return nil, err
	}

	// If the user has given any weight to the vector search, choose 1 of three possible vector searches
	//
	// 1. If the user has provided nearText parameters, use them in a nearText search
	// 2. If the user has provided nearVector parameters, use them in a nearVector search
	// 3. (Default) Do a vector search with the default parameters (the old hybrid search)

	resultsCount := 1
	if params.HybridSearch.Alpha != 0 && params.HybridSearch.Alpha != 1 {
		resultsCount = 2
	}

	eg := enterrors.NewErrorGroupWrapper(e.logger)
	eg.SetLimit(resultsCount)

	results = make([][]*search.Result, resultsCount)
	weights = make([]float64, resultsCount)
	names = make([]string, resultsCount)
	var belowCutoffSet map[strfmt.UUID]struct{}

	if (params.HybridSearch.Alpha) > 0 {
		eg.Go(func() error {
			params := vectorParams
			var err error
			var name string
			var res []*search.Result
			var errorText string
			if params.HybridSearch.NearTextParams != nil {
				res, name, err = nearTextSubSearch(ctx, e, params, targetVectors)
				errorText = "nearTextSubSearch"
			} else if params.HybridSearch.NearVectorParams != nil {
				// NOTE(review): every slice element aliases the same
				// NearVectorParams pointer, so each iteration overwrites the
				// shared TargetVectors and only the last target survives; the
				// slice itself is never read afterwards. Kept as-is because
				// removing it would change the TargetVectors seen by
				// denseSearch — confirm intent with the authors.
				searchVectors := make([]*searchparams.NearVector, len(targetVectors))
				for i, targetVector := range targetVectors {
					searchVectors[i] = params.HybridSearch.NearVectorParams
					searchVectors[i].TargetVectors = []string{targetVector}
				}
				res, name, err = denseSearch(ctx, e, params, "nearVector", targetVectors, params.HybridSearch.NearVectorParams)
				errorText = "nearVectorSubSearch"
			} else {
				sch := e.schemaGetter.GetSchemaSkipAuth()
				class := sch.FindClassByName(schema.ClassName(params.ClassName))
				if class == nil {
					return fmt.Errorf("class %q not found", params.ClassName)
				}

				searchVectors := &searchparams.NearVector{}
				searchVectors.Vectors = make([]models.Vector, len(targetVectors))
				searchVectors.TargetVectors = make([]string, len(targetVectors))
				isVectorEmpty, isVectorEmptyErr := dto.IsVectorEmpty(params.HybridSearch.Vector)
				if isVectorEmptyErr != nil {
					return fmt.Errorf("is hybrid vector empty: %w", isVectorEmptyErr)
				}
				if !isVectorEmpty {
					// An explicit query vector was supplied; reuse it for every target.
					for i, targetVector := range targetVectors {
						searchVectors.TargetVectors[i] = targetVector
						searchVectors.Vectors[i] = params.HybridSearch.Vector
					}
				} else {
					// Vectorize the query text once per target vector, in parallel.
					eg2 := enterrors.NewErrorGroupWrapper(e.logger)
					eg2.SetLimit(_NUMCPU)
					for i, targetVector := range targetVectors {
						i := i
						targetVector := targetVector
						eg2.Go(func() error {
							isMultiVector, err := e.modulesProvider.IsTargetVectorMultiVector(params.ClassName, targetVector)
							if err != nil {
								return fmt.Errorf("hybrid: is target vector multi vector: %w", err)
							}
							if isMultiVector {
								searchVectors.TargetVectors[i] = targetVector
								searchVector, err := e.modulesProvider.MultiVectorFromInput(ctx, params.ClassName, params.HybridSearch.Query, targetVector)
								searchVectors.Vectors[i] = searchVector
								return err
							}
							searchVectors.TargetVectors[i] = targetVector
							searchVector, err := e.modulesProvider.VectorFromInput(ctx, params.ClassName, params.HybridSearch.Query, targetVector)
							searchVectors.Vectors[i] = searchVector
							return err
						})
					}
					if err := eg2.Wait(); err != nil {
						return err
					}
				}

				res, name, err = denseSearch(ctx, e, params, "hybridVector", targetVectors, searchVectors)
				errorText = "hybrid"
			}

			// Fix: check the sub-search error before consuming res. The
			// original filtered res by the distance cutoff first and only then
			// inspected err, i.e. it read the result of a failed search.
			if err != nil {
				e.logger.WithField("action", "hybrid").WithError(err).Error(errorText + " failed")
				return err
			}

			if params.HybridSearch.WithDistance {
				belowCutoffSet = map[strfmt.UUID]struct{}{}
				maxFound := -1
				for i := range res {
					if res[i].Dist <= params.HybridSearch.Distance {
						belowCutoffSet[res[i].ID] = struct{}{}
						maxFound = i
					} else {
						break
					}
				}
				// sorted by distance, so just remove everything after the first entry we found
				res = res[:maxFound+1]
			}

			weights[0] = params.HybridSearch.Alpha
			results[0] = res
			names[0] = name

			return nil
		})
	}

	sparseSearchIndex := -1
	if 1-params.HybridSearch.Alpha > 0 {
		eg.Go(func() error {
			// If the user has given any weight to the keyword search, do a keyword search
			params := keywordParams
			sparseResults, name, err := sparseSearch(ctx, e, params)
			if err != nil {
				e.logger.WithField("action", "hybrid").WithError(err).Error("sparseSearch failed")
				return err
			}
			weights[len(weights)-1] = 1 - params.HybridSearch.Alpha
			results[len(weights)-1] = sparseResults
			names[len(weights)-1] = name
			sparseSearchIndex = len(weights) - 1

			return nil
		})
	}

	if err := eg.Wait(); err != nil {
		return nil, err
	}

	// remove results with a vector distance above the cutoff from the BM25 results
	if sparseSearchIndex >= 0 && belowCutoffSet != nil {
		newResults := make([]*search.Result, 0, len(results[sparseSearchIndex]))
		for i := range results[sparseSearchIndex] {
			if _, ok := belowCutoffSet[results[sparseSearchIndex][i].ID]; ok {
				newResults = append(newResults, results[sparseSearchIndex][i])
			}
		}
		results[sparseSearchIndex] = newResults
	}

	// The postProcess function is used to limit the number of results and to resolve references
	// in the results. It is called after all the subsearches have been completed, and before autocut
	postProcess := func(results []*search.Result) ([]search.Result, error) {
		totalLimit, err := e.CalculateTotalLimit(origParams.Pagination)
		if err != nil {
			return nil, err
		}

		if len(results) > totalLimit {
			results = results[:totalLimit]
		}

		res1 := make([]search.Result, 0, len(results))
		for _, res := range results {
			res1 = append(res1, *res)
		}

		res, err := e.searcher.ResolveReferences(ctx, res1, origParams.Properties, nil, origParams.AdditionalProperties, origParams.Tenant)
		if err != nil {
			return nil, err
		}
		return res, nil
	}

	res, err := hybrid.HybridCombiner(ctx, &hybrid.Params{
		HybridSearch: origParams.HybridSearch,
		Keyword:      origParams.KeywordRanking,
		Class:        origParams.ClassName,
		Autocut:      origParams.Pagination.Autocut,
	}, results, weights, names, e.logger, postProcess)
	if err != nil {
		return nil, err
	}

	var pointerResultList hybrid.Results

	if origParams.Pagination.Limit <= 0 {
		origParams.Pagination.Limit = int(e.config.QueryHybridMaximumResults)
	}

	if origParams.Pagination.Offset < 0 {
		origParams.Pagination.Offset = 0
	}

	// Apply offset/limit against the fused list.
	if len(res) >= origParams.Pagination.Limit+origParams.Pagination.Offset {
		pointerResultList = res[origParams.Pagination.Offset : origParams.Pagination.Limit+origParams.Pagination.Offset]
	}
	if len(res) < origParams.Pagination.Limit+origParams.Pagination.Offset && len(res) > origParams.Pagination.Offset {
		pointerResultList = res[origParams.Pagination.Offset:]
	}
	if len(res) <= origParams.Pagination.Offset {
		pointerResultList = hybrid.Results{}
	}

	// The rest of weaviate uses []search.Result, so we convert the pointerResultList to []search.Result
	out := make([]search.Result, 0, len(pointerResultList))
	for _, pointerResult := range pointerResultList {
		out = append(out, *pointerResult)
	}

	if origParams.GroupBy != nil {
		groupedResults, err := e.groupSearchResults(ctx, out, origParams.GroupBy)
		if err != nil {
			return nil, err
		}
		return groupedResults, nil
	}
	return out, nil
}
diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_test.go new file mode 100644 index 0000000000000000000000000000000000000000..d4281de9972e1d37d41b464847e13aac991ecebb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_test.go @@ -0,0 +1,2908 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved.
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + "testing" + "time" + + "github.com/go-openapi/strfmt" + "github.com/pkg/errors" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/dto" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/searchparams" + "github.com/weaviate/weaviate/entities/vectorindex/hnsw" + "github.com/weaviate/weaviate/usecases/config" +) + +var defaultConfig = config.Config{ + QueryDefaults: config.QueryDefaults{ + Limit: 100, + }, + QueryMaximumResults: 100, +} + +func Test_Explorer_GetClass(t *testing.T) { + t.Run("when an explore param is set for nearVector", func(t *testing.T) { + // TODO: this is a module specific test case, which relies on the + // text2vec-contextionary module + params := dto.GetParams{ + ClassName: "BestClass", + NearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{0.8, 0.2, 0.7}}, + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Dims: 128, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + }, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + metrics := &fakeMetrics{} + log, _ := test.NewNullLogger() + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. 
+ On("VectorSearch", expectedParamsToSearch, []models.Vector{[]float32{0.8, 0.2, 0.7}}). + Return(searchResults, nil) + + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearVector", 128) + res, err := explorer.GetClass(context.Background(), params) + + t.Run("vector search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain concepts1", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "age": 200, + }, res[1]) + }) + + t.Run("usage must be tracked", func(t *testing.T) { + metrics.AssertExpectations(t) + }) + }) + + t.Run("when an explore param is set for nearObject without id and beacon", func(t *testing.T) { + t.Run("with distance", func(t *testing.T) { + // TODO: this is a module specific test case, which relies on the + // text2vec-contextionary module + params := dto.GetParams{ + ClassName: "BestClass", + NearObject: &searchparams.NearObject{ + Distance: 0.1, + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("vector search must be called with right params", func(t *testing.T) { + assert.NotNil(t, err) + assert.Nil(t, res) + assert.Contains(t, err.Error(), "explorer: get class: vectorize search vector: nearObject params: empty id and beacon") + }) + }) + + t.Run("with certainty", func(t *testing.T) { + // TODO: this is a module specific test case, which relies on the + // text2vec-contextionary module + params := 
dto.GetParams{ + ClassName: "BestClass", + NearObject: &searchparams.NearObject{ + Certainty: 0.9, + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("vector search must be called with right params", func(t *testing.T) { + assert.NotNil(t, err) + assert.Nil(t, res) + assert.Contains(t, err.Error(), "explorer: get class: vectorize search vector: nearObject params: empty id and beacon") + }) + }) + }) + + t.Run("when an explore param is set for nearObject with beacon", func(t *testing.T) { + t.Run("with distance", func(t *testing.T) { + t.Run("with certainty", func(t *testing.T) { + // TODO: this is a module specific test case, which relies on the + // text2vec-contextionary module + searchVector := []float32{1, 2, 3} + params := dto.GetParams{ + ClassName: "BestClass", + NearObject: &searchparams.NearObject{ + Beacon: "weaviate://localhost/e9c12c22-766f-4bde-b140-d4cf8fd6e041", + Distance: 0.1, + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + searchRes := search.Result{ + ID: "e9c12c22-766f-4bde-b140-d4cf8fd6e041", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Vector: []float32{1, 2, 3}, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Dims: 128, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + }, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + 
explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + vectors := make([]models.Vector, 1) + vectors[0] = searchVector + expectedParamsToSearch := params + search. + On("Object", "BestClass", strfmt.UUID("e9c12c22-766f-4bde-b140-d4cf8fd6e041")). + Return(&searchRes, nil) + search. + On("VectorSearch", expectedParamsToSearch, vectors). + Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearObject", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("vector search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain object", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "age": 200, + }, res[1]) + }) + }) + }) + + t.Run("with certainty", func(t *testing.T) { + // TODO: this is a module specific test case, which relies on the + // text2vec-contextionary module + searchVector := []float32{1, 2, 3} + params := dto.GetParams{ + ClassName: "BestClass", + NearObject: &searchparams.NearObject{ + Beacon: "weaviate://localhost/e9c12c22-766f-4bde-b140-d4cf8fd6e041", + Certainty: 0.9, + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + searchRes := search.Result{ + ID: "e9c12c22-766f-4bde-b140-d4cf8fd6e041", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Vector: []float32{1, 2, 3}, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Dims: 128, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + }, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), 
metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + vectors := make([]models.Vector, 1) + vectors[0] = searchVector + + search. + On("Object", "BestClass", strfmt.UUID("e9c12c22-766f-4bde-b140-d4cf8fd6e041")). + Return(&searchRes, nil) + search. + On("VectorSearch", expectedParamsToSearch, vectors). + Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearObject", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("vector search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain object", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "age": 200, + }, res[1]) + }) + }) + }) + + t.Run("when an explore param is set for nearObject with id", func(t *testing.T) { + t.Run("with distance", func(t *testing.T) { + // TODO: this is a module specific test case, which relies on the + // text2vec-contextionary module + searchVector := []float32{1, 2, 3} + params := dto.GetParams{ + ClassName: "BestClass", + NearObject: &searchparams.NearObject{ + ID: "e9c12c22-766f-4bde-b140-d4cf8fd6e041", + Distance: 0.1, + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + searchRes := search.Result{ + ID: "e9c12c22-766f-4bde-b140-d4cf8fd6e041", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Vector: []float32{1, 2, 3}, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Dims: 128, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + }, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := 
&fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + vectors := make([]models.Vector, 1) + vectors[0] = searchVector + + search. + On("Object", "BestClass", strfmt.UUID("e9c12c22-766f-4bde-b140-d4cf8fd6e041")). + Return(&searchRes, nil) + search. + On("VectorSearch", expectedParamsToSearch, vectors). + Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearObject", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("vector search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain object", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "age": 200, + }, res[1]) + }) + }) + + t.Run("with certainty", func(t *testing.T) { + // TODO: this is a module specific test case, which relies on the + // text2vec-contextionary module + searchVector := []float32{1, 2, 3} + params := dto.GetParams{ + ClassName: "BestClass", + NearObject: &searchparams.NearObject{ + ID: "e9c12c22-766f-4bde-b140-d4cf8fd6e041", + Certainty: 0.9, + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + searchRes := search.Result{ + ID: "e9c12c22-766f-4bde-b140-d4cf8fd6e041", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Vector: []float32{1, 2, 3}, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Dims: 128, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + }, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + metrics := &fakeMetrics{} + log, _ := 
test.NewNullLogger() + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + vectors := make([]models.Vector, 1) + vectors[0] = searchVector + + search. + On("Object", "BestClass", strfmt.UUID("e9c12c22-766f-4bde-b140-d4cf8fd6e041")). + Return(&searchRes, nil) + search. + On("VectorSearch", expectedParamsToSearch, vectors). + Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearObject", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("vector search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain object", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "age": 200, + }, res[1]) + }) + }) + }) + + t.Run("when an explore param is set for nearVector and the required distance not met", + func(t *testing.T) { + t.Run("with distance", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + NearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{0.8, 0.2, 0.7}}, + Distance: 0.4, + WithDistance: true, + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + searchResults := []search.Result{ + { + ID: "id1", + Dist: 2 * 0.69, + Dims: 128, + }, + { + ID: "id2", + Dist: 2 * 0.69, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: 
"BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. + On("VectorSearch", expectedParamsToSearch, []models.Vector{[]float32{0.8, 0.2, 0.7}}). + Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearVector", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("vector search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("no concept met the required certainty", func(t *testing.T) { + assert.Len(t, res, 0) + }) + }) + + t.Run("with certainty", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + NearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{0.8, 0.2, 0.7}}, + Certainty: 0.8, + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + searchResults := []search.Result{ + { + ID: "id1", + Dist: 2 * 0.69, + Dims: 128, + }, + { + ID: "id2", + Dist: 2 * 0.69, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. + On("VectorSearch", expectedParamsToSearch, []models.Vector{[]float32{0.8, 0.2, 0.7}}). 
+ Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearVector", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("vector search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("no concept met the required certainty", func(t *testing.T) { + assert.Len(t, res, 0) + }) + }) + }) + + t.Run("when two conflicting (nearVector, nearObject) near searchers are set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + NearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{0.8, 0.2, 0.7}}, + }, + NearObject: &searchparams.NearObject{ + Beacon: "weaviate://localhost/e9c12c22-766f-4bde-b140-d4cf8fd6e041", + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + _, err := explorer.GetClass(context.Background(), params) + require.NotNil(t, err) + assert.Contains(t, err.Error(), "parameters which are conflicting") + }) + + t.Run("when no explore param is set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + }, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: 
schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. + On("Search", expectedParamsToSearch). + Return(searchResults, nil) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain concepts2", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "age": 200, + }, res[1]) + }) + }) + + t.Run("near vector with group", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + NearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{0.8, 0.2, 0.7}}, + }, + Group: &dto.GroupParams{ + Strategy: "closest", + Force: 1.0, + }, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + expectedParamsToSearch.AdditionalProperties.Vector = true + search. + On("VectorSearch", expectedParamsToSearch, []models.Vector{[]float32{0.8, 0.2, 0.7}}). 
+ Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearVector", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain concepts3", func(t *testing.T) { + require.Len(t, res, 1) + }) + }) + + t.Run("when the semanticPath prop is set but cannot be", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + AdditionalProperties: additional.Properties{ + ModuleParams: map[string]interface{}{ + "semanticPath": getDefaultParam("semanticPath"), + }, + }, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + }, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. + On("Search", expectedParamsToSearch). 
+ Return(searchResults, nil) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("error can't be nil", func(t *testing.T) { + assert.NotNil(t, err) + assert.Nil(t, res) + assert.Contains(t, err.Error(), "unknown capability: semanticPath") + }) + }) + + t.Run("when the classification prop is set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + AdditionalProperties: additional.Properties{ + Classification: true, + }, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + AdditionalProperties: models.AdditionalProperties{ + "classification": nil, + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + }, + AdditionalProperties: models.AdditionalProperties{ + "classification": &additional.Classification{ + ID: "1234", + }, + }, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. + On("Search", expectedParamsToSearch). 
+ Return(searchResults, nil) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain concepts4", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "age": 200, + "_additional": map[string]interface{}{ + "classification": &additional.Classification{ + ID: "1234", + }, + }, + }, res[1]) + }) + }) + + t.Run("when the interpretation prop is set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + AdditionalProperties: additional.Properties{ + ModuleParams: map[string]interface{}{ + "interpretation": true, + }, + }, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + AdditionalProperties: models.AdditionalProperties{ + "interpretation": nil, + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + }, + AdditionalProperties: models.AdditionalProperties{ + "interpretation": &Interpretation{ + Source: []*InterpretationSource{ + { + Concept: "foo", + Weight: 0.123, + Occurrence: 123, + }, + }, + }, + }, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. + On("Search", expectedParamsToSearch). 
+ Return(searchResults, nil) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain concepts5", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "age": 200, + "_additional": map[string]interface{}{ + "interpretation": &Interpretation{ + Source: []*InterpretationSource{ + { + Concept: "foo", + Weight: 0.123, + Occurrence: 123, + }, + }, + }, + }, + }, res[1]) + }) + }) + + t.Run("when the vector _additional prop is set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + AdditionalProperties: additional.Properties{ + Vector: true, + }, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Vector: []float32{0.1, -0.3}, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. + On("Search", expectedParamsToSearch). 
+ Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "_additional.vector", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain vector", func(t *testing.T) { + require.Len(t, res, 1) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + "_additional": map[string]interface{}{ + "vector": []float32{0.1, -0.3}, + }, + }, res[0]) + }) + }) + + t.Run("when the creationTimeUnix _additional prop is set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + AdditionalProperties: additional.Properties{ + CreationTimeUnix: true, + }, + } + + now := time.Now().UnixNano() / int64(time.Millisecond) + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Created: now, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. + On("Search", expectedParamsToSearch). 
+ Return(searchResults, nil) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain creationTimeUnix", func(t *testing.T) { + require.Len(t, res, 1) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + "_additional": map[string]interface{}{ + "creationTimeUnix": now, + }, + }, res[0]) + }) + }) + + t.Run("when the lastUpdateTimeUnix _additional prop is set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + AdditionalProperties: additional.Properties{ + LastUpdateTimeUnix: true, + }, + } + + now := time.Now().UnixNano() / int64(time.Millisecond) + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Updated: now, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. + On("Search", expectedParamsToSearch). 
+ Return(searchResults, nil) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain lastUpdateTimeUnix", func(t *testing.T) { + require.Len(t, res, 1) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + "_additional": map[string]interface{}{ + "lastUpdateTimeUnix": now, + }, + }, res[0]) + }) + }) + + t.Run("when the nearestNeighbors prop is set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + AdditionalProperties: additional.Properties{ + ModuleParams: map[string]interface{}{ + "nearestNeighbors": true, + }, + }, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "name": "Bar", + }, + }, + } + + searcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + extender := &fakeExtender{ + returnArgs: []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + AdditionalProperties: models.AdditionalProperties{ + "nearestNeighbors": &NearestNeighbors{ + Neighbors: []*NearestNeighbor{ + { + Concept: "foo", + Distance: 0.1, + }, + }, + }, + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "name": "Bar", + }, + AdditionalProperties: models.AdditionalProperties{ + "nearestNeighbors": &NearestNeighbors{ + Neighbors: []*NearestNeighbor{ + { + Concept: "bar", + Distance: 0.1, + }, + }, + }, + }, + }, + }, + } + explorer := NewExplorer(searcher, log, getFakeModulesProviderWithCustomExtenders(extender, nil, nil), nil, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + searcher. 
+ On("Search", expectedParamsToSearch). + Return(searchResults, nil) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + searcher.AssertExpectations(t) + }) + + t.Run("response must contain concepts6", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + "_additional": map[string]interface{}{ + "nearestNeighbors": &NearestNeighbors{ + Neighbors: []*NearestNeighbor{ + { + Concept: "foo", + Distance: 0.1, + }, + }, + }, + }, + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "name": "Bar", + "_additional": map[string]interface{}{ + "nearestNeighbors": &NearestNeighbors{ + Neighbors: []*NearestNeighbor{ + { + Concept: "bar", + Distance: 0.1, + }, + }, + }, + }, + }, res[1]) + }) + }) + + t.Run("when the featureProjection prop is set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + AdditionalProperties: additional.Properties{ + ModuleParams: map[string]interface{}{ + "featureProjection": getDefaultParam("featureProjection"), + }, + }, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "name": "Bar", + }, + }, + } + + searcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + projector := &fakeProjector{ + returnArgs: []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + AdditionalProperties: models.AdditionalProperties{ + "featureProjection": &FeatureProjection{ + Vector: []float32{0, 1}, + }, + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "name": "Bar", + }, + AdditionalProperties: models.AdditionalProperties{ + "featureProjection": &FeatureProjection{ + Vector: []float32{1, 0}, + }, + }, + }, + }, + } + explorer := 
NewExplorer(searcher, log, getFakeModulesProviderWithCustomExtenders(nil, projector, nil), nil, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + searcher. + On("Search", expectedParamsToSearch). + Return(searchResults, nil) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + searcher.AssertExpectations(t) + }) + + t.Run("response must contain concepts7", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + "_additional": map[string]interface{}{ + "featureProjection": &FeatureProjection{ + Vector: []float32{0, 1}, + }, + }, + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "name": "Bar", + "_additional": map[string]interface{}{ + "featureProjection": &FeatureProjection{ + Vector: []float32{1, 0}, + }, + }, + }, res[1]) + }) + }) + + t.Run("when the _additional on ref prop is set", func(t *testing.T) { + now := time.Now().UnixMilli() + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + Properties: []search.SelectProperty{ + { + Name: "ofBestRefClass", + Refs: []search.SelectClass{ + { + ClassName: "BestRefClass", + AdditionalProperties: additional.Properties{ + ID: true, + Vector: true, + CreationTimeUnix: true, + LastUpdateTimeUnix: true, + }, + }, + }, + }, + }, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + "ofBestRefClass": []interface{}{ + search.LocalRef{ + Class: "BestRefClass", + Fields: map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cea", + "vector": []float32{1, 0}, + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + }, + }, + }, + }, + AdditionalProperties: 
models.AdditionalProperties{ + "classification": nil, + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + "ofBestRefClass": []interface{}{ + search.LocalRef{ + Class: "BestRefClass", + Fields: map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4ceb", + }, + }, + }, + }, + AdditionalProperties: models.AdditionalProperties{ + "classification": &additional.Classification{ + ID: "1234", + }, + }, + }, + } + + fakeSearch := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(fakeSearch, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + fakeSearch. + On("Search", expectedParamsToSearch). + Return(searchResults, nil) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + fakeSearch.AssertExpectations(t) + }) + + t.Run("response must contain _additional id and vector params for ref prop", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + "ofBestRefClass": []interface{}{ + search.LocalRef{ + Class: "BestRefClass", + Fields: map[string]interface{}{ + "_additional": map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cea", + "vector": []float32{1, 0}, + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + }, + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cea", + "vector": []float32{1, 0}, + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + }, + }, + }, + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "age": 200, + "ofBestRefClass": []interface{}{ + search.LocalRef{ + Class: "BestRefClass", + Fields: map[string]interface{}{ + "_additional": map[string]interface{}{ + "id": 
"2d68456c-73b4-4cfc-a6dc-718efc5b4ceb", + "vector": nil, + "creationTimeUnix": nil, + "lastUpdateTimeUnix": nil, + }, + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4ceb", + }, + }, + }, + "_additional": map[string]interface{}{ + "classification": &additional.Classification{ + ID: "1234", + }, + }, + }, res[1]) + }) + }) + + t.Run("when the _additional on all refs prop is set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + Properties: []search.SelectProperty{ + { + Name: "ofBestRefClass", + Refs: []search.SelectClass{ + { + ClassName: "BestRefClass", + AdditionalProperties: additional.Properties{ + ID: true, + }, + RefProperties: search.SelectProperties{ + search.SelectProperty{ + Name: "ofBestRefInnerClass", + Refs: []search.SelectClass{ + { + ClassName: "BestRefInnerClass", + AdditionalProperties: additional.Properties{ + ID: true, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + "ofBestRefClass": []interface{}{ + search.LocalRef{ + Class: "BestRefClass", + Fields: map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cea", + "ofBestRefInnerClass": []interface{}{ + search.LocalRef{ + Class: "BestRefInnerClass", + Fields: map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4caa", + }, + }, + }, + }, + }, + }, + }, + AdditionalProperties: models.AdditionalProperties{ + "classification": nil, + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + "ofBestRefClass": []interface{}{ + search.LocalRef{ + Class: "BestRefClass", + Fields: map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4ceb", + "ofBestRefInnerClass": []interface{}{ + search.LocalRef{ + Class: "BestRefInnerClass", + Fields: map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cbb", + }, + }, + }, + }, + }, + }, + }, + AdditionalProperties: 
models.AdditionalProperties{ + "classification": &additional.Classification{ + ID: "1234", + }, + }, + }, + } + + fakeSearch := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(fakeSearch, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + fakeSearch. + On("Search", expectedParamsToSearch). + Return(searchResults, nil) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + fakeSearch.AssertExpectations(t) + }) + + t.Run("response must contain _additional id param for ref prop", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + "ofBestRefClass": []interface{}{ + search.LocalRef{ + Class: "BestRefClass", + Fields: map[string]interface{}{ + "_additional": map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cea", + }, + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cea", + "ofBestRefInnerClass": []interface{}{ + search.LocalRef{ + Class: "BestRefInnerClass", + Fields: map[string]interface{}{ + "_additional": map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4caa", + }, + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4caa", + }, + }, + }, + }, + }, + }, + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "age": 200, + "ofBestRefClass": []interface{}{ + search.LocalRef{ + Class: "BestRefClass", + Fields: map[string]interface{}{ + "_additional": map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4ceb", + }, + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4ceb", + "ofBestRefInnerClass": []interface{}{ + search.LocalRef{ + Class: "BestRefInnerClass", + Fields: map[string]interface{}{ + "_additional": map[string]interface{}{ 
+ "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cbb", + }, + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cbb", + }, + }, + }, + }, + }, + }, + "_additional": map[string]interface{}{ + "classification": &additional.Classification{ + ID: "1234", + }, + }, + }, res[1]) + }) + }) + + t.Run("when the _additional on lots of refs prop is set", func(t *testing.T) { + now := time.Now().UnixMilli() + vec := []float32{1, 2, 3} + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + Properties: []search.SelectProperty{ + { + Name: "ofBestRefClass", + Refs: []search.SelectClass{ + { + ClassName: "BestRefClass", + AdditionalProperties: additional.Properties{ + ID: true, + Vector: true, + CreationTimeUnix: true, + LastUpdateTimeUnix: true, + }, + RefProperties: search.SelectProperties{ + search.SelectProperty{ + Name: "ofBestRefInnerClass", + Refs: []search.SelectClass{ + { + ClassName: "BestRefInnerClass", + AdditionalProperties: additional.Properties{ + ID: true, + Vector: true, + CreationTimeUnix: true, + LastUpdateTimeUnix: true, + }, + RefProperties: search.SelectProperties{ + search.SelectProperty{ + Name: "ofBestRefInnerInnerClass", + Refs: []search.SelectClass{ + { + ClassName: "BestRefInnerInnerClass", + AdditionalProperties: additional.Properties{ + ID: true, + Vector: true, + CreationTimeUnix: true, + LastUpdateTimeUnix: true, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + "ofBestRefClass": []interface{}{ + search.LocalRef{ + Class: "BestRefClass", + Fields: map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cea", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + "ofBestRefInnerClass": []interface{}{ + search.LocalRef{ + Class: "BestRefInnerClass", + Fields: map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4caa", + "creationTimeUnix": 
now, + "lastUpdateTimeUnix": now, + "vector": vec, + "ofBestRefInnerInnerClass": []interface{}{ + search.LocalRef{ + Class: "BestRefInnerInnerClass", + Fields: map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4aaa", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + AdditionalProperties: models.AdditionalProperties{ + "classification": nil, + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + "ofBestRefClass": []interface{}{ + search.LocalRef{ + Class: "BestRefClass", + Fields: map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4ceb", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + "ofBestRefInnerClass": []interface{}{ + search.LocalRef{ + Class: "BestRefInnerClass", + Fields: map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cbb", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + "ofBestRefInnerInnerClass": []interface{}{ + search.LocalRef{ + Class: "BestRefInnerInnerClass", + Fields: map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4bbb", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + AdditionalProperties: models.AdditionalProperties{ + "classification": &additional.Classification{ + ID: "1234", + }, + }, + }, + } + + fakeSearch := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(fakeSearch, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + fakeSearch. + On("Search", expectedParamsToSearch). 
+ Return(searchResults, nil) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + fakeSearch.AssertExpectations(t) + }) + + t.Run("response must contain _additional id param for ref prop", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + "ofBestRefClass": []interface{}{ + search.LocalRef{ + Class: "BestRefClass", + Fields: map[string]interface{}{ + "_additional": map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cea", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + }, + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cea", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + "ofBestRefInnerClass": []interface{}{ + search.LocalRef{ + Class: "BestRefInnerClass", + Fields: map[string]interface{}{ + "_additional": map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4caa", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + }, + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4caa", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + "ofBestRefInnerInnerClass": []interface{}{ + search.LocalRef{ + Class: "BestRefInnerInnerClass", + Fields: map[string]interface{}{ + "_additional": map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4aaa", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + }, + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4aaa", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "age": 200, + "ofBestRefClass": []interface{}{ + search.LocalRef{ + Class: "BestRefClass", + Fields: map[string]interface{}{ + "_additional": map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4ceb", + "creationTimeUnix": now, + 
"lastUpdateTimeUnix": now, + "vector": vec, + }, + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4ceb", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + "ofBestRefInnerClass": []interface{}{ + search.LocalRef{ + Class: "BestRefInnerClass", + Fields: map[string]interface{}{ + "_additional": map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cbb", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + }, + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4cbb", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + "ofBestRefInnerInnerClass": []interface{}{ + search.LocalRef{ + Class: "BestRefInnerInnerClass", + Fields: map[string]interface{}{ + "_additional": map[string]interface{}{ + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4bbb", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + }, + "id": "2d68456c-73b4-4cfc-a6dc-718efc5b4bbb", + "creationTimeUnix": now, + "lastUpdateTimeUnix": now, + "vector": vec, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "_additional": map[string]interface{}{ + "classification": &additional.Classification{ + ID: "1234", + }, + }, + }, res[1]) + }) + }) + + t.Run("when the almost all _additional props set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + AdditionalProperties: additional.Properties{ + ID: true, + Classification: true, + ModuleParams: map[string]interface{}{ + "interpretation": true, + "nearestNeighbors": true, + }, + }, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + AdditionalProperties: models.AdditionalProperties{ + "classification": &additional.Classification{ + ID: "1234", + }, + "nearestNeighbors": &NearestNeighbors{ + Neighbors: []*NearestNeighbor{ + { + Concept: "foo", + Distance: 0.1, + }, + }, + }, + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "name": 
"Bar", + }, + AdditionalProperties: models.AdditionalProperties{ + "classification": &additional.Classification{ + ID: "5678", + }, + "nearestNeighbors": &NearestNeighbors{ + Neighbors: []*NearestNeighbor{ + { + Concept: "bar", + Distance: 0.1, + }, + }, + }, + }, + }, + } + + searcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + extender := &fakeExtender{ + returnArgs: []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + AdditionalProperties: models.AdditionalProperties{ + "classification": &additional.Classification{ + ID: "1234", + }, + "interpretation": &Interpretation{ + Source: []*InterpretationSource{ + { + Concept: "foo", + Weight: 0.123, + Occurrence: 123, + }, + }, + }, + "nearestNeighbors": &NearestNeighbors{ + Neighbors: []*NearestNeighbor{ + { + Concept: "foo", + Distance: 0.1, + }, + }, + }, + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "name": "Bar", + }, + AdditionalProperties: models.AdditionalProperties{ + "classification": &additional.Classification{ + ID: "5678", + }, + "interpretation": &Interpretation{ + Source: []*InterpretationSource{ + { + Concept: "bar", + Weight: 0.456, + Occurrence: 456, + }, + }, + }, + "nearestNeighbors": &NearestNeighbors{ + Neighbors: []*NearestNeighbor{ + { + Concept: "bar", + Distance: 0.1, + }, + }, + }, + }, + }, + }, + } + explorer := NewExplorer(searcher, log, getFakeModulesProviderWithCustomExtenders(extender, nil, nil), nil, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + searcher. + On("Search", expectedParamsToSearch). 
+ Return(searchResults, nil) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + searcher.AssertExpectations(t) + }) + + t.Run("response must contain concepts8", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + "_additional": map[string]interface{}{ + "id": strfmt.UUID("id1"), + "classification": &additional.Classification{ + ID: "1234", + }, + "nearestNeighbors": &NearestNeighbors{ + Neighbors: []*NearestNeighbor{ + { + Concept: "foo", + Distance: 0.1, + }, + }, + }, + "interpretation": &Interpretation{ + Source: []*InterpretationSource{ + { + Concept: "foo", + Weight: 0.123, + Occurrence: 123, + }, + }, + }, + }, + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "name": "Bar", + "_additional": map[string]interface{}{ + "id": strfmt.UUID("id2"), + "classification": &additional.Classification{ + ID: "5678", + }, + "nearestNeighbors": &NearestNeighbors{ + Neighbors: []*NearestNeighbor{ + { + Concept: "bar", + Distance: 0.1, + }, + }, + }, + "interpretation": &Interpretation{ + Source: []*InterpretationSource{ + { + Concept: "bar", + Weight: 0.456, + Occurrence: 456, + }, + }, + }, + }, + }, res[1]) + }) + }) +} + +func Test_Explorer_GetClass_With_Modules(t *testing.T) { + t.Run("when an explore param is set for nearCustomText", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"foo"}, + }), + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + Dims: 128, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + }, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + log, _ := 
test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. + On("VectorSearch", expectedParamsToSearch, []models.Vector{[]float32{1, 2, 3}}). + Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearCustomText", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("vector search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain concepts9", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "age": 200, + }, res[1]) + }) + }) + + t.Run("when an explore param is set for nearCustomText and the required distance not met", + func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"foo"}, + "distance": float64(0.2), + }), + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + searchResults := []search.Result{ + { + ID: "id1", + Dist: 2 * 0.69, + Dims: 128, + }, + { + ID: "id2", + Dist: 2 * 0.69, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. 
+ On("VectorSearch", expectedParamsToSearch, []models.Vector{[]float32{1, 2, 3}}). + Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearCustomText", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("vector search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("no object met the required distance", func(t *testing.T) { + assert.Len(t, res, 0) + }) + }) + + t.Run("when an explore param is set for nearCustomText and the required certainty not met", + func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"foo"}, + "certainty": float64(0.8), + }), + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + } + + searchResults := []search.Result{ + { + ID: "id1", + Dist: 2 * 0.69, + Dims: 128, + }, + { + ID: "id2", + Dist: 2 * 0.69, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. + On("VectorSearch", expectedParamsToSearch, []models.Vector{[]float32{1, 2, 3}}). 
+ Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearCustomText", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("vector search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("no object met the required certainty", func(t *testing.T) { + assert.Len(t, res, 0) + }) + }) + + t.Run("when two conflicting (nearVector, nearCustomText) near searchers are set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + NearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{0.8, 0.2, 0.7}}, + }, + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"foo"}, + }), + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + _, err := explorer.GetClass(context.Background(), params) + require.NotNil(t, err) + assert.Contains(t, err.Error(), "parameters which are conflicting") + }) + + t.Run("when two conflicting (nearCustomText, nearObject) near searchers are set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + NearObject: &searchparams.NearObject{ + Beacon: "weaviate://localhost/e9c12c22-766f-4bde-b140-d4cf8fd6e041", + }, + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"foo"}, + }), + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := 
&fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + _, err := explorer.GetClass(context.Background(), params) + require.NotNil(t, err) + assert.Contains(t, err.Error(), "parameters which are conflicting") + }) + + t.Run("when three conflicting (nearCustomText, nearVector, nearObject) near searchers are set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + NearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{0.8, 0.2, 0.7}}, + }, + NearObject: &searchparams.NearObject{ + Beacon: "weaviate://localhost/e9c12c22-766f-4bde-b140-d4cf8fd6e041", + }, + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"foo"}, + }), + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + _, err := explorer.GetClass(context.Background(), params) + require.NotNil(t, err) + assert.Contains(t, err.Error(), "parameters which are conflicting") + }) + + t.Run("when nearCustomText.moveTo has no concepts and objects defined", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"foo"}, + "moveTo": map[string]interface{}{ + "force": float64(0.1), + }, + }), + }, + } + + search := 
&fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + _, err := explorer.GetClass(context.Background(), params) + require.NotNil(t, err) + assert.Contains(t, err.Error(), "needs to have defined either 'concepts' or 'objects' fields") + }) + + t.Run("when nearCustomText.moveAwayFrom has no concepts and objects defined", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"foo"}, + "moveAwayFrom": map[string]interface{}{ + "force": float64(0.1), + }, + }), + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + _, err := explorer.GetClass(context.Background(), params) + require.NotNil(t, err) + assert.Contains(t, err.Error(), "needs to have defined either 'concepts' or 'objects' fields") + }) + + t.Run("when the distance prop is set", func(t *testing.T) { + params := dto.GetParams{ + Filters: nil, + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + AdditionalProperties: additional.Properties{ + Distance: true, + }, + ModuleParams: map[string]interface{}{ + "nearCustomText": 
extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"foobar"}, + "limit": 100, + "distance": float64(1.38), + }), + }, + } + + searchResults := []search.Result{ + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + }, + Vector: []float32{0.5, 1.5, 0.0}, + Dist: 2 * 0.69, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + search. + On("VectorSearch", expectedParamsToSearch, []models.Vector{[]float32{1.0, 2.0, 3.0}}). + Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearCustomText", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain concepts10", func(t *testing.T) { + require.Len(t, res, 1) + + resMap := res[0].(map[string]interface{}) + assert.Equal(t, 2, len(resMap)) + assert.Contains(t, resMap, "age") + assert.Equal(t, 200, resMap["age"]) + additionalMap := resMap["_additional"] + assert.Contains(t, additionalMap, "distance") + assert.InEpsilon(t, 1.38, additionalMap.(map[string]interface{})["distance"].(float32), 0.000001) + }) + }) + + t.Run("when the certainty prop is set", func(t *testing.T) { + params := dto.GetParams{ + Filters: nil, + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + AdditionalProperties: additional.Properties{ + Certainty: true, + }, + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"foobar"}, + "limit": 100, + 
"certainty": float64(0.1), + }), + }, + } + + searchResults := []search.Result{ + { + ID: "id2", + Schema: map[string]interface{}{ + "age": 200, + }, + Vector: []float32{0.5, 1.5, 0.0}, + Dist: 2 * 0.69, + Dims: 128, + }, + } + + search := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + schemaGetter := newFakeSchemaGetter("BestClass") + schemaGetter.SetVectorIndexConfig(hnsw.UserConfig{Distance: "cosine"}) + explorer.schemaGetter = schemaGetter + expectedParamsToSearch := params + search. + On("VectorSearch", expectedParamsToSearch, []models.Vector{[]float32{1.0, 2.0, 3.0}}). + Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearCustomText", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + search.AssertExpectations(t) + }) + + t.Run("response must contain concepts11", func(t *testing.T) { + require.Len(t, res, 1) + + resMap := res[0].(map[string]interface{}) + assert.Equal(t, 2, len(resMap)) + assert.Contains(t, resMap, "age") + assert.Equal(t, 200, resMap["age"]) + additionalMap := resMap["_additional"] + assert.Contains(t, additionalMap, "certainty") + // Certainty is fixed to 0.69 in this mock + assert.InEpsilon(t, 0.31, additionalMap.(map[string]interface{})["certainty"], 0.000001) + }) + }) + + t.Run("when the semanticPath prop is set", func(t *testing.T) { + params := dto.GetParams{ + ClassName: "BestClass", + Pagination: &filters.Pagination{Limit: 100}, + Filters: nil, + AdditionalProperties: additional.Properties{ + ModuleParams: map[string]interface{}{ + "semanticPath": getDefaultParam("semanticPath"), + }, + }, + 
ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"foobar"}, + }), + }, + } + + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + }, + { + ID: "id2", + Schema: map[string]interface{}{ + "name": "Bar", + }, + }, + } + + searcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + pathBuilder := &fakePathBuilder{ + returnArgs: []search.Result{ + { + ID: "id1", + Dims: 128, + Schema: map[string]interface{}{ + "name": "Foo", + }, + AdditionalProperties: models.AdditionalProperties{ + "semanticPath": &SemanticPath{ + Path: []*SemanticPathElement{ + { + Concept: "pathelem1", + DistanceToQuery: 0, + DistanceToResult: 2.1, + DistanceToPrevious: nil, + DistanceToNext: ptFloat32(0.5), + }, + { + Concept: "pathelem2", + DistanceToQuery: 2.1, + DistanceToResult: 0, + DistanceToPrevious: ptFloat32(0.5), + DistanceToNext: nil, + }, + }, + }, + }, + }, + { + ID: "id2", + Dims: 128, + Schema: map[string]interface{}{ + "name": "Bar", + }, + AdditionalProperties: models.AdditionalProperties{ + "semanticPath": &SemanticPath{ + Path: []*SemanticPathElement{ + { + Concept: "pathelem1", + DistanceToQuery: 0, + DistanceToResult: 2.1, + DistanceToPrevious: nil, + DistanceToNext: ptFloat32(0.5), + }, + { + Concept: "pathelem2", + DistanceToQuery: 2.1, + DistanceToResult: 0, + DistanceToPrevious: ptFloat32(0.5), + DistanceToNext: nil, + }, + }, + }, + }, + }, + }, + } + metrics := &fakeMetrics{} + explorer := NewExplorer(searcher, log, getFakeModulesProviderWithCustomExtenders(nil, nil, pathBuilder), metrics, defaultConfig) + explorer.SetSchemaGetter(&fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{Classes: []*models.Class{ + {Class: "BestClass"}, + }}}, + }) + expectedParamsToSearch := params + expectedParamsToSearch.AdditionalProperties.Vector = true // any custom additional params will trigger vector + searcher. 
+ On("VectorSearch", expectedParamsToSearch, []models.Vector{[]float32{1, 2, 3}}). + Return(searchResults, nil) + metrics.On("AddUsageDimensions", "BestClass", "get_graphql", "nearCustomText", 128) + + res, err := explorer.GetClass(context.Background(), params) + + t.Run("class search must be called with right params", func(t *testing.T) { + assert.Nil(t, err) + searcher.AssertExpectations(t) + }) + + t.Run("response must contain concepts 12", func(t *testing.T) { + require.Len(t, res, 2) + assert.Equal(t, + map[string]interface{}{ + "name": "Foo", + "_additional": map[string]interface{}{ + "semanticPath": &SemanticPath{ + Path: []*SemanticPathElement{ + { + Concept: "pathelem1", + DistanceToQuery: 0, + DistanceToResult: 2.1, + DistanceToPrevious: nil, + DistanceToNext: ptFloat32(0.5), + }, + { + Concept: "pathelem2", + DistanceToQuery: 2.1, + DistanceToResult: 0, + DistanceToPrevious: ptFloat32(0.5), + DistanceToNext: nil, + }, + }, + }, + }, + }, res[0]) + assert.Equal(t, + map[string]interface{}{ + "name": "Bar", + "_additional": map[string]interface{}{ + "semanticPath": &SemanticPath{ + Path: []*SemanticPathElement{ + { + Concept: "pathelem1", + DistanceToQuery: 0, + DistanceToResult: 2.1, + DistanceToPrevious: nil, + DistanceToNext: ptFloat32(0.5), + }, + { + Concept: "pathelem2", + DistanceToQuery: 2.1, + DistanceToResult: 0, + DistanceToPrevious: ptFloat32(0.5), + DistanceToNext: nil, + }, + }, + }, + }, + }, res[1]) + }) + }) +} + +func ptFloat32(in float32) *float32 { + return &in +} + +type fakeModulesProvider struct { + customC11yModule *fakeText2vecContextionaryModule +} + +func (p *fakeModulesProvider) VectorFromInput(ctx context.Context, className, input, targetVector string) ([]float32, error) { + panic("not implemented") +} + +func (p *fakeModulesProvider) MultiVectorFromInput(ctx context.Context, className, input, targetVector string) ([][]float32, error) { + panic("not implemented") +} + +func (p *fakeModulesProvider) 
IsTargetVectorMultiVector(className, targetVector string) (bool, error) { + // all target vectors are regular vectors + return false, nil +} + +func (p *fakeModulesProvider) VectorFromSearchParam(ctx context.Context, className, targetVector, tenant, param string, params interface{}, + findVectorFn modulecapabilities.FindVectorFn[[]float32], +) ([]float32, error) { + txt2vec := p.getFakeT2Vec() + vectorForParams := txt2vec.VectorSearches()["nearCustomText"] + vec, err := vectorForParams.VectorForParams(ctx, params, "", findVectorFn, nil) + return vec, err +} + +func (p *fakeModulesProvider) MultiVectorFromSearchParam(ctx context.Context, className, targetVector, tenant, param string, params interface{}, + findVectorFn modulecapabilities.FindVectorFn[[][]float32], +) ([][]float32, error) { + panic("not implemented") +} + +func (p *fakeModulesProvider) TargetsFromSearchParam(className string, params interface{}) ([]string, error) { + targetVector := "" + return []string{targetVector}, nil +} + +func (p *fakeModulesProvider) CrossClassVectorFromSearchParam(ctx context.Context, + param string, params interface{}, + findVectorFn modulecapabilities.FindVectorFn[[]float32], +) ([]float32, string, error) { + txt2vec := p.getFakeT2Vec() + vectorForParams := txt2vec.VectorSearches()["nearCustomText"] + targetVector := "" + vec, err := vectorForParams.VectorForParams(ctx, params, "", findVectorFn, nil) + return vec, targetVector, err +} + +func (p *fakeModulesProvider) MultiCrossClassVectorFromSearchParam(ctx context.Context, + param string, params interface{}, + findVectorFn modulecapabilities.FindVectorFn[[][]float32], +) ([][]float32, string, error) { + panic("not implemented") +} + +func (p *fakeModulesProvider) CrossClassValidateSearchParam(name string, value interface{}) error { + return p.ValidateSearchParam(name, value, "") +} + +func (p *fakeModulesProvider) ValidateSearchParam(name string, value interface{}, className string) error { + txt2vec := p.getFakeT2Vec() + 
arg := txt2vec.Arguments()["nearCustomText"] + return arg.ValidateFunction(value) +} + +func (p *fakeModulesProvider) GetExploreAdditionalExtend(ctx context.Context, in []search.Result, + moduleParams map[string]interface{}, searchVector models.Vector, + argumentModuleParams map[string]interface{}, +) ([]search.Result, error) { + return p.additionalExtend(ctx, in, moduleParams, searchVector, "ExploreGet") +} + +func (p *fakeModulesProvider) ListExploreAdditionalExtend(ctx context.Context, in []search.Result, + moduleParams map[string]interface{}, + argumentModuleParams map[string]interface{}, +) ([]search.Result, error) { + return p.additionalExtend(ctx, in, moduleParams, nil, "ExploreList") +} + +func (p *fakeModulesProvider) additionalExtend(ctx context.Context, + in search.Results, moduleParams map[string]interface{}, + searchVector models.Vector, capability string, +) (search.Results, error) { + txt2vec := p.getFakeT2Vec() + if additionalProperties := txt2vec.AdditionalProperties(); len(additionalProperties) > 0 { + for name, value := range moduleParams { + additionalPropertyFn := p.getAdditionalPropertyFn(additionalProperties[name], capability) + if additionalPropertyFn != nil && value != nil { + searchValue := value + if searchVectorValue, ok := value.(modulecapabilities.AdditionalPropertyWithSearchVector[[]float32]); ok { + searchVectorValue.SetSearchVector(searchVector.([]float32)) + searchValue = searchVectorValue + } + resArray, err := additionalPropertyFn(ctx, in, searchValue, nil, nil, nil) + if err != nil { + return nil, err + } + in = resArray + } else { + return nil, errors.Errorf("unknown capability: %s", name) + } + } + } + return in, nil +} + +func (p *fakeModulesProvider) getAdditionalPropertyFn(additionalProperty modulecapabilities.AdditionalProperty, + capability string, +) modulecapabilities.AdditionalPropertyFn { + switch capability { + case "ObjectGet": + return additionalProperty.SearchFunctions.ObjectGet + case "ObjectList": + return 
additionalProperty.SearchFunctions.ObjectList + case "ExploreGet": + return additionalProperty.SearchFunctions.ExploreGet + case "ExploreList": + return additionalProperty.SearchFunctions.ExploreList + default: + return nil + } +} + +func (p *fakeModulesProvider) getFakeT2Vec() *fakeText2vecContextionaryModule { + if p.customC11yModule != nil { + return p.customC11yModule + } + return &fakeText2vecContextionaryModule{} +} + +func extractNearCustomTextParam(param map[string]interface{}) interface{} { + txt2vec := &fakeText2vecContextionaryModule{} + argument := txt2vec.Arguments()["nearCustomText"] + params, _, _ := argument.ExtractFunction(param) + return params +} + +func getDefaultParam(name string) interface{} { + switch name { + case "featureProjection": + return &fakeProjectorParams{} + case "semanticPath": + return &pathBuilderParams{} + case "nearestNeighbors": + return true + default: + return nil + } +} + +func getFakeModulesProviderWithCustomExtenders( + customExtender *fakeExtender, + customProjector *fakeProjector, + customPathBuilder *fakePathBuilder, +) ModulesProvider { + return &fakeModulesProvider{ + newFakeText2vecContextionaryModuleWithCustomExtender(customExtender, customProjector, customPathBuilder), + } +} + +func getFakeModulesProvider() ModulesProvider { + return &fakeModulesProvider{} +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_validate_scroll.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_validate_scroll.go new file mode 100644 index 0000000000000000000000000000000000000000..c1876edeb571cf93f13c4e43a3c33c6401609c38 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_validate_scroll.go @@ -0,0 +1,34 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "fmt" + + "github.com/weaviate/weaviate/entities/dto" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/schema" +) + +func (e *Explorer) validateCursor(params dto.GetParams) error { + if params.Cursor != nil { + if params.Group != nil || params.HybridSearch != nil || params.KeywordRanking != nil || + params.NearObject != nil || params.NearVector != nil || len(params.ModuleParams) > 0 { + return fmt.Errorf("other params cannot be set with after and limit parameters") + } + if err := filters.ValidateCursor(schema.ClassName(params.ClassName), + params.Cursor, params.Pagination.Offset, params.Filters, params.Sort); err != nil { + return err + } + } + return nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_validate_sort.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_validate_sort.go new file mode 100644 index 0000000000000000000000000000000000000000..53577aae0494b7bd7e1b5ecb63267b824239baeb --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_validate_sort.go @@ -0,0 +1,24 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/schema" +) + +func (e *Explorer) validateSort(className string, sort []filters.Sort) error { + if len(sort) == 0 { + return nil + } + return filters.ValidateSort(e.schemaGetter.ReadOnlyClass, schema.ClassName(className), sort) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_validate_sort_test.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_validate_sort_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c619a5765b828e4a0d49e1033a02d94a1291475e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/explorer_validate_sort_test.go @@ -0,0 +1,404 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + "errors" + "testing" + + testLogger "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/dto" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/searchparams" +) + +func Test_Explorer_GetClass_WithSort(t *testing.T) { + type testData struct { + name string + params dto.GetParams + expectedError error + } + + oneSortFilter := []testData{ + { + name: "invalid order parameter", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{{Path: nil, Order: "asce"}}, + }, + expectedError: errors.New(`invalid 'sort' parameter: sort parameter at position 0: ` + + `invalid order parameter, possible values are: ["asc", "desc"] not: "asce"`), + }, + { + name: "empty path", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{{Path: nil, Order: "asc"}}, + }, + expectedError: errors.New("invalid 'sort' parameter: sort parameter at position 0: " + + "path parameter cannot be empty"), + }, + { + name: "non-existent class", + params: dto.GetParams{ + ClassName: "NonExistentClass", + Sort: []filters.Sort{{Path: []string{"property"}, Order: "asc"}}, + }, + expectedError: errors.New("invalid 'sort' parameter: sort parameter at position 0: " + + "class \"NonExistentClass\" does not exist in schema"), + }, + { + name: "non-existent property in class", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{{Path: []string{"nonexistentproperty"}, Order: "asc"}}, + }, + expectedError: errors.New("invalid 'sort' parameter: sort parameter at position 0: " + + "no such prop with name 'nonexistentproperty' found in class 'ClassOne' in the schema. 
" + + "Check your schema files for which properties in this class are available"), + }, + { + name: "reference property in class", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{{Path: []string{"ref_prop"}, Order: "asc"}}, + }, + expectedError: errors.New("invalid 'sort' parameter: sort parameter at position 0: " + + "sorting by reference not supported, " + + "property \"ref_prop\" is a ref prop to the class \"ClassTwo\""), + }, + { + name: "reference property path", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{{Path: []string{"ref", "prop"}, Order: "asc"}}, + }, + expectedError: errors.New("invalid 'sort' parameter: sort parameter at position 0: " + + "sorting by reference not supported, " + + "path must have exactly one argument"), + }, + { + name: "invalid order parameter", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{{Path: nil, Order: "asce"}}, + }, + expectedError: errors.New(`invalid 'sort' parameter: sort parameter at position 0: ` + + `invalid order parameter, possible values are: ["asc", "desc"] not: "asce"`), + }, + } + + twoSortFilters := []testData{ + { + name: "invalid order parameter", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: nil, Order: "asce"}, + {Path: nil, Order: "desce"}, + }, + }, + expectedError: errors.New(`invalid 'sort' parameter: ` + + `sort parameter at position 0: ` + + `invalid order parameter, possible values are: ["asc", "desc"] not: "asce", ` + + `sort parameter at position 1: ` + + `invalid order parameter, possible values are: ["asc", "desc"] not: "desce"`), + }, + { + name: "empty path", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: nil, Order: "asc"}, + {Path: []string{}, Order: "asc"}, + }, + }, + expectedError: errors.New("invalid 'sort' parameter: " + + "sort parameter at position 0: path parameter cannot be empty, " + + "sort parameter at position 1: path parameter 
cannot be empty"), + }, + { + name: "non-existent class", + params: dto.GetParams{ + ClassName: "NonExistentClass", + Sort: []filters.Sort{ + {Path: []string{"property"}, Order: "asc"}, + {Path: []string{"property"}, Order: "asc"}, + }, + }, + expectedError: errors.New("invalid 'sort' parameter: " + + "sort parameter at position 0: " + + "class \"NonExistentClass\" does not exist in schema, " + + "sort parameter at position 1: " + + "class \"NonExistentClass\" does not exist in schema"), + }, + { + name: "non-existent property in class", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: []string{"nonexistentproperty1"}, Order: "asc"}, + {Path: []string{"nonexistentproperty2"}, Order: "asc"}, + }, + }, + expectedError: errors.New("invalid 'sort' parameter: " + + "sort parameter at position 0: " + + "no such prop with name 'nonexistentproperty1' found in class 'ClassOne' in the schema. " + + "Check your schema files for which properties in this class are available, " + + "sort parameter at position 1: " + + "no such prop with name 'nonexistentproperty2' found in class 'ClassOne' in the schema. 
" + + "Check your schema files for which properties in this class are available"), + }, + { + name: "reference property in class", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: []string{"ref_prop"}, Order: "asc"}, + {Path: []string{"ref_prop"}, Order: "desc"}, + }, + }, + expectedError: errors.New("invalid 'sort' parameter: " + + "sort parameter at position 0: " + + "sorting by reference not supported, " + + "property \"ref_prop\" is a ref prop to the class \"ClassTwo\", " + + "sort parameter at position 1: " + + "sorting by reference not supported, " + + "property \"ref_prop\" is a ref prop to the class \"ClassTwo\""), + }, + { + name: "reference property path", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: []string{"ref", "prop"}, Order: "asc"}, + {Path: []string{"ref", "prop"}, Order: "asc"}, + }, + }, + expectedError: errors.New("invalid 'sort' parameter: " + + "sort parameter at position 0: " + + "sorting by reference not supported, " + + "path must have exactly one argument, " + + "sort parameter at position 1: " + + "sorting by reference not supported, " + + "path must have exactly one argument"), + }, + { + name: "reference properties path", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: []string{"ref_prop"}, Order: "asc"}, + {Path: []string{"ref", "prop"}, Order: "asc"}, + }, + }, + expectedError: errors.New("invalid 'sort' parameter: " + + "sort parameter at position 0: " + + "sorting by reference not supported, " + + "property \"ref_prop\" is a ref prop to the class \"ClassTwo\", " + + "sort parameter at position 1: " + + "sorting by reference not supported, " + + "path must have exactly one argument"), + }, + { + name: "reference properties path", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: []string{"ref_prop"}, Order: "asc"}, + {Path: []string{"ref", "prop"}, Order: "asc"}, + }, + }, + expectedError: 
errors.New("invalid 'sort' parameter: " + + "sort parameter at position 0: " + + "sorting by reference not supported, " + + "property \"ref_prop\" is a ref prop to the class \"ClassTwo\", " + + "sort parameter at position 1: " + + "sorting by reference not supported, " + + "path must have exactly one argument"), + }, + } + + oneOfTwoSortFilters := []testData{ + { + name: "invalid order parameter", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: []string{"text_prop"}, Order: "asc"}, + {Path: nil, Order: "desce"}, + }, + }, + expectedError: errors.New(`invalid 'sort' parameter: ` + + `sort parameter at position 1: ` + + `invalid order parameter, possible values are: ["asc", "desc"] not: "desce"`), + }, + { + name: "empty path", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: []string{"text_prop"}, Order: "asc"}, + {Path: []string{}, Order: "asc"}, + }, + }, + expectedError: errors.New("invalid 'sort' parameter: " + + "sort parameter at position 1: path parameter cannot be empty"), + }, + { + name: "non-existent property in class", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: []string{"text_prop"}, Order: "asc"}, + {Path: []string{"nonexistentproperty2"}, Order: "asc"}, + }, + }, + expectedError: errors.New("invalid 'sort' parameter: " + + "sort parameter at position 1: " + + "no such prop with name 'nonexistentproperty2' found in class 'ClassOne' in the schema. 
" + + "Check your schema files for which properties in this class are available"), + }, + { + name: "reference property in class", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: []string{"text_prop"}, Order: "asc"}, + {Path: []string{"ref_prop"}, Order: "desc"}, + }, + }, + expectedError: errors.New("invalid 'sort' parameter: " + + "sort parameter at position 1: " + + "sorting by reference not supported, " + + "property \"ref_prop\" is a ref prop to the class \"ClassTwo\""), + }, + { + name: "reference property path", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: []string{"text_prop"}, Order: "asc"}, + {Path: []string{"ref", "prop"}, Order: "asc"}, + }, + }, + expectedError: errors.New("invalid 'sort' parameter: " + + "sort parameter at position 1: " + + "sorting by reference not supported, " + + "path must have exactly one argument"), + }, + { + name: "reference properties path", + params: dto.GetParams{ + ClassName: "ClassOne", + Sort: []filters.Sort{ + {Path: []string{"text_prop"}, Order: "asc"}, + {Path: []string{"ref_prop"}, Order: "asc"}, + {Path: []string{"ref", "prop"}, Order: "asc"}, + }, + }, + expectedError: errors.New("invalid 'sort' parameter: " + + "sort parameter at position 1: " + + "sorting by reference not supported, " + + "property \"ref_prop\" is a ref prop to the class \"ClassTwo\", " + + "sort parameter at position 2: " + + "sorting by reference not supported, " + + "path must have exactly one argument"), + }, + } + + properSortFilters := []testData{ + { + name: "sort by text_prop", + params: dto.GetParams{ + ClassName: "ClassOne", + NearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{0.8, 0.2, 0.7}}, + }, + Sort: []filters.Sort{ + {Path: []string{"text_prop"}, Order: "asc"}, + }, + }, + }, + } + + testCases := []struct { + name string + testData []testData + }{ + { + name: "one sort filter broken", + testData: oneSortFilter, + }, + { + name: "two 
sort filters broken", + testData: twoSortFilters, + }, + { + name: "one of two sort filters broken", + testData: oneOfTwoSortFilters, + }, + { + name: "proper sort filters", + testData: properSortFilters, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + for _, td := range tc.testData { + t.Run(td.name, func(t *testing.T) { + params := td.params + searchResults := []search.Result{ + { + ID: "id1", + Schema: map[string]interface{}{ + "name": "Foo", + }, + }, + } + + search := &fakeVectorSearcher{} + sg := &fakeSchemaGetter{ + schema: schemaForFiltersValidation(), + } + log, _ := testLogger.NewNullLogger() + metrics := &fakeMetrics{} + metrics.On("AddUsageDimensions", mock.Anything, mock.Anything, mock.Anything, + mock.Anything) + explorer := NewExplorer(search, log, getFakeModulesProvider(), metrics, defaultConfig) + explorer.SetSchemaGetter(sg) + + if td.expectedError == nil { + search. + On("VectorSearch", mock.Anything, mock.Anything). + Return(searchResults, nil) + res, err := explorer.GetClass(context.Background(), params) + assert.Nil(t, err) + search.AssertExpectations(t) + require.Len(t, res, 1) + } else { + _, err := explorer.GetClass(context.Background(), params) + require.NotNil(t, err) + assert.Equal(t, err.Error(), td.expectedError.Error()) + } + }) + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/fakes_for_test.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/fakes_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..229fb7bb50d627be5f59db6a2dea07229fa0d58a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/fakes_for_test.go @@ -0,0 +1,884 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + "fmt" + + "github.com/go-openapi/strfmt" + "github.com/pkg/errors" + "github.com/stretchr/testify/mock" + "github.com/tailor-inc/graphql" + "github.com/tailor-inc/graphql/language/ast" + "github.com/weaviate/weaviate/adapters/handlers/graphql/descriptions" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/aggregation" + "github.com/weaviate/weaviate/entities/dto" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/moduletools" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/searchparams" + "github.com/weaviate/weaviate/entities/storobj" + "github.com/weaviate/weaviate/entities/vectorindex/hnsw" + "github.com/weaviate/weaviate/usecases/modulecomponents/generictypes" + "github.com/weaviate/weaviate/usecases/modules" +) + +type ClassIndexCheck interface { + PropertyIndexed(property string) bool + VectorizeClassName() bool + VectorizePropertyName(propertyName string) bool +} + +type fakeTxt2VecVectorizer struct{} + +func (f *fakeTxt2VecVectorizer) Object(ctx context.Context, object *models.Object, icheck ClassIndexCheck) error { + panic("not implemented") +} + +func (f *fakeTxt2VecVectorizer) Corpi(ctx context.Context, corpi []string) ([]float32, error) { + return []float32{1, 2, 3}, nil +} + +func (f *fakeTxt2VecVectorizer) MoveTo(source []float32, target []float32, weight float32) ([]float32, error) { + res := make([]float32, len(source)) + for i, v := range source { + res[i] = v + 1 + } + return res, nil +} + +func (f *fakeTxt2VecVectorizer) MoveAwayFrom(source []float32, target []float32, weight float32) ([]float32, error) { + res := make([]float32, len(source)) + for i, v := range source { + res[i] = 
v - 0.5 + } + return res, nil +} + +type fakeVectorSearcher struct { + mock.Mock + calledWithVector models.Vector + calledWithLimit int + calledWithOffset int + results []search.Result +} + +func (f *fakeVectorSearcher) CrossClassVectorSearch(ctx context.Context, + vector models.Vector, targetVector string, offset, limit int, filters *filters.LocalFilter, +) ([]search.Result, error) { + f.calledWithVector = vector + f.calledWithLimit = limit + f.calledWithOffset = offset + return f.results, nil +} + +func (f *fakeVectorSearcher) Aggregate(ctx context.Context, + params aggregation.Params, modules *modules.Provider, +) (*aggregation.Result, error) { + args := f.Called(params) + return args.Get(0).(*aggregation.Result), args.Error(1) +} + +func (f *fakeVectorSearcher) VectorSearch(ctx context.Context, + params dto.GetParams, targetVectors []string, searchVectors []models.Vector, +) ([]search.Result, error) { + args := f.Called(params, searchVectors) + return args.Get(0).([]search.Result), args.Error(1) +} + +func (f *fakeVectorSearcher) Search(ctx context.Context, + params dto.GetParams, +) ([]search.Result, error) { + args := f.Called(params) + return args.Get(0).([]search.Result), args.Error(1) +} + +func (f *fakeVectorSearcher) Object(ctx context.Context, + className string, id strfmt.UUID, props search.SelectProperties, + additional additional.Properties, repl *additional.ReplicationProperties, + tenant string, +) (*search.Result, error) { + args := f.Called(className, id) + return args.Get(0).(*search.Result), args.Error(1) +} + +func (f *fakeVectorSearcher) ObjectsByID(ctx context.Context, id strfmt.UUID, + props search.SelectProperties, additional additional.Properties, tenant string, +) (search.Results, error) { + args := f.Called(id) + return args.Get(0).(search.Results), args.Error(1) +} + +func (f *fakeVectorSearcher) SparseObjectSearch(ctx context.Context, + params dto.GetParams, +) ([]*storobj.Object, []float32, error) { + return nil, nil, nil +} + +func 
(f *fakeVectorSearcher) ResolveReferences(ctx context.Context, objs search.Results, + props search.SelectProperties, groupBy *searchparams.GroupBy, + additional additional.Properties, tenant string, +) (search.Results, error) { + return nil, nil +} + +type fakeVectorRepo struct { + mock.Mock +} + +func (f *fakeVectorRepo) ObjectsByID(ctx context.Context, + id strfmt.UUID, props search.SelectProperties, + additional additional.Properties, tenant string, +) (search.Results, error) { + return nil, nil +} + +func (f *fakeVectorRepo) Object(ctx context.Context, className string, id strfmt.UUID, + props search.SelectProperties, additional additional.Properties, + repl *additional.ReplicationProperties, tenant string, +) (*search.Result, error) { + return nil, nil +} + +func (f *fakeVectorRepo) Aggregate(ctx context.Context, + params aggregation.Params, modules *modules.Provider, +) (*aggregation.Result, error) { + args := f.Called(params) + return args.Get(0).(*aggregation.Result), args.Error(1) +} + +func (f *fakeVectorRepo) GetObject(ctx context.Context, uuid strfmt.UUID, + res *models.Object, +) error { + args := f.Called(uuid) + *res = args.Get(0).(models.Object) + return args.Error(1) +} + +type fakeExplorer struct{} + +func (f *fakeExplorer) GetClass(ctx context.Context, p dto.GetParams) ([]interface{}, error) { + return nil, nil +} + +func (f *fakeExplorer) CrossClassVectorSearch(ctx context.Context, p ExploreParams) ([]search.Result, error) { + return nil, nil +} + +type fakeSchemaGetter struct { + schema schema.Schema +} + +func newFakeSchemaGetter(className string) *fakeSchemaGetter { + return &fakeSchemaGetter{ + schema: schema.Schema{Objects: &models.Schema{ + Classes: []*models.Class{ + { + Class: className, + }, + }, + }}, + } +} + +func (f *fakeSchemaGetter) SetVectorIndexConfig(cfg hnsw.UserConfig) { + for _, cls := range f.schema.Objects.Classes { + cls.VectorIndexConfig = cfg + } +} + +func (f *fakeSchemaGetter) GetSchemaSkipAuth() schema.Schema { + 
return f.schema +} + +func (f *fakeSchemaGetter) ReadOnlyClass(className string) *models.Class { + return f.schema.GetClass(className) +} + +func (f *fakeSchemaGetter) ShardOwner(class, shard string) (string, error) { + return shard, nil +} + +func (f *fakeSchemaGetter) ShardReplicas(class, shard string) ([]string, error) { + return []string{shard}, nil +} + +func (f *fakeSchemaGetter) TenantsShards(_ context.Context, class string, tenants ...string) (map[string]string, error) { + res := map[string]string{} + for _, t := range tenants { + res[t] = models.TenantActivityStatusHOT + } + return res, nil +} + +func (f *fakeSchemaGetter) OptimisticTenantStatus(_ context.Context, class string, tenant string) (map[string]string, error) { + res := map[string]string{} + res[tenant] = models.TenantActivityStatusHOT + return res, nil +} + +func (f *fakeSchemaGetter) ShardFromUUID(class string, uuid []byte) string { return string(uuid) } + +func (f *fakeSchemaGetter) Nodes() []string { + panic("not implemented") +} + +func (f *fakeSchemaGetter) NodeName() string { + panic("not implemented") +} + +func (f *fakeSchemaGetter) ClusterHealthScore() int { + panic("not implemented") +} + +func (f *fakeSchemaGetter) ResolveParentNodes(string, string, +) (map[string]string, error) { + panic("not implemented") +} + +func (f *fakeSchemaGetter) Statistics() map[string]any { + panic("not implemented") +} + +func (f *fakeSchemaGetter) ResolveAlias(string) string { + return "" +} + +func (f *fakeSchemaGetter) GetAliasesForClass(string) []*models.Alias { + return nil +} + +type fakeInterpretation struct{} + +func (f *fakeInterpretation) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + return in, nil +} + +func (f *fakeInterpretation) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return true +} + +func (f 
*fakeInterpretation) AdditionalPropertyDefaultValue() interface{} { + return true +} + +type fakeExtender struct { + returnArgs []search.Result +} + +func (f *fakeExtender) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + return f.returnArgs, nil +} + +func (f *fakeExtender) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return nil +} + +func (f *fakeExtender) AdditionalPropertyDefaultValue() interface{} { + return true +} + +type fakeProjectorParams struct { + Enabled bool + Algorithm string + Dimensions int + Perplexity int + Iterations int + LearningRate int + IncludeNeighbors bool +} + +type fakeProjector struct { + returnArgs []search.Result +} + +func (f *fakeProjector) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + return f.returnArgs, nil +} + +func (f *fakeProjector) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return nil +} + +func (f *fakeProjector) AdditionalPropertyDefaultValue() interface{} { + return &fakeProjectorParams{} +} + +type pathBuilderParams struct{} + +type fakePathBuilder struct { + returnArgs []search.Result +} + +func (f *fakePathBuilder) AdditionalPropertyFn(ctx context.Context, + in []search.Result, params interface{}, limit *int, + argumentModuleParams map[string]interface{}, cfg moduletools.ClassConfig, +) ([]search.Result, error) { + return f.returnArgs, nil +} + +func (f *fakePathBuilder) ExtractAdditionalFn(param []*ast.Argument, class *models.Class) interface{} { + return nil +} + +func (f *fakePathBuilder) AdditionalPropertyDefaultValue() interface{} { + return &pathBuilderParams{} +} + +type fakeText2vecContextionaryModule struct { + customExtender 
*fakeExtender + customProjector *fakeProjector + customPathBuilder *fakePathBuilder + customInterpretation *fakeInterpretation +} + +func newFakeText2vecContextionaryModuleWithCustomExtender( + customExtender *fakeExtender, + customProjector *fakeProjector, + customPathBuilder *fakePathBuilder, +) *fakeText2vecContextionaryModule { + return &fakeText2vecContextionaryModule{customExtender, customProjector, customPathBuilder, &fakeInterpretation{}} +} + +func (m *fakeText2vecContextionaryModule) Name() string { + return "text2vec-contextionary" +} + +func (m *fakeText2vecContextionaryModule) Init(params moduletools.ModuleInitParams) error { + return nil +} + +func (m *fakeText2vecContextionaryModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + return newNearCustomTextModule(m.getExtender(), m.getProjector(), m.getPathBuilder(), m.getInterpretation()).Arguments() +} + +func (m *fakeText2vecContextionaryModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + searcher := &fakeSearcher{&fakeTxt2VecVectorizer{}} + return searcher.VectorSearches() +} + +func (m *fakeText2vecContextionaryModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + return newNearCustomTextModule(m.getExtender(), m.getProjector(), m.getPathBuilder(), m.getInterpretation()).AdditionalProperties() +} + +func (m *fakeText2vecContextionaryModule) getExtender() *fakeExtender { + if m.customExtender != nil { + return m.customExtender + } + return &fakeExtender{} +} + +func (m *fakeText2vecContextionaryModule) getProjector() *fakeProjector { + if m.customProjector != nil { + return m.customProjector + } + return &fakeProjector{} +} + +func (m *fakeText2vecContextionaryModule) getPathBuilder() *fakePathBuilder { + if m.customPathBuilder != nil { + return m.customPathBuilder + } + return &fakePathBuilder{} +} + +func (m *fakeText2vecContextionaryModule) getInterpretation() *fakeInterpretation { + if m.customInterpretation != nil { 
+ return m.customInterpretation + } + return &fakeInterpretation{} +} + +type nearCustomTextParams struct { + Values []string + MoveTo nearExploreMove + MoveAwayFrom nearExploreMove + Certainty float64 + Distance float64 + WithDistance bool + TargetVectors []string +} + +func (p nearCustomTextParams) GetCertainty() float64 { + return p.Certainty +} + +func (p nearCustomTextParams) GetDistance() float64 { + return p.Distance +} + +func (p nearCustomTextParams) SimilarityMetricProvided() bool { + return p.Certainty != 0 || p.WithDistance +} + +func (n nearCustomTextParams) SupportMultiTargetVector() bool { + return false +} + +func (p nearCustomTextParams) GetTargetVectors() []string { + return p.TargetVectors +} + +func (p nearCustomTextParams) GetTargetCombination() *dto.TargetCombination { + return nil +} + +type nearExploreMove struct { + Values []string + Force float32 + Objects []nearObjectMove +} + +type nearObjectMove struct { + ID string + Beacon string +} + +type nearCustomTextModule struct { + fakeExtender *fakeExtender + fakeProjector *fakeProjector + fakePathBuilder *fakePathBuilder + fakeInterpretation *fakeInterpretation +} + +func newNearCustomTextModule( + fakeExtender *fakeExtender, + fakeProjector *fakeProjector, + fakePathBuilder *fakePathBuilder, + fakeInterpretation *fakeInterpretation, +) *nearCustomTextModule { + return &nearCustomTextModule{fakeExtender, fakeProjector, fakePathBuilder, fakeInterpretation} +} + +func (m *nearCustomTextModule) Name() string { + return "mock-custom-near-text-module" +} + +func (m *nearCustomTextModule) Init(params moduletools.ModuleInitParams) error { + return nil +} + +func (m *nearCustomTextModule) getNearCustomTextArgument(classname string) *graphql.ArgumentConfig { + prefix := classname + return &graphql.ArgumentConfig{ + Type: graphql.NewInputObject( + graphql.InputObjectConfig{ + Name: fmt.Sprintf("%sNearCustomTextInpObj", prefix), + Fields: graphql.InputObjectConfigFieldMap{ + "concepts": 
&graphql.InputObjectFieldConfig{ + Type: graphql.NewNonNull(graphql.NewList(graphql.String)), + }, + "moveTo": &graphql.InputObjectFieldConfig{ + Description: descriptions.VectorMovement, + Type: graphql.NewInputObject( + graphql.InputObjectConfig{ + Name: fmt.Sprintf("%sMoveTo", prefix), + Fields: graphql.InputObjectConfigFieldMap{ + "concepts": &graphql.InputObjectFieldConfig{ + Description: descriptions.Keywords, + Type: graphql.NewList(graphql.String), + }, + "objects": &graphql.InputObjectFieldConfig{ + Description: "objects", + Type: graphql.NewList(graphql.NewInputObject( + graphql.InputObjectConfig{ + Name: fmt.Sprintf("%sMovementObjectsToInpObj", prefix), + Fields: graphql.InputObjectConfigFieldMap{ + "id": &graphql.InputObjectFieldConfig{ + Type: graphql.String, + Description: "id of an object", + }, + "beacon": &graphql.InputObjectFieldConfig{ + Type: graphql.String, + Description: descriptions.Beacon, + }, + }, + Description: "Movement Object", + }, + )), + }, + "force": &graphql.InputObjectFieldConfig{ + Description: descriptions.Force, + Type: graphql.NewNonNull(graphql.Float), + }, + }, + }), + }, + "moveAwayFrom": &graphql.InputObjectFieldConfig{ + Description: descriptions.VectorMovement, + Type: graphql.NewInputObject( + graphql.InputObjectConfig{ + Name: fmt.Sprintf("%sMoveAway", prefix), + Fields: graphql.InputObjectConfigFieldMap{ + "concepts": &graphql.InputObjectFieldConfig{ + Description: descriptions.Keywords, + Type: graphql.NewList(graphql.String), + }, + "objects": &graphql.InputObjectFieldConfig{ + Description: "objects", + Type: graphql.NewList(graphql.NewInputObject( + graphql.InputObjectConfig{ + Name: fmt.Sprintf("%sMovementObjectsAwayInpObj", prefix), + Fields: graphql.InputObjectConfigFieldMap{ + "id": &graphql.InputObjectFieldConfig{ + Type: graphql.String, + Description: "id of an object", + }, + "beacon": &graphql.InputObjectFieldConfig{ + Type: graphql.String, + Description: descriptions.Beacon, + }, + }, + Description: 
"Movement Object", + }, + )), + }, + "force": &graphql.InputObjectFieldConfig{ + Description: descriptions.Force, + Type: graphql.NewNonNull(graphql.Float), + }, + }, + }), + }, + "certainty": &graphql.InputObjectFieldConfig{ + Description: descriptions.Certainty, + Type: graphql.Float, + }, + "distance": &graphql.InputObjectFieldConfig{ + Description: descriptions.Distance, + Type: graphql.Float, + }, + }, + Description: descriptions.GetWhereInpObj, + }, + ), + } +} + +func (m *nearCustomTextModule) extractNearCustomTextArgument(source map[string]interface{}) *nearCustomTextParams { + var args nearCustomTextParams + + concepts := source["concepts"].([]interface{}) + args.Values = make([]string, len(concepts)) + for i, value := range concepts { + args.Values[i] = value.(string) + } + + certainty, ok := source["certainty"] + if ok { + args.Certainty = certainty.(float64) + } + + distance, ok := source["distance"] + if ok { + args.Distance = distance.(float64) + args.WithDistance = true + } + + // moveTo is an optional arg, so it could be nil + moveTo, ok := source["moveTo"] + if ok { + moveToMap := moveTo.(map[string]interface{}) + args.MoveTo = m.parseMoveParam(moveToMap) + } + + moveAwayFrom, ok := source["moveAwayFrom"] + if ok { + moveAwayFromMap := moveAwayFrom.(map[string]interface{}) + args.MoveAwayFrom = m.parseMoveParam(moveAwayFromMap) + } + + return &args +} + +func (m *nearCustomTextModule) parseMoveParam(source map[string]interface{}) nearExploreMove { + res := nearExploreMove{} + res.Force = float32(source["force"].(float64)) + + concepts, ok := source["concepts"].([]interface{}) + if ok { + res.Values = make([]string, len(concepts)) + for i, value := range concepts { + res.Values[i] = value.(string) + } + } + + objects, ok := source["objects"].([]interface{}) + if ok { + res.Objects = make([]nearObjectMove, len(objects)) + for i, value := range objects { + v, ok := value.(map[string]interface{}) + if ok { + if v["id"] != nil { + res.Objects[i].ID = 
v["id"].(string) + } + if v["beacon"] != nil { + res.Objects[i].Beacon = v["beacon"].(string) + } + } + } + } + + return res +} + +func (m *nearCustomTextModule) Arguments() map[string]modulecapabilities.GraphQLArgument { + arguments := map[string]modulecapabilities.GraphQLArgument{} + // define nearCustomText argument + arguments["nearCustomText"] = modulecapabilities.GraphQLArgument{ + GetArgumentsFunction: func(classname string) *graphql.ArgumentConfig { + return m.getNearCustomTextArgument(classname) + }, + ExploreArgumentsFunction: func() *graphql.ArgumentConfig { + return m.getNearCustomTextArgument("") + }, + ExtractFunction: func(source map[string]interface{}) (interface{}, *dto.TargetCombination, error) { + params := m.extractNearCustomTextArgument(source) + return params, nil, nil + }, + ValidateFunction: func(param interface{}) error { + nearText, ok := param.(*nearCustomTextParams) + if !ok { + return errors.New("'nearCustomText' invalid parameter") + } + + if nearText.MoveTo.Force > 0 && + nearText.MoveTo.Values == nil && nearText.MoveTo.Objects == nil { + return errors.Errorf("'nearCustomText.moveTo' parameter " + + "needs to have defined either 'concepts' or 'objects' fields") + } + + if nearText.MoveAwayFrom.Force > 0 && + nearText.MoveAwayFrom.Values == nil && nearText.MoveAwayFrom.Objects == nil { + return errors.Errorf("'nearCustomText.moveAwayFrom' parameter " + + "needs to have defined either 'concepts' or 'objects' fields") + } + + if nearText.Certainty != 0 && nearText.WithDistance { + return errors.Errorf( + "nearText cannot provide both distance and certainty") + } + + return nil + }, + } + return arguments +} + +// additional properties +func (m *nearCustomTextModule) AdditionalProperties() map[string]modulecapabilities.AdditionalProperty { + additionalProperties := map[string]modulecapabilities.AdditionalProperty{} + additionalProperties["featureProjection"] = m.getFeatureProjection() + additionalProperties["nearestNeighbors"] = 
m.getNearestNeighbors() + additionalProperties["semanticPath"] = m.getSemanticPath() + additionalProperties["interpretation"] = m.getInterpretation() + return additionalProperties +} + +func (m *nearCustomTextModule) getFeatureProjection() modulecapabilities.AdditionalProperty { + return modulecapabilities.AdditionalProperty{ + DefaultValue: m.fakeProjector.AdditionalPropertyDefaultValue(), + GraphQLNames: []string{"featureProjection"}, + GraphQLFieldFunction: func(classname string) *graphql.Field { + return &graphql.Field{ + Args: graphql.FieldConfigArgument{ + "algorithm": &graphql.ArgumentConfig{ + Type: graphql.String, + DefaultValue: nil, + }, + "dimensions": &graphql.ArgumentConfig{ + Type: graphql.Int, + DefaultValue: nil, + }, + "learningRate": &graphql.ArgumentConfig{ + Type: graphql.Int, + DefaultValue: nil, + }, + "iterations": &graphql.ArgumentConfig{ + Type: graphql.Int, + DefaultValue: nil, + }, + "perplexity": &graphql.ArgumentConfig{ + Type: graphql.Int, + DefaultValue: nil, + }, + }, + Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalFeatureProjection", classname), + Fields: graphql.Fields{ + "vector": &graphql.Field{Type: graphql.NewList(graphql.Float)}, + }, + }), + } + }, + GraphQLExtractFunction: m.fakeProjector.ExtractAdditionalFn, + SearchFunctions: modulecapabilities.AdditionalSearch{ + ObjectList: m.fakeProjector.AdditionalPropertyFn, + ExploreGet: m.fakeProjector.AdditionalPropertyFn, + ExploreList: m.fakeProjector.AdditionalPropertyFn, + }, + } +} + +func (m *nearCustomTextModule) getNearestNeighbors() modulecapabilities.AdditionalProperty { + return modulecapabilities.AdditionalProperty{ + DefaultValue: m.fakeExtender.AdditionalPropertyDefaultValue(), + GraphQLNames: []string{"nearestNeighbors"}, + GraphQLFieldFunction: func(classname string) *graphql.Field { + return &graphql.Field{ + Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalNearestNeighbors", classname), + Fields: 
graphql.Fields{ + "neighbors": &graphql.Field{Type: graphql.NewList(graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalNearestNeighborsNeighbors", classname), + Fields: graphql.Fields{ + "concept": &graphql.Field{Type: graphql.String}, + "distance": &graphql.Field{Type: graphql.Float}, + }, + }))}, + }, + }), + } + }, + GraphQLExtractFunction: m.fakeExtender.ExtractAdditionalFn, + SearchFunctions: modulecapabilities.AdditionalSearch{ + ObjectGet: m.fakeExtender.AdditionalPropertyFn, + ObjectList: m.fakeExtender.AdditionalPropertyFn, + ExploreGet: m.fakeExtender.AdditionalPropertyFn, + ExploreList: m.fakeExtender.AdditionalPropertyFn, + }, + } +} + +func (m *nearCustomTextModule) getSemanticPath() modulecapabilities.AdditionalProperty { + return modulecapabilities.AdditionalProperty{ + DefaultValue: m.fakePathBuilder.AdditionalPropertyDefaultValue(), + GraphQLNames: []string{"semanticPath"}, + GraphQLFieldFunction: func(classname string) *graphql.Field { + return &graphql.Field{ + Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalSemanticPath", classname), + Fields: graphql.Fields{ + "path": &graphql.Field{Type: graphql.NewList(graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalSemanticPathElement", classname), + Fields: graphql.Fields{ + "concept": &graphql.Field{Type: graphql.String}, + "distanceToQuery": &graphql.Field{Type: graphql.Float}, + "distanceToResult": &graphql.Field{Type: graphql.Float}, + "distanceToNext": &graphql.Field{Type: graphql.Float}, + "distanceToPrevious": &graphql.Field{Type: graphql.Float}, + }, + }))}, + }, + }), + } + }, + GraphQLExtractFunction: m.fakePathBuilder.ExtractAdditionalFn, + SearchFunctions: modulecapabilities.AdditionalSearch{ + ExploreGet: m.fakePathBuilder.AdditionalPropertyFn, + }, + } +} + +func (m *nearCustomTextModule) getInterpretation() modulecapabilities.AdditionalProperty { + return modulecapabilities.AdditionalProperty{ + DefaultValue: 
m.fakeInterpretation.AdditionalPropertyDefaultValue(), + GraphQLNames: []string{"interpretation"}, + GraphQLFieldFunction: func(classname string) *graphql.Field { + return &graphql.Field{ + Type: graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalInterpretation", classname), + Fields: graphql.Fields{ + "source": &graphql.Field{Type: graphql.NewList(graphql.NewObject(graphql.ObjectConfig{ + Name: fmt.Sprintf("%sAdditionalInterpretationSource", classname), + Fields: graphql.Fields{ + "concept": &graphql.Field{Type: graphql.String}, + "weight": &graphql.Field{Type: graphql.Float}, + "occurrence": &graphql.Field{Type: graphql.Int}, + }, + }))}, + }, + }), + } + }, + GraphQLExtractFunction: m.fakeInterpretation.ExtractAdditionalFn, + SearchFunctions: modulecapabilities.AdditionalSearch{ + ObjectGet: m.fakeInterpretation.AdditionalPropertyFn, + ObjectList: m.fakeInterpretation.AdditionalPropertyFn, + ExploreGet: m.fakeInterpretation.AdditionalPropertyFn, + ExploreList: m.fakeInterpretation.AdditionalPropertyFn, + }, + } +} + +func (m *nearCustomTextModule) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearches := map[string]modulecapabilities.VectorForParams[[]float32]{} + return vectorSearches +} + +type fakeSearcher struct { + vectorizer *fakeTxt2VecVectorizer +} + +func (s *fakeSearcher) VectorSearches() map[string]modulecapabilities.VectorForParams[[]float32] { + vectorSearches := map[string]modulecapabilities.VectorForParams[[]float32]{} + vectorSearches["nearCustomText"] = generictypes.VectorForParams(s.vectorForNearTextParam) + return vectorSearches +} + +func (s *fakeSearcher) vectorForNearTextParam(ctx context.Context, params interface{}, + className string, findVectorFn modulecapabilities.FindVectorFn[[]float32], cfg moduletools.ClassConfig, +) ([]float32, error) { + vector, err := s.vectorizer.Corpi(ctx, nil) + if err != nil { + return nil, err + } + + p, ok := params.(*nearCustomTextParams) + if ok 
&& p.MoveTo.Force > 0 { + afterMoveTo, err := s.vectorizer.MoveTo(vector, nil, 0) + if err != nil { + return nil, err + } + vector = afterMoveTo + } + if ok && p.MoveAwayFrom.Force > 0 { + afterMoveAway, err := s.vectorizer.MoveAwayFrom(vector, nil, 0) + if err != nil { + return nil, err + } + vector = afterMoveAway + } + return vector, nil +} + +type fakeMetrics struct { + mock.Mock +} + +func (m *fakeMetrics) AddUsageDimensions(class, query, op string, dims int) { + m.Called(class, query, op, dims) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/hybrid_group_by.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/hybrid_group_by.go new file mode 100644 index 0000000000000000000000000000000000000000..a89033ca6d4c140e578f5ac92395bed6bb8ca632 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/hybrid_group_by.go @@ -0,0 +1,96 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/searchparams" +) + +func (e *Explorer) groupSearchResults(ctx context.Context, sr search.Results, groupBy *searchparams.GroupBy) (search.Results, error) { + groupsOrdered := []string{} + groups := map[string][]search.Result{} + + for _, result := range sr { + prop_i := result.Object().Properties + prop := prop_i.(map[string]interface{}) + val, ok := prop[groupBy.Property].(string) + + if !ok { + continue + } + + current, groupExists := groups[val] + if len(current) >= groupBy.ObjectsPerGroup { + continue + } + + if !groupExists && len(groups) >= groupBy.Groups { + continue + } + + groups[val] = append(current, result) + + if !groupExists { + // this group doesn't exist add it to the ordered list + groupsOrdered = append(groupsOrdered, val) + } + } + + out := make(search.Results, 0, len(sr)) + for i, groupValue := range groupsOrdered { + groupMembers := groups[groupValue] + first := groupMembers[0] + + hits := make([]map[string]interface{}, len(groupMembers)) + + for j, groupMember := range groupMembers { + props := map[string]interface{}{} + for k, v := range groupMember.Object().Properties.(map[string]interface{}) { + props[k] = v + } + props["_additional"] = &additional.GroupHitAdditional{ + ID: groupMember.ID, + Distance: groupMember.Dist, + Vector: groupMember.Vector, + Vectors: groupMember.Vectors, + } + hits[j] = props + } + + group := &additional.Group{ + ID: i, + GroupedBy: &additional.GroupedBy{ + Value: groupValue, + Path: []string{groupBy.Property}, + }, + Count: len(hits), + Hits: hits, + MinDistance: first.Dist, + MaxDistance: first.Dist, + } + + // add group + if first.AdditionalProperties == nil { + first.AdditionalProperties = models.AdditionalProperties{} + } + 
first.AdditionalProperties["group"] = group + + out = append(out, first) + } + + return out, nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/metrics.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/metrics.go new file mode 100644 index 0000000000000000000000000000000000000000..3878f0ae01a9156d71835ef66f7b04d2cd65dd44 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/metrics.go @@ -0,0 +1,135 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/weaviate/weaviate/usecases/monitoring" +) + +type Metrics struct { + queriesCount *prometheus.GaugeVec + queriesDurations *prometheus.HistogramVec + dimensions *prometheus.CounterVec + dimensionsCombined prometheus.Counter + groupClasses bool +} + +func NewMetrics(prom *monitoring.PrometheusMetrics) *Metrics { + if prom == nil { + return nil + } + + return &Metrics{ + queriesCount: prom.QueriesCount, + queriesDurations: prom.QueriesDurations, + dimensions: prom.QueryDimensions, + dimensionsCombined: prom.QueryDimensionsCombined, + groupClasses: prom.Group, + } +} + +func (m *Metrics) QueriesAggregateInc(className string) { + if m == nil { + return + } + + if m.groupClasses { + className = "n/a" + } + + m.queriesCount.With(prometheus.Labels{ + "class_name": className, + "query_type": "aggregate", + }).Inc() +} + +func (m *Metrics) QueriesAggregateDec(className string) { + if m == nil { + return + } + + if m.groupClasses { + className = "n/a" + } + + m.queriesCount.With(prometheus.Labels{ + "class_name": className, + "query_type": "aggregate", + }).Dec() +} + +func (m *Metrics) QueriesGetInc(className string) { + if m == nil 
{ + return + } + + if m.groupClasses { + className = "n/a" + } + + m.queriesCount.With(prometheus.Labels{ + "class_name": className, + "query_type": "get_graphql", + }).Inc() +} + +func (m *Metrics) QueriesObserveDuration(className string, startMs int64) { + if m == nil { + return + } + + if m.groupClasses { + className = "n/a" + } + + took := float64(time.Now().UnixMilli() - startMs) + + m.queriesDurations.With(prometheus.Labels{ + "class_name": className, + "query_type": "get_graphql", + }).Observe(float64(took)) +} + +func (m *Metrics) QueriesGetDec(className string) { + if m == nil { + return + } + + if m.groupClasses { + className = "n/a" + } + + m.queriesCount.With(prometheus.Labels{ + "class_name": className, + "query_type": "get_graphql", + }).Dec() +} + +func (m *Metrics) AddUsageDimensions(className, queryType, operation string, dims int) { + if m == nil { + return + } + + if m.groupClasses { + className = "n/a" + } + + m.dimensions.With(prometheus.Labels{ + "class_name": className, + "operation": operation, + "query_type": queryType, + }).Add(float64(dims)) + m.dimensionsCombined.Add(float64(dims)) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/models_for_test.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/models_for_test.go new file mode 100644 index 0000000000000000000000000000000000000000..9f6774801535543b3b667ad0c744c5ec536a2e37 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/models_for_test.go @@ -0,0 +1,48 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
// FeatureProjection is the JSON shape of the _additional.featureProjection
// payload asserted in traverser tests.
type FeatureProjection struct {
	Vector []float32 `json:"vector"`
}

// NearestNeighbors is the JSON shape of the _additional.nearestNeighbors
// payload.
type NearestNeighbors struct {
	Neighbors []*NearestNeighbor `json:"neighbors"`
}

// NearestNeighbor is one entry of a NearestNeighbors result.
type NearestNeighbor struct {
	Concept  string    `json:"concept,omitempty"`
	Distance float32   `json:"distance,omitempty"`
	Vector   []float32 `json:"vector"`
}

// SemanticPath is the JSON shape of the _additional.semanticPath payload.
type SemanticPath struct {
	Path []*SemanticPathElement `json:"path"`
}

// SemanticPathElement is one step of a SemanticPath. The *float32 distances
// are pointers so that an absent value (first/last element) is distinguishable
// from 0.
type SemanticPathElement struct {
	Concept            string   `json:"concept,omitempty"`
	DistanceToNext     *float32 `json:"distanceToNext,omitempty"`
	DistanceToPrevious *float32 `json:"distanceToPrevious,omitempty"`
	DistanceToQuery    float32  `json:"distanceToQuery,omitempty"`
	DistanceToResult   float32  `json:"distanceToResult,omitempty"`
}

// Interpretation is the JSON shape of the _additional.interpretation payload.
type Interpretation struct {
	Source []*InterpretationSource `json:"source"`
}

// InterpretationSource is one contributing concept of an Interpretation.
type InterpretationSource struct {
	Concept    string  `json:"concept,omitempty"`
	Occurrence uint64  `json:"occurrence,omitempty"`
	Weight     float64 `json:"weight,omitempty"`
}
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + "fmt" + "strings" + + "github.com/go-openapi/strfmt" + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modelsext" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/schema/crossref" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/searchparams" + "github.com/weaviate/weaviate/usecases/modulecomponents/generictypes" + libvectorizer "github.com/weaviate/weaviate/usecases/vectorizer" +) + +type nearParamsVector struct { + modulesProvider ModulesProvider + search nearParamsSearcher +} + +type nearParamsSearcher interface { + Object(ctx context.Context, className string, id strfmt.UUID, + props search.SelectProperties, additional additional.Properties, + repl *additional.ReplicationProperties, tenant string) (*search.Result, error) + ObjectsByID(ctx context.Context, id strfmt.UUID, props search.SelectProperties, + additional additional.Properties, tenant string) (search.Results, error) +} + +func newNearParamsVector(modulesProvider ModulesProvider, search nearParamsSearcher) *nearParamsVector { + return &nearParamsVector{modulesProvider, search} +} + +func (v *nearParamsVector) targetFromParams(ctx context.Context, + nearVector *searchparams.NearVector, nearObject *searchparams.NearObject, + moduleParams map[string]interface{}, className, tenant string, +) ([]string, error) { + err := v.validateNearParams(nearVector, nearObject, moduleParams, className) + if err != nil { + return nil, err + } + + if len(moduleParams) == 1 { + for _, value := range moduleParams { + return v.targetFromModules(className, value) + } + } + + if nearVector != nil { + var targetVector []string + if len(nearVector.TargetVectors) > 0 { + targetVector = nearVector.TargetVectors + } + return targetVector, 
nil + } + + if nearObject != nil { + var targetVector []string + if len(nearObject.TargetVectors) > 0 { + targetVector = nearObject.TargetVectors + } + return targetVector, nil + } + + // either nearObject or nearVector or module search param has to be set, + // so if we land here, something has gone very wrong + return nil, errors.Errorf("targetFromParams was called without any known params present") +} + +func (v *nearParamsVector) vectorFromParams(ctx context.Context, + nearVector *searchparams.NearVector, nearObject *searchparams.NearObject, + moduleParams map[string]interface{}, className, tenant, targetVector string, index int, +) (models.Vector, error) { + err := v.validateNearParams(nearVector, nearObject, moduleParams, className) + if err != nil { + return nil, err + } + + if len(moduleParams) == 1 { + for name, value := range moduleParams { + return v.vectorFromModules(ctx, className, name, value, tenant, targetVector) + } + } + + if nearVector != nil { + if index >= len(nearVector.Vectors) { + return nil, fmt.Errorf("nearVector.vectorFromParams was called with invalid index") + } + return nearVector.Vectors[index], nil + } + + if nearObject != nil { + vector, _, err := v.vectorFromNearObjectParams(ctx, className, nearObject, tenant, targetVector) + if err != nil { + return nil, errors.Errorf("nearObject params: %v", err) + } + + return vector, nil + } + + // either nearObject or nearVector or module search param has to be set, + // so if we land here, something has gone very wrong + return []float32{}, errors.Errorf("vectorFromParams was called without any known params present") +} + +func (v *nearParamsVector) validateNearParams(nearVector *searchparams.NearVector, + nearObject *searchparams.NearObject, + moduleParams map[string]interface{}, className ...string, +) error { + if len(moduleParams) == 1 && nearVector != nil && nearObject != nil { + return errors.Errorf("found 'nearText' and 'nearVector' and 'nearObject' parameters " + + "which are 
conflicting, choose one instead") + } + + if len(moduleParams) == 1 && nearVector != nil { + return errors.Errorf("found both 'nearText' and 'nearVector' parameters " + + "which are conflicting, choose one instead") + } + + if len(moduleParams) == 1 && nearObject != nil { + return errors.Errorf("found both 'nearText' and 'nearObject' parameters " + + "which are conflicting, choose one instead") + } + + if nearVector != nil && nearObject != nil { + return errors.Errorf("found both 'nearVector' and 'nearObject' parameters " + + "which are conflicting, choose one instead") + } + + if v.modulesProvider != nil { + if len(moduleParams) > 1 { + params := make([]string, 0, len(moduleParams)) + for p := range moduleParams { + params = append(params, fmt.Sprintf("'%s'", p)) + } + return errors.Errorf("found more than one module params: %s which are conflicting "+ + "choose one instead", strings.Join(params, ", ")) + } + + for name, value := range moduleParams { + if len(className) == 1 { + err := v.modulesProvider.ValidateSearchParam(name, value, className[0]) + if err != nil { + return err + } + } else { + err := v.modulesProvider.CrossClassValidateSearchParam(name, value) + if err != nil { + return err + } + } + } + } + + if nearVector != nil { + if nearVector.Certainty != 0 && nearVector.Distance != 0 { + return errors.Errorf("found 'certainty' and 'distance' set in nearVector " + + "which are conflicting, choose one instead") + } + } + + if nearObject != nil { + if nearObject.Certainty != 0 && nearObject.Distance != 0 { + return errors.Errorf("found 'certainty' and 'distance' set in nearObject " + + "which are conflicting, choose one instead") + } + } + + return nil +} + +func (v *nearParamsVector) targetFromModules(className string, paramValue interface{}) ([]string, error) { + if v.modulesProvider != nil { + targetVector, err := v.modulesProvider.TargetsFromSearchParam(className, paramValue) + if err != nil { + return nil, errors.Errorf("vectorize params: %v", err) + } 
+ return targetVector, nil + } + return nil, errors.New("no modules defined") +} + +func (v *nearParamsVector) vectorFromModules(ctx context.Context, + className, paramName string, paramValue interface{}, tenant string, targetVector string, +) (models.Vector, error) { + if v.modulesProvider != nil { + isMultiVector, err := v.modulesProvider.IsTargetVectorMultiVector(className, targetVector) + if err != nil { + return nil, errors.Errorf("is target vector: %s multi vector: %v", targetVector, err) + } + + if isMultiVector { + vector, err := v.modulesProvider.MultiVectorFromSearchParam(ctx, + className, targetVector, tenant, paramName, paramValue, generictypes.FindMultiVectorFn(v.findMultiVector), + ) + if err != nil { + return nil, errors.Errorf("vectorize params: %v", err) + } + return vector, nil + } else { + vector, err := v.modulesProvider.VectorFromSearchParam(ctx, + className, targetVector, tenant, paramName, paramValue, generictypes.FindVectorFn(v.findVector), + ) + if err != nil { + return nil, errors.Errorf("vectorize params: %v", err) + } + return vector, nil + } + } + return nil, errors.New("no modules defined") +} + +// TODO:colbert unify findVector and findMultiVector +func (v *nearParamsVector) findVectorForNearObject(ctx context.Context, + className string, id strfmt.UUID, tenant, targetVector string, +) (models.Vector, string, error) { + if multiVector, targetVector, err := v.findMultiVector(ctx, className, id, tenant, targetVector); err == nil && len(multiVector) > 0 { + return multiVector, targetVector, nil + } + return v.findVector(ctx, className, id, tenant, targetVector) +} + +func (v *nearParamsVector) findVector(ctx context.Context, className string, id strfmt.UUID, tenant, targetVector string) ([]float32, string, error) { + switch className { + case "": + // Explore cross class searches where we don't have class context + return v.crossClassFindVector(ctx, id, targetVector) + default: + return v.classFindVector(ctx, className, id, tenant, 
targetVector) + } +} + +func (v *nearParamsVector) findMultiVector(ctx context.Context, className string, id strfmt.UUID, tenant, targetVector string) ([][]float32, string, error) { + switch className { + case "": + // Explore cross class searches where we don't have class context + return v.crossClassFindMultiVector(ctx, id, targetVector) + default: + return v.classFindMultiVector(ctx, className, id, tenant, targetVector) + } +} + +func (v *nearParamsVector) classFindVector(ctx context.Context, className string, + id strfmt.UUID, tenant, targetVector string, +) ([]float32, string, error) { + res, err := v.search.Object(ctx, className, id, search.SelectProperties{}, additional.Properties{}, nil, tenant) + if err != nil { + return nil, "", err + } + if res == nil { + return nil, "", errors.New("vector not found") + } + if targetVector != "" { + if targetVector == modelsext.DefaultNamedVectorName && len(res.Vector) > 0 { + return res.Vector, "", nil + } + + if res.Vectors[targetVector] == nil { + return nil, "", fmt.Errorf("vector not found for target: %v", targetVector) + } + vec, ok := res.Vectors[targetVector].([]float32) + if !ok { + return nil, "", fmt.Errorf("unrecognized type: %T for target: %v", res.Vectors[targetVector], targetVector) + } + return vec, targetVector, nil + } else { + if len(res.Vector) > 0 { + return res.Vector, "", nil + } + + if len(res.Vectors) == 1 { + for key, vec := range res.Vectors { + switch v := vec.(type) { + case []float32: + return v, key, nil + default: + return nil, "", fmt.Errorf("unrecognized type: %T target: %v", vec, key) + } + } + } else if len(res.Vectors) > 1 { + return nil, "", errors.New("multiple vectors found, specify target vector") + } + } + + return nil, "", fmt.Errorf("nearObject search-object with id %v has no vector", id) +} + +// TODO:colbert try to unify +func (v *nearParamsVector) classFindMultiVector(ctx context.Context, className string, + id strfmt.UUID, tenant, targetVector string, +) ([][]float32, 
string, error) { + res, err := v.search.Object(ctx, className, id, search.SelectProperties{}, additional.Properties{}, nil, tenant) + if err != nil { + return nil, "", err + } + if res == nil { + return nil, "", errors.New("vector not found") + } + if targetVector != "" { + if len(res.Vectors) == 0 || res.Vectors[targetVector] == nil { + return nil, "", fmt.Errorf("vector not found for target: %v", targetVector) + } + multiVector, ok := res.Vectors[targetVector].([][]float32) + if !ok { + return nil, "", fmt.Errorf("unrecognized type: %T target: %v", res.Vectors[targetVector], targetVector) + } + return multiVector, targetVector, nil + } else { + if len(res.Vectors) == 1 { + for key, vec := range res.Vectors { + switch v := vec.(type) { + case [][]float32: + return v, key, nil + default: + return nil, "", fmt.Errorf("unrecognized type: %T target: %v", vec, key) + } + } + } else if len(res.Vectors) > 1 { + return nil, "", errors.New("multiple vectors found, specify target vector") + } + } + return nil, "", fmt.Errorf("nearObject search-object with id %v has no vector", id) +} + +func (v *nearParamsVector) crossClassFindVector(ctx context.Context, id strfmt.UUID, targetVector string) ([]float32, string, error) { + res, err := v.search.ObjectsByID(ctx, id, search.SelectProperties{}, additional.Properties{}, "") + if err != nil { + return nil, "", errors.Wrap(err, "find objects") + } + switch len(res) { + case 0: + return nil, "", errors.New("vector not found") + case 1: + if targetVector != "" { + if len(res[0].Vectors) == 0 || res[0].Vectors[targetVector] == nil { + return nil, "", fmt.Errorf("vector not found for target: %v", targetVector) + } + vec, ok := res[0].Vectors[targetVector].([]float32) + if !ok { + return nil, "", fmt.Errorf("unrecognized vector type: %T", vec) + } + return vec, targetVector, nil + } + + if len(res[0].Vector) > 0 { + return res[0].Vector, "", nil + } + + if len(res[0].Vectors) == 0 { + return nil, "", nil + } + + if len(res[0].Vectors) == 
1 { + for key, vec := range res[0].Vectors { + v, ok := vec.([]float32) + if !ok { + return nil, "", fmt.Errorf("unrecognized vector type: %T", vec) + } + return v, key, nil + } + } + + return nil, "", errors.New("multiple vectors found, specify target vector") + default: + if targetVector == "" { + vectors := make([][]float32, len(res)) + for i := range res { + vectors[i] = res[i].Vector + } + return libvectorizer.CombineVectors(vectors), targetVector, nil + } + vectors := [][]float32{} + vectorDims := map[int]bool{} + for i := range res { + if len(res[i].Vectors) > 0 { + if vec, ok := res[i].Vectors[targetVector]; ok { + switch v := vec.(type) { + case []float32: + vectors = append(vectors, v) + if _, exists := vectorDims[len(v)]; !exists { + vectorDims[len(v)] = true + } + default: + return nil, "", fmt.Errorf("unrecognized vector type: %T for target vector: %s", vec, targetVector) + } + } + } + } + if len(vectorDims) != 1 { + return nil, "", fmt.Errorf("vectors with incompatible dimensions found for target: %s", targetVector) + } + return libvectorizer.CombineVectors(vectors), targetVector, nil + } +} + +func (v *nearParamsVector) crossClassFindMultiVector(ctx context.Context, id strfmt.UUID, targetVector string) ([][]float32, string, error) { + res, err := v.search.ObjectsByID(ctx, id, search.SelectProperties{}, additional.Properties{}, "") + if err != nil { + return nil, "", errors.Wrap(err, "find objects") + } + switch len(res) { + case 0: + return nil, "", errors.New("multi vector not found") + case 1: + if targetVector != "" { + if len(res[0].Vectors) == 0 || res[0].Vectors[targetVector] == nil { + return nil, "", fmt.Errorf("multi vector not found for target: %v", targetVector) + } + } else { + if len(res[0].Vectors) == 1 { + for key, vec := range res[0].Vectors { + v, ok := vec.([][]float32) + if !ok { + return nil, "", fmt.Errorf("unrecognized multi vector type: %T", vec) + } + return v, key, nil + } + } else if len(res[0].Vectors) > 1 { + return nil, 
"", errors.New("multiple multi vectors found, specify target vector") + } + } + return nil, "", fmt.Errorf("multi vector not found for target: %v", targetVector) + default: + return nil, "", fmt.Errorf("multiple multi vectors with incompatible dimensions found for target: %s", targetVector) + } +} + +func (v *nearParamsVector) crossClassVectorFromNearObjectParams(ctx context.Context, + params *searchparams.NearObject, +) (models.Vector, string, error) { + return v.vectorFromNearObjectParams(ctx, "", params, "", "") +} + +func (v *nearParamsVector) vectorFromNearObjectParams(ctx context.Context, + className string, params *searchparams.NearObject, tenant, targetVector string, +) (models.Vector, string, error) { + if len(params.ID) == 0 && len(params.Beacon) == 0 { + return nil, "", errors.New("empty id and beacon") + } + + var id strfmt.UUID + targetClassName := className + + if len(params.ID) > 0 { + id = strfmt.UUID(params.ID) + } else { + ref, err := crossref.Parse(params.Beacon) + if err != nil { + return nil, "", err + } + id = ref.TargetID + if ref.Class != "" { + targetClassName = ref.Class + } + } + + if targetVector == "" && len(params.TargetVectors) >= 1 { + targetVector = params.TargetVectors[0] + } + + return v.findVectorForNearObject(ctx, targetClassName, id, tenant, targetVector) +} + +func (v *nearParamsVector) extractCertaintyFromParams(nearVector *searchparams.NearVector, + nearObject *searchparams.NearObject, moduleParams map[string]interface{}, hybrid *searchparams.HybridSearch, +) float64 { + if nearVector != nil { + if nearVector.Certainty != 0 { + return nearVector.Certainty + } else if nearVector.WithDistance { + return additional.DistToCertainty(nearVector.Distance) + } + } + + if nearObject != nil { + if nearObject.Certainty != 0 { + return nearObject.Certainty + } else if nearObject.WithDistance { + return additional.DistToCertainty(nearObject.Distance) + } + } + + if hybrid != nil { + if hybrid.WithDistance { + return 
additional.DistToCertainty(float64(hybrid.Distance)) + } + if hybrid.NearVectorParams != nil { + if hybrid.NearVectorParams.Certainty != 0 { + return hybrid.NearVectorParams.Certainty + } else if hybrid.NearVectorParams.WithDistance { + return additional.DistToCertainty(hybrid.NearVectorParams.Distance) + } + } + if hybrid.NearTextParams != nil { + if hybrid.NearTextParams.Certainty != 0 { + return hybrid.NearTextParams.Certainty + } else if hybrid.NearTextParams.WithDistance { + return additional.DistToCertainty(hybrid.NearTextParams.Distance) + } + } + } + + if len(moduleParams) == 1 { + return v.extractCertaintyFromModuleParams(moduleParams) + } + + return 0 +} + +func (v *nearParamsVector) extractCertaintyFromModuleParams(moduleParams map[string]interface{}) float64 { + for _, param := range moduleParams { + if nearParam, ok := param.(modulecapabilities.NearParam); ok { + if nearParam.SimilarityMetricProvided() { + if certainty := nearParam.GetCertainty(); certainty != 0 { + return certainty + } else { + return additional.DistToCertainty(nearParam.GetDistance()) + } + } + } + } + + return 0 +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/near_params_vector_test.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/near_params_vector_test.go new file mode 100644 index 0000000000000000000000000000000000000000..fea316dfa234694779bc38c288912e7a52105612 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/near_params_vector_test.go @@ -0,0 +1,587 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + "reflect" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/stretchr/testify/assert" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/modelsext" + "github.com/weaviate/weaviate/entities/schema/crossref" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/searchparams" +) + +func Test_nearParamsVector_validateNearParams(t *testing.T) { + type args struct { + nearVector *searchparams.NearVector + nearObject *searchparams.NearObject + moduleParams map[string]interface{} + className []string + } + tests := []struct { + name string + args args + wantErr bool + errMessage string + }{ + { + name: "Should be OK, when all near params are nil", + args: args{ + nearVector: nil, + nearObject: nil, + moduleParams: nil, + className: nil, + }, + wantErr: false, + }, + { + name: "Should be OK, when nearVector param is set", + args: args{ + nearVector: &searchparams.NearVector{}, + nearObject: nil, + moduleParams: nil, + className: nil, + }, + wantErr: false, + }, + { + name: "Should be OK, when nearObject param is set", + args: args{ + nearVector: nil, + nearObject: &searchparams.NearObject{}, + moduleParams: nil, + className: nil, + }, + wantErr: false, + }, + { + name: "Should be OK, when moduleParams param is set", + args: args{ + nearVector: nil, + nearObject: nil, + moduleParams: map[string]interface{}{ + "nearCustomText": &nearCustomTextParams{}, + }, + className: nil, + }, + wantErr: false, + }, + { + name: "Should throw error, when nearVector and nearObject is set", + args: args{ + nearVector: &searchparams.NearVector{}, + nearObject: &searchparams.NearObject{}, + moduleParams: nil, + className: nil, + }, + wantErr: true, + errMessage: "found both 'nearVector' and 'nearObject' parameters which are conflicting, choose one instead", + }, + { + name: 
"Should throw error, when nearVector and moduleParams is set", + args: args{ + nearVector: &searchparams.NearVector{}, + nearObject: nil, + moduleParams: map[string]interface{}{ + "nearCustomText": &nearCustomTextParams{}, + }, + className: nil, + }, + wantErr: true, + errMessage: "found both 'nearText' and 'nearVector' parameters which are conflicting, choose one instead", + }, + { + name: "Should throw error, when nearObject and moduleParams is set", + args: args{ + nearVector: nil, + nearObject: &searchparams.NearObject{}, + moduleParams: map[string]interface{}{ + "nearCustomText": &nearCustomTextParams{}, + }, + className: nil, + }, + wantErr: true, + errMessage: "found both 'nearText' and 'nearObject' parameters which are conflicting, choose one instead", + }, + { + name: "Should throw error, when nearVector and nearObject and moduleParams is set", + args: args{ + nearVector: &searchparams.NearVector{}, + nearObject: &searchparams.NearObject{}, + moduleParams: map[string]interface{}{ + "nearCustomText": &nearCustomTextParams{}, + }, + className: nil, + }, + wantErr: true, + errMessage: "found 'nearText' and 'nearVector' and 'nearObject' parameters which are conflicting, choose one instead", + }, + { + name: "Should throw error, when nearVector certainty and distance are set", + args: args{ + nearVector: &searchparams.NearVector{ + Certainty: 0.1, + Distance: 0.9, + WithDistance: true, + }, + className: nil, + }, + wantErr: true, + errMessage: "found 'certainty' and 'distance' set in nearVector which are conflicting, choose one instead", + }, + { + name: "Should throw error, when nearObject certainty and distance are set", + args: args{ + nearObject: &searchparams.NearObject{ + Certainty: 0.1, + Distance: 0.9, + WithDistance: true, + }, + className: nil, + }, + wantErr: true, + errMessage: "found 'certainty' and 'distance' set in nearObject which are conflicting, choose one instead", + }, + { + name: "Should throw error, when nearText certainty and distance are 
set", + args: args{ + moduleParams: map[string]interface{}{ + "nearCustomText": &nearCustomTextParams{ + Certainty: 0.1, + Distance: 0.9, + WithDistance: true, + }, + }, + className: nil, + }, + wantErr: true, + errMessage: "nearText cannot provide both distance and certainty", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + e := &nearParamsVector{ + modulesProvider: &fakeModulesProvider{}, + search: &fakeNearParamsSearcher{returnVec: true}, + } + err := e.validateNearParams(tt.args.nearVector, tt.args.nearObject, tt.args.moduleParams, tt.args.className...) + if (err != nil) != tt.wantErr { + t.Errorf("nearParamsVector.validateNearParams() error = %v, wantErr %v", err, tt.wantErr) + } + if err != nil && tt.errMessage != err.Error() { + t.Errorf("nearParamsVector.validateNearParams() error = %v, errMessage = %v", err, tt.errMessage) + } + }) + } +} + +func Test_nearParamsVector_vectorFromParams(t *testing.T) { + type args struct { + ctx context.Context + nearVector *searchparams.NearVector + nearObject *searchparams.NearObject + moduleParams map[string]interface{} + className string + } + tests := []struct { + name string + args args + want models.Vector + wantErr bool + }{ + { + name: "Should get vector from nearVector", + args: args{ + nearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{1.1, 1.0, 0.1}}, + }, + }, + want: []float32{1.1, 1.0, 0.1}, + wantErr: false, + }, + { + name: "Should get vector from nearObject", + args: args{ + nearObject: &searchparams.NearObject{ + ID: "uuid", + }, + }, + want: []float32{1.0, 1.0, 1.0}, + wantErr: false, + }, + { + name: "Should get vector from nearText", + args: args{ + moduleParams: map[string]interface{}{ + "nearCustomText": &nearCustomTextParams{ + Values: []string{"a"}, + }, + }, + }, + want: []float32{1, 2, 3}, + wantErr: false, + }, + { + name: "Should get vector from nearObject", + args: args{ + nearObject: &searchparams.NearObject{ + Beacon: 
crossref.NewLocalhost("Class", "uuid").String(), + }, + }, + wantErr: true, + }, + { + name: "Should get vector from nearObject", + args: args{ + nearObject: &searchparams.NearObject{ + Beacon: crossref.NewLocalhost("Class", "e5dc4a4c-ef0f-3aed-89a3-a73435c6bbcf").String(), + }, + }, + want: []float32{1.0, 1.0, 1.0}, + wantErr: false, + }, + { + name: "Should get vector from nearObject across classes", + args: args{ + nearObject: &searchparams.NearObject{ + Beacon: crossref.NewLocalhost("LegacyClass", "e5dc4a4c-ef0f-3aed-89a3-a73435c6bbcf").String(), + }, + }, + want: []float32{0.0, 0.0, 0.0}, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + e := &nearParamsVector{ + modulesProvider: &fakeModulesProvider{}, + search: &fakeNearParamsSearcher{returnVec: true}, + } + got, err := e.vectorFromParams(tt.args.ctx, tt.args.nearVector, tt.args.nearObject, tt.args.moduleParams, tt.args.className, "", "", 0) + if (err != nil) != tt.wantErr { + t.Errorf("nearParamsVector.targetFromParams() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("nearParamsVector.targetFromParams() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_nearParamsVector_multiVectorFromParams(t *testing.T) { + type args struct { + ctx context.Context + nearVector *searchparams.NearVector + nearObject *searchparams.NearObject + moduleParams map[string]interface{} + className string + } + tests := []struct { + name string + args args + want models.Vector + returnVec bool + wantErr bool + wantTargetVector string + }{ + { + name: "should get vector from nearObject with single multi vector no target vector", + args: args{ + nearObject: &searchparams.NearObject{ + ID: "uuid", + }, + className: "SingleNamedVector", + }, + want: []float32{2.0, 2.0, 2.0}, + returnVec: true, + wantErr: false, + wantTargetVector: "", + }, + { + name: "should get vector from nearObject with single named vector and target 
vector", + args: args{ + nearObject: &searchparams.NearObject{ + ID: "uuid", + TargetVectors: []string{"Vector"}, + }, + className: "SingleNamedVector", + }, + want: []float32{2.0, 2.0, 2.0}, + returnVec: true, + wantErr: false, + wantTargetVector: "Vector", + }, + { + name: "should get vector from nearObject with multi named vector and target vector", + args: args{ + nearObject: &searchparams.NearObject{ + ID: "uuid", + TargetVectors: []string{"B"}, + }, + className: "MultiNamedVector", + }, + want: []float32{4.0, 4.0, 4.0}, + returnVec: true, + wantErr: false, + wantTargetVector: "B", + }, + { + name: "should get legacy vector from nearObject with default named vector name from legacy collection", + args: args{ + nearObject: &searchparams.NearObject{ + ID: "uuid", + TargetVectors: []string{modelsext.DefaultNamedVectorName}, + }, + className: "LegacyCollection", + }, + want: []float32{1, 1, 1}, + returnVec: true, + wantErr: false, + wantTargetVector: modelsext.DefaultNamedVectorName, + }, + { + name: "should get legacy vector from nearObject from legacy collection", + args: args{ + nearObject: &searchparams.NearObject{ + ID: "uuid", + }, + className: "LegacyCollection", + }, + want: []float32{1, 1, 1}, + returnVec: true, + wantErr: false, + }, + { + name: "error if no vector is present", + args: args{ + nearObject: &searchparams.NearObject{ + ID: "uuid", + }, + className: "LegacyClass", + }, + returnVec: false, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + e := &nearParamsVector{ + modulesProvider: &fakeModulesProvider{}, + search: &fakeNearParamsSearcher{returnVec: tt.returnVec}, + } + targetVector, err := e.targetFromParams(tt.args.ctx, tt.args.nearVector, tt.args.nearObject, tt.args.moduleParams, tt.args.className, "") + assert.Nil(t, err) + if tt.wantTargetVector != "" { + assert.Equal(t, tt.wantTargetVector, targetVector[0]) + } else { + assert.Empty(t, targetVector) + } + + got, err := 
e.vectorFromParams(tt.args.ctx, tt.args.nearVector, tt.args.nearObject, tt.args.moduleParams, tt.args.className, "", tt.wantTargetVector, 0) + if (err != nil) != tt.wantErr { + t.Errorf("nearParamsVector.vectorFromParams() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("nearParamsVector.vectorFromParams() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_nearParamsVector_extractCertaintyFromParams(t *testing.T) { + type args struct { + nearVector *searchparams.NearVector + nearObject *searchparams.NearObject + hybrid *searchparams.HybridSearch + moduleParams map[string]interface{} + } + tests := []struct { + name string + args args + want float64 + }{ + { + name: "Should extract distance from nearVector", + args: args{ + nearVector: &searchparams.NearVector{ + Distance: 0.88, + WithDistance: true, + }, + }, + want: 1 - 0.88/2, + }, + { + name: "Should extract certainty from nearVector", + args: args{ + nearVector: &searchparams.NearVector{ + Certainty: 0.88, + }, + }, + want: 0.88, + }, + { + name: "Should extract distance from nearObject", + args: args{ + nearObject: &searchparams.NearObject{ + Distance: 0.99, + WithDistance: true, + }, + }, + want: 1 - 0.99/2, + }, + { + name: "Should extract certainty from nearObject", + args: args{ + nearObject: &searchparams.NearObject{ + Certainty: 0.99, + }, + }, + want: 0.99, + }, + { + name: "Should extract distance from nearText", + args: args{ + moduleParams: map[string]interface{}{ + "nearCustomText": &nearCustomTextParams{ + Distance: 0.77, + WithDistance: true, + }, + }, + }, + want: 1 - 0.77/2, + }, + { + name: "Should extract certainty from nearText", + args: args{ + moduleParams: map[string]interface{}{ + "nearCustomText": &nearCustomTextParams{ + Certainty: 0.77, + }, + }, + }, + want: 0.77, + }, + { + name: "Should extract distance from hybrid nearVector", + args: args{ + hybrid: &searchparams.HybridSearch{ + NearVectorParams: 
&searchparams.NearVector{ + Distance: 0.88, + WithDistance: true, + }, + }, + }, + want: 1 - 0.88/2, + }, + { + name: "Should extract certainty from hybrid nearText", + args: args{ + hybrid: &searchparams.HybridSearch{ + NearTextParams: &searchparams.NearTextParams{ + Certainty: 0.77, + }, + }, + }, + want: 0.77, + }, + { + name: "Should extract distance from hybrid nearText", + args: args{ + hybrid: &searchparams.HybridSearch{ + NearTextParams: &searchparams.NearTextParams{ + Distance: 0.77, + WithDistance: true, + }, + }, + }, + want: 1 - 0.77/2, + }, + { + name: "Should extract distance from hybrid nearVector", + args: args{ + hybrid: &searchparams.HybridSearch{ + NearVectorParams: &searchparams.NearVector{ + Distance: 0.88, + WithDistance: true, + }, + }, + }, + want: 1 - 0.88/2, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + e := &nearParamsVector{ + modulesProvider: &fakeModulesProvider{}, + search: &fakeNearParamsSearcher{returnVec: true}, + } + got := e.extractCertaintyFromParams(tt.args.nearVector, tt.args.nearObject, tt.args.moduleParams, tt.args.hybrid) + if !assert.InDelta(t, tt.want, got, 1e-9) { + t.Errorf("nearParamsVector.extractCertaintyFromParams() = %v, want %v", got, tt.want) + } + }) + } +} + +type fakeNearParamsSearcher struct { + returnVec bool +} + +func (f *fakeNearParamsSearcher) ObjectsByID(ctx context.Context, id strfmt.UUID, + props search.SelectProperties, additional additional.Properties, tenant string, +) (search.Results, error) { + return search.Results{ + {Vector: []float32{1.0, 1.0, 1.0}}, + }, nil +} + +func (f *fakeNearParamsSearcher) Object(ctx context.Context, className string, id strfmt.UUID, + props search.SelectProperties, additional additional.Properties, + repl *additional.ReplicationProperties, tenant string, +) (*search.Result, error) { + switch className { + case "LegacyClass": + vec := []float32{0.0, 0.0, 0.0} + if !f.returnVec { + vec = nil + } + + return &search.Result{ + Vector: vec, 
+ }, nil + case "SingleNamedVector": + return &search.Result{ + Vectors: models.Vectors{ + "Vector": []float32{2.0, 2.0, 2.0}, + }, + }, nil + case "MultiNamedVector": + return &search.Result{ + Vectors: models.Vectors{ + "A": []float32{3.0, 3.0, 3.0}, + "B": []float32{4.0, 4.0, 4.0}, + }, + }, nil + default: + return &search.Result{ + Vector: []float32{1.0, 1.0, 1.0}, + }, nil + } +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/target_vector_param_helper.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/target_vector_param_helper.go new file mode 100644 index 0000000000000000000000000000000000000000..12ca5b8b844e0153d4315b0ec5bf599b98a7da19 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/target_vector_param_helper.go @@ -0,0 +1,71 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "fmt" + + "github.com/weaviate/weaviate/entities/dto" + "github.com/weaviate/weaviate/entities/modelsext" + "github.com/weaviate/weaviate/entities/modulecapabilities" + "github.com/weaviate/weaviate/entities/schema" +) + +type TargetVectorParamHelper struct{} + +func NewTargetParamHelper() *TargetVectorParamHelper { + return &TargetVectorParamHelper{} +} + +func (t *TargetVectorParamHelper) GetTargetVectorOrDefault(sch schema.Schema, className string, targetVectors []string) ([]string, error) { + if len(targetVectors) == 0 { + class := sch.FindClassByName(schema.ClassName(className)) + + // If no target vectors provided, check whether legacy vector is configured. + // For backwards compatibility, we have to return legacy vector in case no named vectors configured. 
+ if modelsext.ClassHasLegacyVectorIndex(class) || len(class.VectorConfig) == 0 { + return []string{""}, nil + } + + if len(class.VectorConfig) > 1 { + return []string{}, fmt.Errorf("multiple vectorizers configuration found, please specify target vector name") + } + + if len(class.VectorConfig) == 1 { + for name := range class.VectorConfig { + return []string{name}, nil + } + } + } + + return targetVectors, nil +} + +func (t *TargetVectorParamHelper) GetTargetVectorsFromParams(params dto.GetParams) []string { + if params.NearObject != nil && len(params.NearObject.TargetVectors) >= 1 { + return params.NearObject.TargetVectors + } + if params.NearVector != nil && len(params.NearVector.TargetVectors) >= 1 { + return params.NearVector.TargetVectors + } + if params.HybridSearch != nil && len(params.HybridSearch.TargetVectors) >= 1 { + return params.HybridSearch.TargetVectors + } + if len(params.ModuleParams) > 0 { + for _, moduleParam := range params.ModuleParams { + if nearParam, ok := moduleParam.(modulecapabilities.NearParam); ok && len(nearParam.GetTargetVectors()) >= 1 { + return nearParam.GetTargetVectors() + } + } + } + return []string{} +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser.go new file mode 100644 index 0000000000000000000000000000000000000000..7140975daf049981ee52f47a25bf4ae27aa43351 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser.go @@ -0,0 +1,94 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + + "github.com/go-openapi/strfmt" + "github.com/sirupsen/logrus" + "github.com/weaviate/weaviate/entities/additional" + "github.com/weaviate/weaviate/entities/aggregation" + "github.com/weaviate/weaviate/entities/dto" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/usecases/auth/authorization" + "github.com/weaviate/weaviate/usecases/config" + "github.com/weaviate/weaviate/usecases/modules" + "github.com/weaviate/weaviate/usecases/ratelimiter" + "github.com/weaviate/weaviate/usecases/schema" +) + +// Traverser can be used to dynamically traverse the knowledge graph +type Traverser struct { + config *config.WeaviateConfig + logger logrus.FieldLogger + authorizer authorization.Authorizer + vectorSearcher VectorSearcher + explorer explorer + schemaGetter schema.SchemaGetter + nearParamsVector *nearParamsVector + targetVectorParamHelper *TargetVectorParamHelper + metrics *Metrics + ratelimiter *ratelimiter.Limiter +} + +type VectorSearcher interface { + Aggregate(ctx context.Context, params aggregation.Params, modules *modules.Provider) (*aggregation.Result, error) + Object(ctx context.Context, className string, id strfmt.UUID, + props search.SelectProperties, additional additional.Properties, + properties *additional.ReplicationProperties, tenant string) (*search.Result, error) + ObjectsByID(ctx context.Context, id strfmt.UUID, props search.SelectProperties, + additional additional.Properties, tenant string) (search.Results, error) +} + +type explorer interface { + GetClass(ctx context.Context, params dto.GetParams) ([]interface{}, error) + CrossClassVectorSearch(ctx context.Context, params ExploreParams) ([]search.Result, error) +} + +// NewTraverser to traverse the knowledge graph +func NewTraverser(config *config.WeaviateConfig, logger logrus.FieldLogger, authorizer authorization.Authorizer, + vectorSearcher VectorSearcher, + explorer explorer, 
schemaGetter schema.SchemaGetter, + modulesProvider ModulesProvider, + metrics *Metrics, maxGetRequests int, +) *Traverser { + return &Traverser{ + config: config, + logger: logger, + authorizer: authorizer, + vectorSearcher: vectorSearcher, + explorer: explorer, + schemaGetter: schemaGetter, + nearParamsVector: newNearParamsVector(modulesProvider, vectorSearcher), + targetVectorParamHelper: NewTargetParamHelper(), + metrics: metrics, + ratelimiter: ratelimiter.New(maxGetRequests), + } +} + +// SearchResult is a single search result. See wrapping Search Results for the Type +type SearchResult struct { + Name string + Certainty float32 +} + +// SearchResults is grouping of SearchResults for a SchemaSearch +type SearchResults struct { + Type SearchType + Results []SearchResult +} + +// Len of the result set +func (r SearchResults) Len() int { + return len(r.Results) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_aggregate.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_aggregate.go new file mode 100644 index 0000000000000000000000000000000000000000..9fde59428adead910c31cc4ee2a74b74837d0650 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_aggregate.go @@ -0,0 +1,117 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + "fmt" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/aggregation" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/usecases/modules" +) + +// Aggregate resolves meta queries +func (t *Traverser) Aggregate(ctx context.Context, principal *models.Principal, + params *aggregation.Params, +) (interface{}, error) { + t.metrics.QueriesAggregateInc(params.ClassName.String()) + defer t.metrics.QueriesAggregateDec(params.ClassName.String()) + + inspector := newTypeInspector(t.schemaGetter.ReadOnlyClass) + + if cls := t.schemaGetter.ResolveAlias(params.ClassName.String()); cls != "" { + params.ClassName = schema.ClassName(cls) + } + + // validate here, because filters can contain references that need to be authorized + if err := t.validateFilters(ctx, principal, params.Filters); err != nil { + return nil, errors.Wrap(err, "invalid 'where' filter") + } + + if params.NearVector != nil || params.NearObject != nil || len(params.ModuleParams) > 0 { + className := params.ClassName.String() + err := t.nearParamsVector.validateNearParams(params.NearVector, + params.NearObject, params.ModuleParams, className) + if err != nil { + return nil, err + } + targetVectors, err := t.nearParamsVector.targetFromParams(ctx, + params.NearVector, params.NearObject, params.ModuleParams, className, params.Tenant) + if err != nil { + return nil, err + } + + targetVectors, err = t.targetVectorParamHelper.GetTargetVectorOrDefault(t.schemaGetter.GetSchemaSkipAuth(), + className, targetVectors) + if err != nil { + return nil, err + } + + searchVector, err := t.nearParamsVector.vectorFromParams(ctx, + params.NearVector, params.NearObject, params.ModuleParams, className, params.Tenant, targetVectors[0], 0) + if err != nil { + return nil, err + } + + params.TargetVector = targetVectors[0] + params.SearchVector = 
searchVector + + certainty := t.nearParamsVector.extractCertaintyFromParams(params.NearVector, + params.NearObject, params.ModuleParams, nil) + + if certainty == 0 && params.ObjectLimit == nil { + return nil, fmt.Errorf("must provide certainty or objectLimit with vector search") + } + params.Certainty = certainty + } + + if params.Hybrid != nil && params.Hybrid.Vector == nil { + var targetVectors []string + if len(params.Hybrid.TargetVectors) == 1 { + targetVectors = params.Hybrid.TargetVectors[:1] + } + targetVectors, err := t.targetVectorParamHelper.GetTargetVectorOrDefault(t.schemaGetter.GetSchemaSkipAuth(), params.ClassName.String(), targetVectors) + if err != nil { + return nil, err + } + if len(targetVectors) == 0 { + params.TargetVector = "" + } else { + params.TargetVector = targetVectors[0] + } + + certainty := t.nearParamsVector.extractCertaintyFromParams(params.NearVector, + params.NearObject, params.ModuleParams, params.Hybrid) + + if certainty == 0 && params.ObjectLimit == nil { + return nil, fmt.Errorf("must provide certainty or objectLimit with vector search") + } + + params.Certainty = certainty + } + + var mp *modules.Provider + + if t.nearParamsVector.modulesProvider != nil { + mp = t.nearParamsVector.modulesProvider.(*modules.Provider) + } + + res, err := t.vectorSearcher.Aggregate(ctx, *params, mp) + if err != nil || res == nil { + return nil, err + } + + return inspector.WithTypes(res, *params) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_aggregate_params_hash_test.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_aggregate_params_hash_test.go new file mode 100644 index 0000000000000000000000000000000000000000..de82acb0c5f1656595756a5e125b70c47229ec1c --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_aggregate_params_hash_test.go @@ -0,0 +1,62 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ 
(_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package traverser + +// TODO: do we still need this? there is no more spark support with 0.20.x +// func Test_ParamsHashing(t *testing.T) { +// params := func() AggregateParams { +// return AggregateParams{ +// Analytics: filters.AnalyticsProps{UseAnalyticsEngine: true}, +// ClassName: schema.ClassName("MyBestClass"), +// Filters: nil, +// Kind: kind.Thing, +// Properties: []AggregateProperty{ +// AggregateProperty{ +// Name: schema.PropertyName("bestprop"), +// Aggregators: []Aggregator{CountAggregator}, +// }, +// }, +// } +// } +// hash := func() string { return "a71e85e0741fccd63b33281b26270d43" } + +// t.Run("it generates a hash", func(t *testing.T) { +// p := params() +// h, err := p.AnalyticsHash() +// require.Nil(t, err) +// assert.Equal(t, h, hash()) +// }) + +// t.Run("it generates the same hash if analytical props are changed", func(t *testing.T) { +// p := params() +// p.Analytics.ForceRecalculate = true +// h, err := p.AnalyticsHash() +// require.Nil(t, err) +// assert.Equal(t, hash(), h) +// }) + +// t.Run("it generates a different hash if a prop is changed", func(t *testing.T) { +// p := params() +// p.Properties[0].Aggregators[0] = MaximumAggregator +// h, err := p.AnalyticsHash() +// require.Nil(t, err) +// assert.NotEqual(t, hash(), h) +// }) + +// t.Run("it generates a different hash if where filter is added", func(t *testing.T) { +// p := params() +// p.Filters = &filters.LocalFilter{Root: &filters.Clause{Value: &filters.Value{Value: "foo"}}} +// h, err := p.AnalyticsHash() +// require.Nil(t, err) +// assert.NotEqual(t, hash(), h) +// }) +// } diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_aggregate_test.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_aggregate_test.go new file mode 100644 index 
0000000000000000000000000000000000000000..0c123ad7ebf7fd59f45298d48e16bca46d6632e1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_aggregate_test.go @@ -0,0 +1,368 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + "testing" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/aggregation" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/searchparams" + "github.com/weaviate/weaviate/usecases/auth/authorization/mocks" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_Traverser_Aggregate(t *testing.T) { + principal := &models.Principal{} + logger, _ := test.NewNullLogger() + authorizer := mocks.NewMockAuthorizer() + vectorRepo := &fakeVectorRepo{} + explorer := &fakeExplorer{} + schemaGetter := &fakeSchemaGetter{aggregateTestSchema} + + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorRepo, explorer, schemaGetter, nil, nil, -1) + + t.Run("with aggregation only", func(t *testing.T) { + params := aggregation.Params{ + ClassName: "MyClass", + Properties: []aggregation.ParamProperty{ + { + Name: "label", + Aggregators: []aggregation.Aggregator{aggregation.NewTopOccurrencesAggregator(nil)}, + }, + { + Name: "number", + Aggregators: []aggregation.Aggregator{aggregation.SumAggregator}, + }, + { + Name: "int", + Aggregators: []aggregation.Aggregator{aggregation.SumAggregator}, + }, + { + Name: "date", + Aggregators: []aggregation.Aggregator{aggregation.NewTopOccurrencesAggregator(nil)}, + }, + }, + } + + agg := 
aggregation.Result{ + Groups: []aggregation.Group{ + { + Properties: map[string]aggregation.Property{ + "label": { + TextAggregation: aggregation.Text{ + Items: []aggregation.TextOccurrence{ + { + Value: "Foo", + Occurs: 200, + }, + }, + }, + Type: aggregation.PropertyTypeText, + }, + "date": { + TextAggregation: aggregation.Text{ + Items: []aggregation.TextOccurrence{ + { + Value: "Bar", + Occurs: 100, + }, + }, + }, + Type: aggregation.PropertyTypeText, + }, + "number": { + Type: aggregation.PropertyTypeNumerical, + NumericalAggregations: map[string]interface{}{ + "sum": 200, + }, + }, + "int": { + Type: aggregation.PropertyTypeNumerical, + NumericalAggregations: map[string]interface{}{ + "sum": 100, + }, + }, + }, + }, + }, + } + + vectorRepo.On("Aggregate", params).Return(&agg, nil) + res, err := traverser.Aggregate(context.Background(), principal, ¶ms) + require.Nil(t, err) + assert.Equal(t, &agg, res) + }) + + t.Run("with a mix of aggregation and type inspection", func(t *testing.T) { + params := aggregation.Params{ + ClassName: "MyClass", + Properties: []aggregation.ParamProperty{ + { + Name: "label", + Aggregators: []aggregation.Aggregator{ + aggregation.TypeAggregator, + aggregation.NewTopOccurrencesAggregator(nil), + }, + }, + { + Name: "number", + Aggregators: []aggregation.Aggregator{ + aggregation.TypeAggregator, + aggregation.SumAggregator, + }, + }, + { + Name: "int", + Aggregators: []aggregation.Aggregator{ + aggregation.TypeAggregator, + aggregation.SumAggregator, + }, + }, + { + Name: "date", + Aggregators: []aggregation.Aggregator{ + aggregation.TypeAggregator, + aggregation.NewTopOccurrencesAggregator(nil), + }, + }, + { + Name: "a ref", + Aggregators: []aggregation.Aggregator{aggregation.TypeAggregator}, + }, + }, + } + + agg := aggregation.Result{ + Groups: []aggregation.Group{ + { + Properties: map[string]aggregation.Property{ + "label": { + TextAggregation: aggregation.Text{ + Items: []aggregation.TextOccurrence{ + { + Value: "Foo", + 
Occurs: 200, + }, + }, + }, + Type: aggregation.PropertyTypeText, + }, + "date": { + TextAggregation: aggregation.Text{ + Items: []aggregation.TextOccurrence{ + { + Value: "Bar", + Occurs: 100, + }, + }, + }, + Type: aggregation.PropertyTypeText, + }, + "number": { + Type: aggregation.PropertyTypeNumerical, + NumericalAggregations: map[string]interface{}{ + "sum": 200, + }, + }, + "int": { + Type: aggregation.PropertyTypeNumerical, + NumericalAggregations: map[string]interface{}{ + "sum": 100, + }, + }, + }, + }, + }, + } + + expectedResult := aggregation.Result{ + Groups: []aggregation.Group{ + { + Properties: map[string]aggregation.Property{ + "label": { + TextAggregation: aggregation.Text{ + Items: []aggregation.TextOccurrence{ + { + Value: "Foo", + Occurs: 200, + }, + }, + }, + Type: aggregation.PropertyTypeText, + SchemaType: string(schema.DataTypeText), + }, + "date": { + TextAggregation: aggregation.Text{ + Items: []aggregation.TextOccurrence{ + { + Value: "Bar", + Occurs: 100, + }, + }, + }, + SchemaType: string(schema.DataTypeDate), + Type: aggregation.PropertyTypeText, + }, + "number": { + Type: aggregation.PropertyTypeNumerical, + SchemaType: string(schema.DataTypeNumber), + NumericalAggregations: map[string]interface{}{ + "sum": 200, + }, + }, + "int": { + Type: aggregation.PropertyTypeNumerical, + SchemaType: string(schema.DataTypeInt), + NumericalAggregations: map[string]interface{}{ + "sum": 100, + }, + }, + "a ref": { + Type: aggregation.PropertyTypeReference, + ReferenceAggregation: aggregation.Reference{ + PointingTo: []string{"AnotherClass"}, + }, + SchemaType: string(schema.DataTypeCRef), + }, + }, + }, + }, + } + + vectorRepo.On("Aggregate", params).Return(&agg, nil) + res, err := traverser.Aggregate(context.Background(), principal, ¶ms) + require.Nil(t, err) + assert.Equal(t, &expectedResult, res) + }) + + t.Run("with hybrid search", func(t *testing.T) { + params := aggregation.Params{ + ClassName: "MyClass", + Properties: 
[]aggregation.ParamProperty{ + { + Name: "label", + Aggregators: []aggregation.Aggregator{aggregation.NewTopOccurrencesAggregator(nil)}, + }, + { + Name: "number", + Aggregators: []aggregation.Aggregator{aggregation.SumAggregator}, + }, + { + Name: "int", + Aggregators: []aggregation.Aggregator{aggregation.SumAggregator}, + }, + { + Name: "date", + Aggregators: []aggregation.Aggregator{aggregation.NewTopOccurrencesAggregator(nil)}, + }, + }, + IncludeMetaCount: true, + Hybrid: &searchparams.HybridSearch{ + Type: "hybrid", + Alpha: 0.5, + Query: "some query", + Vector: []float32{1, 2, 3}, + }, + } + + agg := aggregation.Result{ + Groups: []aggregation.Group{ + { + Properties: map[string]aggregation.Property{ + "label": { + TextAggregation: aggregation.Text{ + Items: []aggregation.TextOccurrence{ + { + Value: "Foo", + Occurs: 200, + }, + }, + }, + Type: aggregation.PropertyTypeText, + }, + "date": { + TextAggregation: aggregation.Text{ + Items: []aggregation.TextOccurrence{ + { + Value: "Bar", + Occurs: 100, + }, + }, + }, + Type: aggregation.PropertyTypeText, + }, + "number": { + Type: aggregation.PropertyTypeNumerical, + NumericalAggregations: map[string]interface{}{ + "sum": 200, + }, + }, + "int": { + Type: aggregation.PropertyTypeNumerical, + NumericalAggregations: map[string]interface{}{ + "sum": 100, + }, + }, + }, + }, + }, + } + + vectorRepo.On("Aggregate", params).Return(&agg, nil) + res, err := traverser.Aggregate(context.Background(), principal, ¶ms) + require.Nil(t, err) + assert.Equal(t, &agg, res) + }) +} + +var aggregateTestSchema = schema.Schema{ + Objects: &models.Schema{ + Classes: []*models.Class{ + { + Class: "AnotherClass", + }, + { + Class: "MyClass", + Properties: []*models.Property{ + { + Name: "label", + DataType: schema.DataTypeText.PropString(), + Tokenization: models.PropertyTokenizationWhitespace, + }, + { + Name: "number", + DataType: []string{string(schema.DataTypeNumber)}, + }, + { + Name: "int", + DataType: 
[]string{string(schema.DataTypeInt)}, + }, + { + Name: "date", + DataType: []string{string(schema.DataTypeDate)}, + }, + { + Name: "a ref", + DataType: []string{"AnotherClass"}, + }, + }, + }, + }, + }, +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_explore_concepts.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_explore_concepts.go new file mode 100644 index 0000000000000000000000000000000000000000..a89139f86e5850624493cd5a56709475b8ab5d8e --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_explore_concepts.go @@ -0,0 +1,50 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/searchparams" +) + +// Explore through unstructured search terms +func (t *Traverser) Explore(ctx context.Context, + principal *models.Principal, params ExploreParams, +) ([]search.Result, error) { + if params.Limit == 0 { + params.Limit = 20 + } + + // to conduct a cross-class vector search, all classes must + // be configured with the same vector index distance type. + // additionally, certainty cannot be passed to Explore when + // the classes are configured to use a distance type other + // than cosine. 
+ if err := t.validateExploreDistance(params); err != nil { + return nil, err + } + + return t.explorer.CrossClassVectorSearch(ctx, params) +} + +// ExploreParams are the parameters used by the GraphQL `Explore { }` API +type ExploreParams struct { + NearVector *searchparams.NearVector + NearObject *searchparams.NearObject + Offset int + Limit int + ModuleParams map[string]interface{} + WithCertaintyProp bool +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_explore_concepts_test.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_explore_concepts_test.go new file mode 100644 index 0000000000000000000000000000000000000000..b24c5db1d7e4625532f346a7661e182a2b1006a7 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_explore_concepts_test.go @@ -0,0 +1,661 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + "testing" + + "github.com/go-openapi/strfmt" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/entities/searchparams" + "github.com/weaviate/weaviate/usecases/auth/authorization/mocks" + "github.com/weaviate/weaviate/usecases/config" +) + +func Test_ExploreConcepts(t *testing.T) { + t.Run("without any near searchers", func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + logger, _ := test.NewNullLogger() + vectorSearcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(vectorSearcher, log, getFakeModulesProvider(), metrics, defaultConfig) + schemaGetter := &fakeSchemaGetter{} + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorSearcher, explorer, schemaGetter, getFakeModulesProvider(), nil, -1) + params := ExploreParams{} + + _, err := traverser.Explore(context.Background(), nil, params) + assert.Contains(t, err.Error(), "received no search params") + }) + + t.Run("with two searchers set at the same time", func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + logger, _ := test.NewNullLogger() + vectorSearcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(vectorSearcher, log, getFakeModulesProvider(), metrics, defaultConfig) + schemaGetter := &fakeSchemaGetter{} + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorSearcher, explorer, schemaGetter, nil, nil, -1) + params := ExploreParams{ + NearVector: &searchparams.NearVector{}, + ModuleParams: map[string]interface{}{ + "nearCustomText": nil, + }, + } + + _, err := traverser.Explore(context.Background(), nil, params) + 
assert.Contains(t, err.Error(), "parameters which are conflicting") + }) + t.Run("nearCustomText with no movements set", func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + logger, _ := test.NewNullLogger() + vectorSearcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(vectorSearcher, log, getFakeModulesProvider(), metrics, defaultConfig) + schemaGetter := &fakeSchemaGetter{} + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorSearcher, explorer, schemaGetter, getFakeModulesProvider(), nil, -1) + params := ExploreParams{ + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"a search term", "another"}, + }), + }, + } + vectorSearcher.results = []search.Result{ + { + ClassName: "BestClass", + ID: "123-456-789", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "987-654-321", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + } + + metrics.On("AddUsageDimensions", "n/a", "explore_graphql", "nearCustomText", 128) + + res, err := traverser.Explore(context.Background(), nil, params) + require.Nil(t, err) + assert.Equal(t, []search.Result{ + { + ClassName: "BestClass", + ID: "123-456-789", + Beacon: "weaviate://localhost/BestClass/123-456-789", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "987-654-321", + Beacon: "weaviate://localhost/AnAction/987-654-321", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + }, res) + + assert.Equal(t, []float32{1, 2, 3}, vectorSearcher.calledWithVector) + assert.Equal(t, 20, vectorSearcher.calledWithLimit, + "uses the default limit if not explicitly set") + }) + + t.Run("nearCustomText without optional params", func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + logger, _ := test.NewNullLogger() + vectorSearcher := &fakeVectorSearcher{} + log, _ := 
test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(vectorSearcher, log, getFakeModulesProvider(), metrics, defaultConfig) + schemaGetter := &fakeSchemaGetter{} + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorSearcher, explorer, schemaGetter, nil, nil, -1) + params := ExploreParams{ + NearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{7.8, 9}}, + }, + } + vectorSearcher.results = []search.Result{ + { + ClassName: "BestClass", + ID: "123-456-789", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "987-654-321", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + } + + metrics.On("AddUsageDimensions", "n/a", "explore_graphql", "nearVector", 128) + res, err := traverser.Explore(context.Background(), nil, params) + require.Nil(t, err) + assert.Equal(t, []search.Result{ + { + ClassName: "BestClass", + ID: "123-456-789", + Beacon: "weaviate://localhost/BestClass/123-456-789", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "987-654-321", + Beacon: "weaviate://localhost/AnAction/987-654-321", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + }, res) + + assert.Equal(t, []float32{7.8, 9}, vectorSearcher.calledWithVector) + assert.Equal(t, 20, vectorSearcher.calledWithLimit, + "uses the default limit if not explicitly set") + }) + + t.Run("nearObject with id param", func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + logger, _ := test.NewNullLogger() + vectorSearcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(vectorSearcher, log, getFakeModulesProvider(), metrics, defaultConfig) + schemaGetter := &fakeSchemaGetter{} + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorSearcher, explorer, schemaGetter, nil, nil, -1) + params := ExploreParams{ + NearObject: &searchparams.NearObject{ + ID: 
"bd3d1560-3f0e-4b39-9d62-38b4a3c4f23a", + }, + } + searchRes := search.Result{ + ClassName: "BestClass", + ID: "bd3d1560-3f0e-4b39-9d62-38b4a3c4f23a", + } + vectorSearcher. + On("ObjectsByID", strfmt.UUID("bd3d1560-3f0e-4b39-9d62-38b4a3c4f23a")). + Return(search.Results{searchRes}, nil) + vectorSearcher.results = []search.Result{ + { + ClassName: "BestClass", + ID: "bd3d1560-3f0e-4b39-9d62-38b4a3c4f23a", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "bd3d1560-3f0e-4b39-9d62-38b4a3c4f23b", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + } + + metrics.On("AddUsageDimensions", "n/a", "explore_graphql", "nearObject", 128) + res, err := traverser.Explore(context.Background(), nil, params) + require.Nil(t, err) + assert.Equal(t, []search.Result{ + { + ClassName: "BestClass", + ID: "bd3d1560-3f0e-4b39-9d62-38b4a3c4f23a", + Beacon: "weaviate://localhost/BestClass/bd3d1560-3f0e-4b39-9d62-38b4a3c4f23a", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "bd3d1560-3f0e-4b39-9d62-38b4a3c4f23b", + Beacon: "weaviate://localhost/AnAction/bd3d1560-3f0e-4b39-9d62-38b4a3c4f23b", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + }, res) + + assert.Equal(t, 20, vectorSearcher.calledWithLimit, + "uses the default limit if not explicitly set") + }) + + t.Run("nearObject with beacon param", func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + logger, _ := test.NewNullLogger() + vectorSearcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(vectorSearcher, log, getFakeModulesProvider(), metrics, defaultConfig) + schemaGetter := &fakeSchemaGetter{} + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorSearcher, explorer, schemaGetter, nil, nil, -1) + params := ExploreParams{ + NearObject: &searchparams.NearObject{ + Beacon: "weaviate://localhost/bd3d1560-3f0e-4b39-9d62-38b4a3c4f23a", + }, + } + searchRes := 
search.Result{ + ClassName: "BestClass", + ID: "bd3d1560-3f0e-4b39-9d62-38b4a3c4f23a", + } + vectorSearcher. + On("ObjectsByID", strfmt.UUID("bd3d1560-3f0e-4b39-9d62-38b4a3c4f23a")). + Return(search.Results{searchRes}, nil) + vectorSearcher.results = []search.Result{ + { + ClassName: "BestClass", + ID: "bd3d1560-3f0e-4b39-9d62-38b4a3c4f23a", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "bd3d1560-3f0e-4b39-9d62-38b4a3c4f23b", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + } + + metrics.On("AddUsageDimensions", "n/a", "explore_graphql", "nearObject", 128) + res, err := traverser.Explore(context.Background(), nil, params) + require.Nil(t, err) + assert.Equal(t, []search.Result{ + { + ClassName: "BestClass", + ID: "bd3d1560-3f0e-4b39-9d62-38b4a3c4f23a", + Beacon: "weaviate://localhost/BestClass/bd3d1560-3f0e-4b39-9d62-38b4a3c4f23a", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "bd3d1560-3f0e-4b39-9d62-38b4a3c4f23b", + Beacon: "weaviate://localhost/AnAction/bd3d1560-3f0e-4b39-9d62-38b4a3c4f23b", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + }, res) + + assert.Equal(t, 20, vectorSearcher.calledWithLimit, + "uses the default limit if not explicitly set") + }) + + t.Run("nearCustomText with limit and distance set", func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + logger, _ := test.NewNullLogger() + vectorSearcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(vectorSearcher, log, getFakeModulesProvider(), metrics, defaultConfig) + schemaGetter := &fakeSchemaGetter{} + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorSearcher, explorer, schemaGetter, getFakeModulesProvider(), nil, -1) + params := ExploreParams{ + Limit: 100, + NearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{7.8, 9}}, + Distance: 0.2, + WithDistance: true, + }, + } + 
vectorSearcher.results = []search.Result{ + { + ClassName: "BestClass", + ID: "123-456-789", + Dist: 0.4, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "987-654-321", + Dist: 0.4, + Dims: 128, + }, + } + + metrics.On("AddUsageDimensions", "n/a", "explore_graphql", "nearVector", 128) + res, err := traverser.Explore(context.Background(), nil, params) + require.Nil(t, err) + assert.Equal(t, []search.Result{}, res) // certainty not matched + + assert.Equal(t, []float32{7.8, 9}, vectorSearcher.calledWithVector) + assert.Equal(t, 100, vectorSearcher.calledWithLimit, + "uses the default limit if not explicitly set") + }) + + t.Run("nearCustomText with limit and certainty set", func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + logger, _ := test.NewNullLogger() + vectorSearcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(vectorSearcher, log, getFakeModulesProvider(), metrics, defaultConfig) + schemaGetter := &fakeSchemaGetter{} + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorSearcher, explorer, schemaGetter, getFakeModulesProvider(), nil, -1) + params := ExploreParams{ + Limit: 100, + NearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{7.8, 9}}, + Certainty: 0.8, + }, + } + vectorSearcher.results = []search.Result{ + { + ClassName: "BestClass", + ID: "123-456-789", + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "987-654-321", + Dims: 128, + }, + } + + metrics.On("AddUsageDimensions", "n/a", "explore_graphql", "nearVector", 128) + res, err := traverser.Explore(context.Background(), nil, params) + require.Nil(t, err) + assert.Equal(t, []search.Result{}, res) // certainty not matched + + assert.Equal(t, []float32{7.8, 9}, vectorSearcher.calledWithVector) + assert.Equal(t, 100, vectorSearcher.calledWithLimit, + "uses the default limit if not explicitly set") + }) + + t.Run("nearCustomText with minimum distance set to 0.4", 
func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + logger, _ := test.NewNullLogger() + vectorSearcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(vectorSearcher, log, getFakeModulesProvider(), metrics, defaultConfig) + schemaGetter := &fakeSchemaGetter{} + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorSearcher, explorer, schemaGetter, getFakeModulesProvider(), nil, -1) + params := ExploreParams{ + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"a search term", "another"}, + "distance": float64(0.4), + }), + }, + } + vectorSearcher.results = []search.Result{} + + res, err := traverser.Explore(context.Background(), nil, params) + require.Nil(t, err) + assert.Equal(t, []search.Result{}, res, "empty result because distance is not met") + assert.Equal(t, []float32{1, 2, 3}, vectorSearcher.calledWithVector) + assert.Equal(t, 20, vectorSearcher.calledWithLimit, + "uses the default limit if not explicitly set") + }) + + t.Run("nearCustomText with minimum certainty set to 0.6", func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + logger, _ := test.NewNullLogger() + vectorSearcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(vectorSearcher, log, getFakeModulesProvider(), metrics, defaultConfig) + schemaGetter := &fakeSchemaGetter{} + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorSearcher, explorer, schemaGetter, getFakeModulesProvider(), nil, -1) + params := ExploreParams{ + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"a search term", "another"}, + "certainty": float64(0.6), + }), + }, + } + vectorSearcher.results = []search.Result{ + { + ClassName: "BestClass", + ID: 
"123-456-789", + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "987-654-321", + Dims: 128, + }, + } + + metrics.On("AddUsageDimensions", "n/a", "explore_graphql", "nearCustomText", 128) + res, err := traverser.Explore(context.Background(), nil, params) + require.Nil(t, err) + assert.Equal(t, []search.Result{}, res, "empty result because certainty is not met") + assert.Equal(t, []float32{1, 2, 3}, vectorSearcher.calledWithVector) + assert.Equal(t, 20, vectorSearcher.calledWithLimit, + "uses the default limit if not explicitly set") + }) + + t.Run("near text with movements set", func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + logger, _ := test.NewNullLogger() + vectorSearcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(vectorSearcher, log, getFakeModulesProvider(), metrics, defaultConfig) + schemaGetter := &fakeSchemaGetter{} + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorSearcher, explorer, schemaGetter, getFakeModulesProvider(), nil, -1) + params := ExploreParams{ + Limit: 100, + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"a search term", "another"}, + "moveTo": map[string]interface{}{ + "concepts": []interface{}{"foo"}, + "force": float64(0.7), + }, + "moveAwayFrom": map[string]interface{}{ + "concepts": []interface{}{"bar"}, + "force": float64(0.7), + }, + }), + }, + } + vectorSearcher.results = []search.Result{ + { + ClassName: "BestClass", + ID: "123-456-789", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "987-654-321", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + } + + metrics.On("AddUsageDimensions", "n/a", "explore_graphql", "nearCustomText", 128) + res, err := traverser.Explore(context.Background(), nil, params) + require.Nil(t, err) + assert.Equal(t, []search.Result{ + { + ClassName: "BestClass", + 
ID: "123-456-789", + Beacon: "weaviate://localhost/BestClass/123-456-789", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "987-654-321", + Beacon: "weaviate://localhost/AnAction/987-654-321", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + }, res) + + // see dummy implementation of MoveTo and MoveAway for why the vector should + // be the way it is + assert.Equal(t, []float32{1.5, 2.5, 3.5}, vectorSearcher.calledWithVector) + assert.Equal(t, 100, vectorSearcher.calledWithLimit, + "limit explicitly set") + }) + + t.Run("near text with movements and objects set", func(t *testing.T) { + authorizer := mocks.NewMockAuthorizer() + logger, _ := test.NewNullLogger() + vectorSearcher := &fakeVectorSearcher{} + log, _ := test.NewNullLogger() + metrics := &fakeMetrics{} + explorer := NewExplorer(vectorSearcher, log, getFakeModulesProvider(), metrics, defaultConfig) + schemaGetter := &fakeSchemaGetter{} + traverser := NewTraverser(&config.WeaviateConfig{}, logger, authorizer, + vectorSearcher, explorer, schemaGetter, getFakeModulesProvider(), nil, -1) + + params := ExploreParams{ + Limit: 100, + ModuleParams: map[string]interface{}{ + "nearCustomText": extractNearCustomTextParam(map[string]interface{}{ + "concepts": []interface{}{"a search term", "another"}, + "moveTo": map[string]interface{}{ + "concepts": []interface{}{"foo"}, + "force": float64(0.7), + "objects": []interface{}{ + map[string]interface{}{ + "id": "e9c12c22-766f-4bde-b140-d4cf8fd6e041", + }, + }, + }, + "moveAwayFrom": map[string]interface{}{ + "concepts": []interface{}{"bar"}, + "force": float64(0.7), + "objects": []interface{}{ + map[string]interface{}{ + "id": "e9c12c22-766f-4bde-b140-d4cf8fd6e042", + }, + map[string]interface{}{ + "beacon": "weaviate://localhost/e9c12c22-766f-4bde-b140-d4cf8fd6e043", + }, + map[string]interface{}{ + "beacon": "weaviate://localhost/e9c12c22-766f-4bde-b140-d4cf8fd6e044", + }, + }, + }, + }), + }, + } + vectorSearcher.results = 
[]search.Result{ + { + ClassName: "BestClass", + ID: "123-456-789", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "987-654-321", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + } + searchRes1 := search.Result{ + ClassName: "BestClass", + ID: "e9c12c22-766f-4bde-b140-d4cf8fd6e041", + Dims: 128, + } + searchRes2 := search.Result{ + ClassName: "BestClass", + ID: "e9c12c22-766f-4bde-b140-d4cf8fd6e042", + Dims: 128, + } + searchRes3 := search.Result{ + ClassName: "BestClass", + ID: "e9c12c22-766f-4bde-b140-d4cf8fd6e043", + Dims: 128, + } + searchRes4 := search.Result{ + ClassName: "BestClass", + ID: "e9c12c22-766f-4bde-b140-d4cf8fd6e044", + Dims: 128, + } + + vectorSearcher. + On("ObjectByID", strfmt.UUID("e9c12c22-766f-4bde-b140-d4cf8fd6e041")). + Return(&searchRes1, nil) + vectorSearcher. + On("ObjectByID", strfmt.UUID("e9c12c22-766f-4bde-b140-d4cf8fd6e042")). + Return(&searchRes2, nil) + vectorSearcher. + On("ObjectByID", strfmt.UUID("e9c12c22-766f-4bde-b140-d4cf8fd6e043")). + Return(&searchRes3, nil) + vectorSearcher. + On("ObjectByID", strfmt.UUID("e9c12c22-766f-4bde-b140-d4cf8fd6e044")). 
+ Return(&searchRes4, nil) + + metrics.On("AddUsageDimensions", "n/a", "explore_graphql", "nearCustomText", 128) + res, err := traverser.Explore(context.Background(), nil, params) + require.Nil(t, err) + assert.Equal(t, []search.Result{ + { + ClassName: "BestClass", + ID: "123-456-789", + Beacon: "weaviate://localhost/BestClass/123-456-789", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + { + ClassName: "AnAction", + ID: "987-654-321", + Beacon: "weaviate://localhost/AnAction/987-654-321", + Certainty: 0.5, + Dist: 0.5, + Dims: 128, + }, + }, res) + + // see dummy implementation of MoveTo and MoveAway for why the vector should + // be the way it is + assert.Equal(t, []float32{1.5, 2.5, 3.5}, vectorSearcher.calledWithVector) + assert.Equal(t, 100, vectorSearcher.calledWithLimit, + "limit explicitly set") + }) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_get.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_get.go new file mode 100644 index 0000000000000000000000000000000000000000..dc735031d3e99db01abfd85c0d8355716b695ad8 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_get.go @@ -0,0 +1,116 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + "fmt" + "time" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/filters" + + "github.com/weaviate/weaviate/entities/dto" + enterrors "github.com/weaviate/weaviate/entities/errors" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/usecases/auth/authorization" +) + +func (t *Traverser) GetClass(ctx context.Context, principal *models.Principal, + params dto.GetParams, +) ([]interface{}, error) { + before := time.Now() + + ok := t.ratelimiter.TryInc() + if !ok { + // we currently have no concept of error status code or typed errors in + // GraphQL, so there is no other way then to send a message containing what + // we want to convey + return nil, enterrors.NewErrRateLimit() + } + + defer t.ratelimiter.Dec() + + t.metrics.QueriesGetInc(params.ClassName) + defer t.metrics.QueriesGetDec(params.ClassName) + defer t.metrics.QueriesObserveDuration(params.ClassName, before.UnixMilli()) + + if err := t.probeForRefDepthLimit(params.Properties); err != nil { + return nil, err + } + + // validate here, because filters can contain references that need to be authorized + if err := t.validateFilters(ctx, principal, params.Filters); err != nil { + return nil, errors.Wrap(err, "invalid 'where' filter") + } + + certainty := ExtractCertaintyFromParams(params) + if certainty != 0 || params.AdditionalProperties.Certainty { + // if certainty is provided as input, we must ensure + // that the vector index is configured to use cosine + // distance + if err := t.validateGetDistanceParams(params); err != nil { + return nil, err + } + } + + return t.explorer.GetClass(ctx, params) +} + +// probeForRefDepthLimit checks to ensure reference nesting depth doesn't exceed the limit +// provided by QUERY_CROSS_REFERENCE_DEPTH_LIMIT +func (t *Traverser) probeForRefDepthLimit(props search.SelectProperties) error { + var 
( + determineDepth func(props search.SelectProperties, currDepth int) int + depthLimit = t.config.Config.QueryCrossReferenceDepthLimit + ) + + determineDepth = func(props search.SelectProperties, currDepth int) int { + if len(props) == 0 || currDepth > depthLimit { + return 0 + } + + currDepth++ + maxDepth := 0 + for _, prop := range props { + for _, refTarget := range prop.Refs { + maxDepth = max(maxDepth, determineDepth(refTarget.RefProperties, currDepth)) + } + } + + return maxDepth + 1 + } + + depth := determineDepth(props, 0) + if depth > depthLimit { + return fmt.Errorf("nested references depth exceeds QUERY_CROSS_REFERENCE_DEPTH_LIMIT (%d)", depthLimit) + } + return nil +} + +func (t *Traverser) validateFilters(ctx context.Context, principal *models.Principal, filter *filters.LocalFilter) error { + if filter == nil { + return nil + } + + f := func(name string) (*models.Class, error) { + err := t.authorizer.Authorize(ctx, principal, authorization.READ, authorization.CollectionsMetadata(name)...) + if err != nil { + return nil, err + } + + return t.schemaGetter.ReadOnlyClass(name), nil + } + + return filters.ValidateFilters(f, filter) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_get_params_test.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_get_params_test.go new file mode 100644 index 0000000000000000000000000000000000000000..7569a2800604a315fa876e736d85f5f7ef0cf3f1 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_get_params_test.go @@ -0,0 +1,747 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/mock" + "github.com/weaviate/weaviate/entities/dto" + "github.com/weaviate/weaviate/entities/filters" + "github.com/weaviate/weaviate/entities/models" + "github.com/weaviate/weaviate/entities/schema" + "github.com/weaviate/weaviate/entities/searchparams" + + logrus "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/weaviate/weaviate/entities/search" + "github.com/weaviate/weaviate/usecases/auth/authorization/mocks" + "github.com/weaviate/weaviate/usecases/config" +) + +func TestGetParams(t *testing.T) { + t.Run("without any select properties", func(t *testing.T) { + sp := search.SelectProperties{} + assert.Equal(t, false, sp.HasRefs(), "indicates no refs are present") + }) + + t.Run("with only primitive select properties", func(t *testing.T) { + sp := search.SelectProperties{ + search.SelectProperty{ + IsPrimitive: true, + Name: "Foo", + }, + search.SelectProperty{ + IsPrimitive: true, + Name: "Bar", + }, + } + + assert.Equal(t, false, sp.HasRefs(), "indicates no refs are present") + + resolve, err := sp.ShouldResolve([]string{"inCountry", "Country"}) + require.Nil(t, err) + assert.Equal(t, false, resolve) + }) + + t.Run("with a ref prop", func(t *testing.T) { + sp := search.SelectProperties{ + search.SelectProperty{ + IsPrimitive: true, + Name: "name", + }, + search.SelectProperty{ + IsPrimitive: false, + Name: "inCity", + Refs: []search.SelectClass{ + { + ClassName: "City", + RefProperties: search.SelectProperties{ + search.SelectProperty{ + Name: "name", + IsPrimitive: true, + }, + search.SelectProperty{ + Name: "inCountry", + IsPrimitive: false, + Refs: []search.SelectClass{ + { + ClassName: "Country", + RefProperties: search.SelectProperties{ + search.SelectProperty{ + Name: "name", + IsPrimitive: true, + }, 
+ }, + }, + }, + }, + }, + }, + }, + }, + } + + t.Run("checking for refs", func(t *testing.T) { + assert.Equal(t, true, sp.HasRefs(), "indicates refs are present") + }) + + t.Run("checking valid single level ref", func(t *testing.T) { + resolve, err := sp.ShouldResolve([]string{"inCity", "City"}) + require.Nil(t, err) + assert.Equal(t, true, resolve) + }) + + t.Run("checking invalid single level ref", func(t *testing.T) { + resolve, err := sp.ShouldResolve([]string{"inCity", "Town"}) + require.Nil(t, err) + assert.Equal(t, false, resolve) + }) + + t.Run("checking valid nested ref", func(t *testing.T) { + resolve, err := sp.ShouldResolve([]string{"inCity", "City", "inCountry", "Country"}) + require.Nil(t, err) + assert.Equal(t, true, resolve) + }) + + t.Run("checking invalid nested level refs", func(t *testing.T) { + resolve, err := sp.ShouldResolve([]string{"inCity", "Town", "inCountry", "Country"}) + require.Nil(t, err) + assert.Equal(t, false, resolve) + + resolve, err = sp.ShouldResolve([]string{"inCity", "City", "inCountry", "Land"}) + require.Nil(t, err) + assert.Equal(t, false, resolve) + }) + + t.Run("selecting a specific prop", func(t *testing.T) { + prop := sp.FindProperty("inCity") + assert.Equal(t, prop, &sp[1]) + }) + }) +} + +func TestGet_NestedRefDepthLimit(t *testing.T) { + type testcase struct { + name string + props search.SelectProperties + maxDepth int + expectedErr string + } + + makeNestedRefProps := func(depth int) search.SelectProperties { + root := search.SelectProperties{} + next := &root + for i := 0; i < depth; i++ { + *next = append(*next, + search.SelectProperty{Name: "nextNode"}, + search.SelectProperty{Name: "otherRef"}, + ) + class0 := search.SelectClass{ClassName: "LinkedListNode"} + refs0 := []search.SelectClass{class0} + (*next)[0].Refs = refs0 + class1 := search.SelectClass{ClassName: "LinkedListNode"} + refs1 := []search.SelectClass{class1} + (*next)[1].Refs = refs1 + next = &refs0[0].RefProperties + } + return root + } + + 
newTraverser := func(depth int) *Traverser { + logger, _ := logrus.NewNullLogger() + schemaGetter := &fakeSchemaGetter{aggregateTestSchema} + cfg := config.WeaviateConfig{ + Config: config.Config{ + QueryCrossReferenceDepthLimit: depth, + }, + } + return NewTraverser(&cfg, logger, mocks.NewMockAuthorizer(), + &fakeVectorRepo{}, &fakeExplorer{}, schemaGetter, nil, nil, -1) + } + + tests := []testcase{ + { + name: "succeed with explicitly set low depth limit", + maxDepth: 5, + props: makeNestedRefProps(5), + }, + { + name: "fail with explicitly set low depth limit", + maxDepth: 5, + props: makeNestedRefProps(6), + expectedErr: "nested references depth exceeds QUERY_CROSS_REFERENCE_DEPTH_LIMIT (5)", + }, + { + name: "succeed with explicitly set high depth limit", + maxDepth: 500, + props: makeNestedRefProps(500), + }, + { + name: "fail with explicitly set high depth limit", + maxDepth: 500, + props: makeNestedRefProps(501), + expectedErr: "nested references depth exceeds QUERY_CROSS_REFERENCE_DEPTH_LIMIT (500)", + }, + { + name: "fail with explicitly set low depth limit, but high actual depth", + maxDepth: 10, + props: makeNestedRefProps(5000), + expectedErr: "nested references depth exceeds QUERY_CROSS_REFERENCE_DEPTH_LIMIT (10)", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + if test.maxDepth == 0 { + t.Fatalf("must provide maxDepth param for test %q", test.name) + } + traverser := newTraverser(test.maxDepth) + err := traverser.probeForRefDepthLimit(test.props) + if test.expectedErr != "" { + assert.EqualError(t, err, test.expectedErr) + } else { + assert.Nil(t, err) + } + }) + } +} + +func Test_GetClass_WithFilters(t *testing.T) { + valueNameFromDataType := func(dt schema.DataType) string { + return "value" + strings.ToUpper(string(dt[0])) + string(dt[1:]) + } + type test struct { + name string + filters *filters.LocalFilter + expectedError error + } + buildInvalidTests := func(op filters.Operator, path []interface{}, + 
correctDt schema.DataType, dts []schema.DataType, value interface{}, + ) []test { + out := make([]test, len(dts)) + for i, dt := range dts { + useInstead := correctDt + if baseType, ok := schema.IsArrayType(correctDt); ok { + useInstead = baseType + } + + out[i] = test{ + name: fmt.Sprintf("invalid %s filter - using %s", correctDt, dt), + filters: buildFilter(op, path, dt, value), + expectedError: errors.Errorf("invalid 'where' filter: data type filter cannot use"+ + " \"%s\" on type \"%s\", use \"%s\" instead", + valueNameFromDataType(dt), + correctDt, + valueNameFromDataType(useInstead), + ), + } + } + + return out + } + + newTraverser := func() *Traverser { + logger, _ := logrus.NewNullLogger() + schemaGetter := &fakeSchemaGetter{schemaForFiltersValidation()} + cfg := config.WeaviateConfig{} + return NewTraverser(&cfg, logger, mocks.NewMockAuthorizer(), + &fakeVectorRepo{}, &fakeExplorer{}, schemaGetter, nil, nil, -1) + } + + buildInvalidRefCountTests := func(op filters.Operator, path []interface{}, + correctDt schema.DataType, dts []schema.DataType, value interface{}, + ) []test { + out := make([]test, len(dts)) + for i, dt := range dts { + out[i] = test{ + name: fmt.Sprintf("invalid %s filter - using %s", correctDt, dt), + filters: buildFilter(op, path, dt, value), + expectedError: errors.Errorf("invalid 'where' filter: "+ + "Property %q is a ref prop to the class %q. Only "+ + "\"valueInt\" can be used on a ref prop directly to count the number of refs. "+ + "Or did you mean to filter on a primitive prop of the referenced class? 
"+ + "In this case make sure your path contains 3 elements in the form of "+ + "[, , ]", + path[0], "ClassTwo"), + } + } + + return out + } + + buildInvalidNestedTests := func(op filters.Operator, path []interface{}, + correctDt schema.DataType, dts []schema.DataType, value interface{}, + ) []test { + out := make([]test, len(dts)) + for i, dt := range dts { + useInstead := correctDt + if baseType, ok := schema.IsArrayType(correctDt); ok { + useInstead = baseType + } + + out[i] = test{ + name: fmt.Sprintf("invalid %s filter - using %s", correctDt, dt), + filters: buildNestedFilter(filters.OperatorAnd, + // valid operand + buildFilter(op, path, correctDt, value), + // invalid operand + buildFilter(op, path, dt, value), + ), + expectedError: errors.Errorf("invalid 'where' filter: child operand at "+ + "position 1: data type filter cannot use"+ + " \"%s\" on type \"%s\", use \"%s\" instead", + valueNameFromDataType(dt), + correctDt, + valueNameFromDataType(useInstead), + ), + } + } + + return out + } + + tests := [][]test{ + { + { + name: "without filter", + expectedError: nil, + }, + }, + + // single level, primitive props + arrays + { + { + name: "valid text search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"text_prop"}, + schema.DataTypeText, "foo"), + expectedError: nil, + }, + }, + buildInvalidTests(filters.OperatorEqual, []interface{}{"text_prop"}, + schema.DataTypeText, allValueTypesExcept(schema.DataTypeText, schema.DataTypeString), "foo"), + { + { + name: "valid text array search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"text_array_prop"}, + schema.DataTypeText, "foo"), + expectedError: nil, + }, + }, + buildInvalidTests(filters.OperatorEqual, []interface{}{"text_array_prop"}, + schema.DataTypeTextArray, allValueTypesExcept(schema.DataTypeText, schema.DataTypeString), "foo"), + { + { + name: "valid number search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"number_prop"}, + schema.DataTypeNumber, "foo"), 
+ expectedError: nil, + }, + }, + buildInvalidTests(filters.OperatorEqual, []interface{}{"number_prop"}, + schema.DataTypeNumber, allValueTypesExcept(schema.DataTypeNumber), "foo"), + { + { + name: "valid number array search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"number_array_prop"}, + schema.DataTypeNumber, "foo"), + expectedError: nil, + }, + }, + buildInvalidTests(filters.OperatorEqual, []interface{}{"number_array_prop"}, + schema.DataTypeNumberArray, allValueTypesExcept(schema.DataTypeNumber), "foo"), + { + { + name: "valid int search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"int_prop"}, + schema.DataTypeInt, "foo"), + expectedError: nil, + }, + }, + buildInvalidTests(filters.OperatorEqual, []interface{}{"int_prop"}, + schema.DataTypeInt, allValueTypesExcept(schema.DataTypeInt), "foo"), + { + { + name: "valid int array search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"int_array_prop"}, + schema.DataTypeInt, "foo"), + expectedError: nil, + }, + }, + buildInvalidTests(filters.OperatorEqual, []interface{}{"int_array_prop"}, + schema.DataTypeIntArray, allValueTypesExcept(schema.DataTypeInt), "foo"), + { + { + name: "valid boolean search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"boolean_prop"}, + schema.DataTypeBoolean, "foo"), + expectedError: nil, + }, + }, + buildInvalidTests(filters.OperatorEqual, []interface{}{"boolean_prop"}, + schema.DataTypeBoolean, allValueTypesExcept(schema.DataTypeBoolean), "foo"), + { + { + name: "valid boolean array search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"boolean_array_prop"}, + schema.DataTypeBoolean, "foo"), + expectedError: nil, + }, + }, + buildInvalidTests(filters.OperatorEqual, []interface{}{"boolean_array_prop"}, + schema.DataTypeBooleanArray, allValueTypesExcept(schema.DataTypeBoolean), "foo"), + { + { + name: "valid date search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"date_prop"}, + 
schema.DataTypeDate, "foo"), + expectedError: nil, + }, + }, + buildInvalidTests(filters.OperatorEqual, []interface{}{"date_prop"}, + schema.DataTypeDate, allValueTypesExcept(schema.DataTypeDate), "foo"), + { + { + name: "valid date array search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"date_array_prop"}, + schema.DataTypeDate, "foo"), + expectedError: nil, + }, + }, + buildInvalidTests(filters.OperatorEqual, []interface{}{"date_array_prop"}, + schema.DataTypeDateArray, allValueTypesExcept(schema.DataTypeDate), "foo"), + { + { + name: "valid geoCoordinates search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"geo_prop"}, + schema.DataTypeGeoCoordinates, "foo"), + expectedError: nil, + }, + }, + buildInvalidTests(filters.OperatorEqual, []interface{}{"geo_prop"}, + schema.DataTypeGeoCoordinates, allValueTypesExcept(schema.DataTypeGeoCoordinates), "foo"), + { + { + name: "valid phoneNumber search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"phone_prop"}, + schema.DataTypePhoneNumber, "foo"), + expectedError: nil, + }, + }, + buildInvalidTests(filters.OperatorEqual, []interface{}{"phone_prop"}, + schema.DataTypePhoneNumber, allValueTypesExcept(schema.DataTypePhoneNumber), "foo"), + + // nested filters + { + { + name: "valid nested filter", + filters: buildNestedFilter(filters.OperatorAnd, + buildFilter(filters.OperatorEqual, []interface{}{"text_prop"}, + schema.DataTypeText, "foo"), + buildFilter(filters.OperatorEqual, []interface{}{"int_prop"}, + schema.DataTypeInt, "foo"), + ), + expectedError: nil, + }, + }, + buildInvalidNestedTests(filters.OperatorEqual, []interface{}{"text_prop"}, + schema.DataTypeText, allValueTypesExcept(schema.DataTypeText, schema.DataTypeString), "foo"), + + // cross-ref filters + { + { + name: "valid ref filter", + filters: buildFilter(filters.OperatorEqual, []interface{}{"ref_prop", "ClassTwo", "text_prop"}, + schema.DataTypeText, "foo"), + expectedError: nil, + }, + }, + 
buildInvalidTests(filters.OperatorEqual, []interface{}{"text_prop", "ClassTwo", "text_prop"}, + schema.DataTypeText, allValueTypesExcept(schema.DataTypeText, schema.DataTypeString), "foo"), + { + { + name: "invalid ref filter, due to non-existing class", + filters: buildFilter(filters.OperatorEqual, []interface{}{"ref_prop", "ClassThree", "text_prop"}, + schema.DataTypeText, "foo"), + expectedError: errors.Errorf("invalid 'where' filter: class " + + "\"ClassThree\" does not exist in schema"), + }, + { + name: "invalid ref filter, due to non-existing prop on ref", + filters: buildFilter(filters.OperatorEqual, []interface{}{"ref_prop", "ClassTwo", "invalid_prop"}, + schema.DataTypeText, "foo"), + expectedError: errors.Errorf("invalid 'where' filter: no such prop with name 'invalid_prop' " + + "found in class 'ClassTwo' " + + "in the schema. Check your schema files for which properties in this class are available"), + }, + }, + { + { + name: "counting ref props", + filters: buildFilter(filters.OperatorEqual, []interface{}{"ref_prop"}, + schema.DataTypeInt, "foo"), + expectedError: nil, + }, + }, + + // special case, trying to use filters on a ref prop directly + buildInvalidRefCountTests(filters.OperatorEqual, []interface{}{"ref_prop"}, + schema.DataTypeInt, allValueTypesExcept(schema.DataTypeInt), "foo"), + + // id filters + { + { + name: "filter by id", + filters: buildFilter(filters.OperatorEqual, []interface{}{"id"}, + schema.DataTypeText, "foo"), + expectedError: nil, + }, + { + name: "filter by id with wrong type", + filters: buildFilter(filters.OperatorEqual, []interface{}{"id"}, + schema.DataTypeInt, "foo"), + expectedError: errors.Errorf( + "invalid 'where' filter: using [\"_id\"] to filter by uuid: " + + "must use \"valueText\" to specify the id"), + }, + }, + + // string and stringArray are deprecated as of v1.19 + // however they are allowed in filters and considered aliases + // for text and textArray + { + { + name: "[deprecated string] valid text 
search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"text_prop"}, + schema.DataTypeString, "foo"), + expectedError: nil, + }, + { + name: "[deprecated string] valid text array search", + filters: buildFilter(filters.OperatorEqual, []interface{}{"text_array_prop"}, + schema.DataTypeString, "foo"), + expectedError: nil, + }, + { + name: "[deprecated string] valid nested filter", + filters: buildNestedFilter(filters.OperatorAnd, + buildFilter(filters.OperatorEqual, []interface{}{"text_prop"}, + schema.DataTypeString, "foo"), + buildFilter(filters.OperatorEqual, []interface{}{"int_prop"}, + schema.DataTypeInt, "foo"), + ), + expectedError: nil, + }, + { + name: "[deprecated string] valid ref filter", + filters: buildFilter(filters.OperatorEqual, []interface{}{"ref_prop", "ClassTwo", "text_prop"}, + schema.DataTypeString, "foo"), + expectedError: nil, + }, + }, + } + + for _, outertest := range tests { + for _, test := range outertest { + t.Run(test.name, func(t *testing.T) { + params := dto.GetParams{ + ClassName: "ClassOne", + NearVector: &searchparams.NearVector{ + Vectors: []models.Vector{[]float32{0.8, 0.2, 0.7}}, + }, + Pagination: &filters.Pagination{Limit: 100}, + Filters: test.filters, + } + + //searchResults := []search.Result{ + // { + // ID: "id1", + // Schema: map[string]interface{}{ + // "name": "Foo", + // }, + // }, + //} + + // search := &fakeVectorSearcher{} + metrics := &fakeMetrics{} + metrics.On("AddUsageDimensions", mock.Anything, mock.Anything, mock.Anything, + mock.Anything) + traverser := newTraverser() + + if test.expectedError == nil { + // search. + // On("VectorSearch", mock.Anything, mock.Anything). 
+ // Return(searchResults, nil) + + _, err := traverser.GetClass(context.Background(), nil, params) + assert.Nil(t, err) + + } else { + _, err := traverser.GetClass(context.Background(), nil, params) + require.NotNil(t, err) + assert.Equal(t, test.expectedError.Error(), err.Error()) + } + }) + } + } +} + +// produces two classes including a cross-ref between them. Contains all +// possible prop types. +func schemaForFiltersValidation() schema.Schema { + return schema.Schema{ + Objects: &models.Schema{ + Classes: []*models.Class{ + { + Class: "ClassOne", + Properties: []*models.Property{ + { + Name: "text_prop", + DataType: schema.DataTypeText.PropString(), + }, + { + Name: "text_array_prop", + DataType: schema.DataTypeTextArray.PropString(), + }, + { + Name: "number_prop", + DataType: []string{string(schema.DataTypeNumber)}, + }, + { + Name: "int_prop", + DataType: []string{string(schema.DataTypeInt)}, + }, + { + Name: "number_array_prop", + DataType: []string{string(schema.DataTypeNumberArray)}, + }, + { + Name: "int_array_prop", + DataType: []string{string(schema.DataTypeIntArray)}, + }, + { + Name: "boolean_prop", + DataType: []string{string(schema.DataTypeBoolean)}, + }, + { + Name: "boolean_array_prop", + DataType: []string{string(schema.DataTypeBooleanArray)}, + }, + { + Name: "date_prop", + DataType: []string{string(schema.DataTypeDate)}, + }, + { + Name: "date_array_prop", + DataType: []string{string(schema.DataTypeDateArray)}, + }, + { + Name: "blob_prop", + DataType: []string{string(schema.DataTypeBlob)}, + }, + { + Name: "geo_prop", + DataType: []string{string(schema.DataTypeGeoCoordinates)}, + }, + { + Name: "phone_prop", + DataType: []string{string(schema.DataTypePhoneNumber)}, + }, + { + Name: "ref_prop", + DataType: []string{"ClassTwo"}, + }, + }, + }, + { + Class: "ClassTwo", + Properties: []*models.Property{ + { + Name: "text_prop", + DataType: schema.DataTypeText.PropString(), + }, + }, + }, + }, + }, + } +} + +func buildFilter(op 
filters.Operator, path []interface{}, dataType schema.DataType, + value interface{}, +) *filters.LocalFilter { + pathParsed, err := filters.ParsePath(path, "ClassOne") + if err != nil { + panic(err) + } + return &filters.LocalFilter{ + Root: &filters.Clause{ + Operator: op, + On: pathParsed, + Value: &filters.Value{ + Value: value, + Type: dataType, + }, + }, + } +} + +func buildNestedFilter(op filters.Operator, + childFilters ...*filters.LocalFilter, +) *filters.LocalFilter { + out := &filters.LocalFilter{ + Root: &filters.Clause{ + Operator: op, + Operands: make([]filters.Clause, len(childFilters)), + }, + } + + for i, child := range childFilters { + out.Root.Operands[i] = *child.Root + } + + return out +} + +func allValueTypesExcept(except ...schema.DataType) []schema.DataType { + all := []schema.DataType{ + schema.DataTypeString, + schema.DataTypeText, + schema.DataTypeInt, + schema.DataTypeNumber, + schema.DataTypeGeoCoordinates, + schema.DataTypePhoneNumber, + schema.DataTypeBoolean, + schema.DataTypeDate, + } + + out := make([]schema.DataType, 0, len(all)) + + i := 0 +outer: + for _, dt := range all { + for _, exc := range except { + if dt == exc { + continue outer + } + } + out = append(out, dt) + i++ + } + + return out[:i] +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_schema_search_params.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_schema_search_params.go new file mode 100644 index 0000000000000000000000000000000000000000..d9ccaaa0c4a80711f9a1c16cfe23d7167db2c0b5 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_schema_search_params.go @@ -0,0 +1,68 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "fmt" +) + +// TODO: is this still used? + +// SearchType to search for either class names or property names +type SearchType string + +const ( + // SearchTypeClass to search the contextionary for class names + SearchTypeClass SearchType = "class" + // SearchTypeProperty to search the contextionary for property names + SearchTypeProperty SearchType = "property" +) + +// SearchParams to be used for a SchemaSearch. See individual properties for +// additional documentation on what they do +type SearchParams struct { + // SearchType can be SearchTypeClass or SearchTypeProperty + SearchType SearchType + + // Name is the string-representation of the class or property name + Name string + + // Certainty must be a value between 0 and 1. The higher it is the narrower + // is the search, the lower it is, the wider the search is + Certainty float32 +} + +// Validate the feasibility of the specified arguments +func (p SearchParams) Validate() error { + if p.Name == "" { + return fmt.Errorf("Name cannot be empty") + } + + if err := p.validateCertaintyOrWeight(p.Certainty); err != nil { + return fmt.Errorf("invalid Certainty: %w", err) + } + + if p.SearchType != SearchTypeClass && p.SearchType != SearchTypeProperty { + return fmt.Errorf( + "SearchType must be SearchTypeClass or SearchTypeProperty, but got '%s'", p.SearchType) + } + + return nil +} + +func (p SearchParams) validateCertaintyOrWeight(c float32) error { + if c >= 0 && c <= 1 { + return nil + } + + return fmt.Errorf("must be between 0 and 1, but got '%f'", c) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_schema_search_params_test.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_schema_search_params_test.go new file mode 100644 index 0000000000000000000000000000000000000000..742486c4966ccaa83de2ba56d28a55225e8dd429 --- /dev/null +++ 
b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_schema_search_params_test.go @@ -0,0 +1,112 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. +// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +type schemaSearchTest struct { + name string + searchParams SearchParams + expectedError error +} + +type schemaSearchTests []schemaSearchTest + +func Test__SchemaSearch_Validation(t *testing.T) { + tests := schemaSearchTests{ + { + name: "valid params", + searchParams: SearchParams{ + SearchType: SearchTypeClass, + Name: "foo", + Certainty: 1.0, + }, + expectedError: nil, + }, + { + name: "missing search name", + searchParams: SearchParams{ + SearchType: SearchTypeClass, + Name: "", + Certainty: 0.0, + }, + expectedError: errors.New("Name cannot be empty"), + }, + { + name: "certainty too low", + searchParams: SearchParams{ + SearchType: SearchTypeClass, + Name: "bestName", + Certainty: -4, + }, + expectedError: errors.New("invalid Certainty: must be between 0 and 1, but got '-4.000000'"), + }, + { + name: "certainty too high", + searchParams: SearchParams{ + SearchType: SearchTypeClass, + Name: "bestName", + Certainty: 4, + }, + expectedError: errors.New("invalid Certainty: must be between 0 and 1, but got '4.000000'"), + }, + { + name: "invalid search type", + searchParams: SearchParams{ + SearchType: SearchType("invalid"), + Name: "bestName", + Certainty: 0.5, + }, + expectedError: errors.New("SearchType must be SearchTypeClass or SearchTypeProperty, but got 'invalid'"), + }, + { + name: "missing kind on class search", + searchParams: SearchParams{ + SearchType: SearchTypeClass, + Name: "bestName", + Certainty: 0.5, + }, + expectedError: nil, + }, + { + name: "valid 
keywords", + searchParams: SearchParams{ + SearchType: SearchTypeClass, + Name: "foo", + Certainty: 1.0, + }, + expectedError: nil, + }, + } + + tests.AssertValidation(t) +} + +func (s schemaSearchTests) AssertValidation(t *testing.T) { + for _, test := range s { + t.Run(test.name, func(t *testing.T) { + err := test.searchParams.Validate() + + // assert error + if test.expectedError != nil { + assert.Equal(t, test.expectedError.Error(), err.Error(), "should match the expected error") + } else { + assert.Equal(t, test.expectedError, err, "should match the expected error") + } + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_validate_distance_metrics.go b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_validate_distance_metrics.go new file mode 100644 index 0000000000000000000000000000000000000000..8c148eceed875e3ada75ed4d10464521efb15e1a --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/traverser/traverser_validate_distance_metrics.go @@ -0,0 +1,143 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package traverser + +import ( + "fmt" + "strings" + + "github.com/weaviate/weaviate/entities/schema/configvalidation" + + "github.com/pkg/errors" + "github.com/weaviate/weaviate/entities/dto" + schemaConfig "github.com/weaviate/weaviate/entities/schema/config" + "github.com/weaviate/weaviate/entities/vectorindex/common" +) + +func (t *Traverser) validateExploreDistance(params ExploreParams) error { + targetVectors := t.extractTargetVectors(params) + distType, err := t.validateCrossClassDistanceCompatibility(targetVectors) + if err != nil { + return err + } + + return t.validateExploreDistanceParams(params, distType) +} + +// ensures that all classes are configured with the same distance type. +// if all classes are configured with the same type, said type is returned. +// otherwise an error indicating which classes are configured differently. +func (t *Traverser) validateCrossClassDistanceCompatibility(targetVectors []string) (distType string, err error) { + s := t.schemaGetter.GetSchemaSkipAuth() + if s.Objects == nil { + return common.DefaultDistanceMetric, nil + } + + var ( + // a set used to determine the discrete number + // of vector index distance types used across + // all classes. if more than one type exists, + // a cross-class vector search is not possible + distancerTypes = make(map[string]struct{}) + + // a mapping of class name to vector index distance + // type. 
used to emit an error if more than one + // distance type is found + classDistanceConfigs = make(map[string]string) + ) + + for _, class := range s.Objects.Classes { + if class == nil { + continue + } + + vectorConfig, assertErr := schemaConfig.TypeAssertVectorIndex(class, targetVectors) + if assertErr != nil { + err = assertErr + return + } + + if len(vectorConfig) == 0 { + err = fmt.Errorf("empty vectorConfig fot %v, %v", class, targetVectors) + } + + distancerTypes[vectorConfig[0].DistanceName()] = struct{}{} + classDistanceConfigs[class.Class] = vectorConfig[0].DistanceName() + } + + if len(distancerTypes) != 1 { + err = crossClassDistCompatError(classDistanceConfigs) + return + } + + // the above check ensures that the + // map only contains one entry + for dt := range distancerTypes { + distType = dt + } + + return +} + +func (t *Traverser) validateExploreDistanceParams(params ExploreParams, distType string) error { + certainty := extractCertaintyFromExploreParams(params) + + if certainty == 0 && !params.WithCertaintyProp { + return nil + } + + if distType != common.DistanceCosine { + return certaintyUnsupportedError(distType) + } + + return nil +} + +func (t *Traverser) validateGetDistanceParams(params dto.GetParams) error { + class := t.schemaGetter.ReadOnlyClass(params.ClassName) + if class == nil { + return fmt.Errorf("failed to find class '%s' in schema", params.ClassName) + } + + targetVectors := t.targetVectorParamHelper.GetTargetVectorsFromParams(params) + if err := configvalidation.CheckCertaintyCompatibility(class, targetVectors); err != nil { + return err + } + + return nil +} + +func (t *Traverser) extractTargetVectors(params ExploreParams) []string { + if params.NearVector != nil { + return params.NearVector.TargetVectors + } + if params.NearObject != nil { + return params.NearObject.TargetVectors + } + return []string{} +} + +func crossClassDistCompatError(classDistanceConfigs map[string]string) error { + errorMsg := "vector search across 
classes not possible: found different distance metrics:" + for class, dist := range classDistanceConfigs { + errorMsg = fmt.Sprintf("%s class '%s' uses distance metric '%s',", errorMsg, class, dist) + } + errorMsg = strings.TrimSuffix(errorMsg, ",") + + return errors.New(errorMsg) +} + +func certaintyUnsupportedError(distType string) error { + return errors.Errorf( + "can't compute and return certainty when vector index is configured with %s distance", + distType) +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/vectorizer/combine.go b/platform/dbops/binaries/weaviate-src/usecases/vectorizer/combine.go new file mode 100644 index 0000000000000000000000000000000000000000..1eaad5b0a2d8e61ec720388f87a8cfddf1f44a61 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/vectorizer/combine.go @@ -0,0 +1,51 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import "github.com/weaviate/weaviate/entities/dto" + +// CombineVectors combines all of the vector into sum of their parts +func CombineVectors[T dto.Embedding](vectors []T) T { + switch any(vectors).(type) { + case [][]float32: + return any(CombineVectorsWithWeights(any(vectors).([][]float32), nil)).(T) + default: + return nil + } +} + +func CombineVectorsWithWeights(vectors [][]float32, weights []float32) []float32 { + maxVectorLength := 0 + for i := range vectors { + if len(vectors[i]) > maxVectorLength { + maxVectorLength = len(vectors[i]) + } + } + sums := make([]float32, maxVectorLength) + dividers := make([]int32, maxVectorLength) + for indx, vector := range vectors { + for i := 0; i < len(vector); i++ { + if weights != nil { + // apply weight to vector value + sums[i] += vector[i] * weights[indx] + } else { + sums[i] += vector[i] + } + dividers[i]++ + } + } + for i := 0; i < len(sums); i++ { + sums[i] /= float32(dividers[i]) + } + + return sums +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/vectorizer/combine_test.go b/platform/dbops/binaries/weaviate-src/usecases/vectorizer/combine_test.go new file mode 100644 index 0000000000000000000000000000000000000000..816543d610a86ad88a9018c84cf2d34e998930f9 --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/vectorizer/combine_test.go @@ -0,0 +1,137 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "reflect" + "testing" +) + +func TestCombineVectors(t *testing.T) { + type args struct { + vectors [][]float32 + } + tests := []struct { + name string + args args + want []float32 + }{ + { + "Combine simple vectors", + args{ + vectors: [][]float32{ + {1, 2, 3}, + {2, 3, 4}, + }, + }, + []float32{1.5, 2.5, 3.5}, + }, + { + "Combine empty vectors", + args{ + vectors: [][]float32{}, + }, + []float32{}, + }, + { + "Combine more complex vectors", + args{ + vectors: [][]float32{ + {-0.214571, -0.605529, -0.335769, -0.185277, -0.212256, 0.478032, -0.536662, 0.298211}, + {-0.14713769, -0.06872862, 0.09911085, -0.06342313, 0.10092197, -0.06624051, -0.06812558, 0.07360107}, + {-0.18123996, -0.2089612, 0.03738429, -0.26224917, 0.18499854, -0.2620146, -0.12802331, -0.07601682}, + {-0.06659584, -0.17120242, 0.07603133, -0.07171547, 0.12537181, -0.19367254, -0.18376349, -0.05517439}, + }, + }, + []float32{-0.15238613, -0.2636053, -0.030810636, -0.1456662, 0.049759082, -0.010973915, -0.2291436, 0.060155217}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := CombineVectors(tt.args.vectors); !reflect.DeepEqual(got, tt.want) { + t.Errorf("CombineVectors() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestCombineVectorsWithWeights(t *testing.T) { + type args struct { + vectors [][]float32 + weights []float32 + } + tests := []struct { + name string + args args + want []float32 + }{ + { + "Combine simple vectors with 0 weights", + args{ + vectors: [][]float32{ + {1, 2, 3}, + {2, 3, 4}, + }, + weights: []float32{0, 0, 0}, + }, + []float32{0, 0, 0}, + }, + { + "Combine simple vectors with 1 weights", + args{ + vectors: [][]float32{ + {1, 2, 3}, + {2, 3, 4}, + }, + weights: []float32{1, 1, 1}, + }, + []float32{1.5, 2.5, 3.5}, + }, + { + "Combine empty vectors", + args{ + vectors: [][]float32{}, + weights: []float32{}, + }, + []float32{}, + }, + { + "Combine simple 
vectors without weights", + args{ + vectors: [][]float32{ + {1, 2, 3}, + {2, 3, 4}, + }, + weights: nil, + }, + []float32{1.5, 2.5, 3.5}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := CombineVectorsWithWeights(tt.args.vectors, tt.args.weights); !reflect.DeepEqual(got, tt.want) { + t.Errorf("CombineVectors() = %v, want %v", got, tt.want) + } + }) + } +} + +func BenchmarkCombine(b *testing.B) { + for i := 0; i < b.N; i++ { + CombineVectors([][]float32{ + {-0.214571, -0.605529, -0.335769, -0.185277, -0.212256, 0.478032, -0.536662, 0.298211}, + {-0.14713769, -0.06872862, 0.09911085, -0.06342313, 0.10092197, -0.06624051, -0.06812558, 0.07360107}, + {-0.18123996, -0.2089612, 0.03738429, -0.26224917, 0.18499854, -0.2620146, -0.12802331, -0.07601682}, + {-0.06659584, -0.17120242, 0.07603133, -0.07171547, 0.12537181, -0.19367254, -0.18376349, -0.05517439}, + }) + } +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/vectorizer/distance.go b/platform/dbops/binaries/weaviate-src/usecases/vectorizer/distance.go new file mode 100644 index 0000000000000000000000000000000000000000..49ba4370dbe1a82d99dc5db45734bd6cebc58bad --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/vectorizer/distance.go @@ -0,0 +1,48 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "fmt" + "math" +) + +// NormalizedDistance between two arbitrary vectors, errors if dimensions don't +// match, will return results between 0 (no distance) and 1 (maximum distance) +func NormalizedDistance(a, b []float32) (float32, error) { + sim, err := cosineSim(a, b) + if err != nil { + return 1, fmt.Errorf("normalized distance: %w", err) + } + + return (1 - sim) / 2, nil +} + +func cosineSim(a, b []float32) (float32, error) { + if len(a) != len(b) { + return 0, fmt.Errorf("vectors have different dimensions") + } + + var ( + sumProduct float64 + sumASquare float64 + sumBSquare float64 + ) + + for i := range a { + sumProduct += float64(a[i] * b[i]) + sumASquare += float64(a[i] * a[i]) + sumBSquare += float64(b[i] * b[i]) + } + + return float32(sumProduct / (math.Sqrt(sumASquare * sumBSquare))), nil +} diff --git a/platform/dbops/binaries/weaviate-src/usecases/vectorizer/distance_test.go b/platform/dbops/binaries/weaviate-src/usecases/vectorizer/distance_test.go new file mode 100644 index 0000000000000000000000000000000000000000..e0db0283e7c78670ab710e6c1b9028b4e5c8044d --- /dev/null +++ b/platform/dbops/binaries/weaviate-src/usecases/vectorizer/distance_test.go @@ -0,0 +1,85 @@ +// _ _ +// __ _____ __ ___ ___ __ _| |_ ___ +// \ \ /\ / / _ \/ _` \ \ / / |/ _` | __/ _ \ +// \ V V / __/ (_| |\ V /| | (_| | || __/ +// \_/\_/ \___|\__,_| \_/ |_|\__,_|\__\___| +// +// Copyright © 2016 - 2025 Weaviate B.V. All rights reserved. 
+// +// CONTACT: hello@weaviate.io +// + +package vectorizer + +import ( + "reflect" + "testing" +) + +func TestCosineSimilarity(t *testing.T) { + type args struct { + a, b []float32 + } + tests := []struct { + name string + args args + want float32 + }{ + { + "Distance between identical vectors", + args{ + a: []float32{1, 2, 3}, + b: []float32{1, 2, 3}, + }, + 1.0, + }, + { + "Distance between similar vectors", + args{ + a: []float32{1, 2, 3}, + b: []float32{2, 3, 4}, + }, + 0.99258333, + }, + { + "Distance between opposite vectors", + args{ + a: []float32{0, 1}, + b: []float32{0, -1}, + }, + -1.0, + }, + { + "Distance between perpendicular vectors", + args{ + a: []float32{0, 1}, + b: []float32{1, 0}, + }, + 0.0, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got, _ := cosineSim(tt.args.a, tt.args.b); !reflect.DeepEqual(got, tt.want) { + t.Errorf("CombineVectors() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestCosineSim_DifferentDimensions(t *testing.T) { + a := []float32{1, 2, 3} + b := []float32{4, 5} + _, err := cosineSim(a, b) + if err == nil { + t.Errorf("expected error, got nil") + } +} + +func BenchmarkCosineSimilarity(b *testing.B) { + for i := 0; i < b.N; i++ { + cosineSim([]float32{-0.214571, -0.605529, -0.335769, -0.185277, -0.212256, 0.478032, -0.536662, 0.298211}, + []float32{-0.14713769, -0.06872862, 0.09911085, -0.06342313, 0.10092197, -0.06624051, -0.06812558, 0.07360107}, + ) + } +}