text
stringlengths
11
4.05M
package sql

import (
	"context"

	"github.com/opentracing/opentracing-go"
	"github.com/opentracing/opentracing-go/ext"
	tracinglog "github.com/opentracing/opentracing-go/log"
)

// startTracingSpan starts a client-side span as a child of the span carried in
// ctx, tagging it with the DB statement, before any db operation.
// It returns nil when ctx holds no parent span; callers must treat a nil
// result as "tracing disabled" (finishTracingSpan accepts nil for this).
func startTracingSpan(ctx context.Context, operationName, query string) opentracing.Span {
	parentSpan := opentracing.SpanFromContext(ctx)
	if parentSpan == nil {
		// log.ErrorContext(ctx, "cannot found tracing span from context") // TODO: enable this
		return nil
	}
	span := opentracing.GlobalTracer().StartSpan(operationName, opentracing.ChildOf(parentSpan.Context()))
	ext.SpanKindRPCClient.Set(span)
	ext.DBType.Set(span, "database")
	ext.DBStatement.Set(span, query)
	return span
}

// finishTracingSpan finishes a span after a db operation, recording a non-nil
// err as an error event on the span.
// Bug fix: startTracingSpan may return nil, and the previous version would
// panic on span.Finish(); a nil span is now ignored so callers can
// unconditionally defer this.
func finishTracingSpan(span opentracing.Span, err error) {
	if span == nil {
		return
	}
	if err != nil {
		ext.Error.Set(span, true)
		span.LogFields(tracinglog.String("event", "error"), tracinglog.String("message", err.Error()))
	}
	span.Finish()
}
// an example of cli // https://github.com/urfave/cli // needs to use go modules // go install // cliex simon // returns Hello 'Simon' package main import ( "fmt" "log" "os" "github.com/urfave/cli/v2" ) func main() { app := &cli.App{ Action: func(c *cli.Context) error { fmt.Printf("Hello %q", c.Args().Get(0)) return nil }, } err := app.Run(os.Args) if err != nil { log.Fatal(err) } }
package pie // ZipLongest will return a new slice containing pairs with elements from input slices. // If input slices have diffrent length, missing elements will be padded with default values. func ZipLongest[T1, T2 any](ss1 []T1, ss2 []T2) []Zipped[T1, T2] { var minLen, maxLen int var small int8 if len(ss1) <= len(ss2) { small = 1 minLen = len(ss1) maxLen = len(ss2) } else { small = 2 minLen = len(ss2) maxLen = len(ss1) } ss3 := []Zipped[T1, T2]{} for i := 0; i < minLen; i++ { ss3 = append(ss3, Zipped[T1, T2]{ss1[i], ss2[i]}) } if small == 1 { var t T1 for i := minLen; i < maxLen; i++ { ss3 = append(ss3, Zipped[T1, T2]{t, ss2[i]}) } } else { var t T2 for i := minLen; i < maxLen; i++ { ss3 = append(ss3, Zipped[T1, T2]{ss1[i], t}) } } return ss3 }
package funding import ( "github.com/centrifuge/go-centrifuge/bootstrap" "github.com/centrifuge/go-centrifuge/config" "github.com/centrifuge/go-centrifuge/documents" "github.com/centrifuge/go-centrifuge/errors" ) const ( // BootstrappedFundingAPIHandler is the key for the api handler in Context BootstrappedFundingAPIHandler = "Funding API Handler" ) // Bootstrapper implements Bootstrapper Interface type Bootstrapper struct{} // Bootstrap adds the funding API handler to the context. func (Bootstrapper) Bootstrap(ctx map[string]interface{}) (err error) { defer func() { if err != nil { err = errors.New("funding bootstrapper: %v", err) } }() cfgSrv, ok := ctx[config.BootstrappedConfigStorage].(config.Service) if !ok { return errors.New("config service not initialised") } docSrv, ok := ctx[documents.BootstrappedDocumentService].(documents.Service) if !ok { return errors.New("document service not initialised") } tokenRegistry, ok := ctx[bootstrap.BootstrappedInvoiceUnpaid].(documents.TokenRegistry) if !ok { return errors.New("token registry not initialisation") } srv := DefaultService(docSrv, tokenRegistry) handler := GRPCHandler(cfgSrv, srv) ctx[BootstrappedFundingAPIHandler] = handler return nil }
package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"net/url"
)

// Message is the text/description pair pushed to the remote notify service.
type Message struct {
	Text, Desp string
}

const (
	pushUrl = "https://sc.ftqq.com/SCU150195T2c5855e7da9b91fef1774a40de01c2cd5fffb1ee24c03.send"
)

// Push sends the message to the remote server as a form POST and logs the
// server's response body.
func (m Message) Push() {
	form := make(url.Values)
	form.Set("text", m.Text)
	form.Set("desp", m.Desp)

	resp, err := http.PostForm(pushUrl, form)
	if err != nil {
		log.Println("message push failed", err)
		// Bug fix: resp is nil on error; falling through to
		// resp.Body.Close() panicked with a nil pointer dereference.
		return
	}
	defer resp.Body.Close()

	body, _ := ioutil.ReadAll(resp.Body) // best-effort: response is only logged
	log.Printf("message push resp: %s", body)
}

// NewMessage builds a Message summarizing the given stocks: a short one-line
// text and a longer markdown-ish description, one bullet per stock.
func NewMessage(stocks []Stock) Message {
	var text, desp string
	for i := 0; i < len(stocks); i++ {
		text += fmt.Sprintf("%s:%.2f%% ", stocks[i].Name, stocks[i].ChgR)
		desp += fmt.Sprintf("* %s(%s)\t%.3f\t%.2f%%(%.3f)\n\r", stocks[i].Name, stocks[i].Code, stocks[i].Price, stocks[i].ChgR, stocks[i].Chg)
	}
	return Message{Text: text, Desp: desp}
}
package xattr import "github.com/kormoc/xattr/xattrsyscall" func Remove(filePath string, xattrName string) error { return syscallErrorToXAttrError(xattrsyscall.Removexattr(filePath, xattrName)) } func Has(filePath string, xattrName string) (bool, error) { _, err := GetBytes(filePath, xattrName) switch syscallErrorToXAttrError(err) { case nil: return true, nil case XAttrErrorAttributeNotFound: return false, nil case XAttrErrorNoDataAvailable: return false, nil case XAttrErrorResultTooLarge: return true, nil default: return false, err } }
package redisutil

import (
	"net/url"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// TestParseClusterURL verifies that every supported query parameter of a
// redis+cluster URL is decoded into the matching cluster-options field, and
// that the userinfo part of the URL wins over the username/password query
// parameters.
func TestParseClusterURL(t *testing.T) {
	opts, err := ParseClusterURL("redis+cluster://CLUSTER_USERNAME:CLUSTER_PASSWORD@localhost:26379,otherhost:26479/?" + (&url.Values{
		"read_only":            {"true"},
		"username":             {"USERNAME"},
		"password":             {"PASSWORD"},
		"max_retries":          {"11"},
		"min_retry_backoff":    {"31s"},
		"max_retry_backoff":    {"22m"},
		"dial_timeout":         {"3m"},
		"read_timeout":         {"4m"},
		"write_timeout":        {"5m"},
		"pool_size":            {"7"},
		"min_idle_conns":       {"2"},
		"max_conn_age":         {"1h"},
		"pool_timeout":         {"30m"},
		"idle_timeout":         {"31m"},
		"idle_check_frequency": {"32m"},
	}).Encode())
	require.NoError(t, err)
	// Host list comes from the comma-separated authority component.
	assert.Equal(t, []string{"localhost:26379", "otherhost:26479"}, opts.Addrs)
	assert.Equal(t, "CLUSTER_USERNAME", opts.Username)
	assert.Equal(t, "CLUSTER_PASSWORD", opts.Password)
	assert.True(t, opts.ReadOnly)
	assert.Equal(t, 11, opts.MaxRetries)
	assert.Equal(t, time.Second*31, opts.MinRetryBackoff)
	assert.Equal(t, time.Minute*22, opts.MaxRetryBackoff)
	assert.Equal(t, time.Minute*3, opts.DialTimeout)
	assert.Equal(t, time.Minute*4, opts.ReadTimeout)
	assert.Equal(t, time.Minute*5, opts.WriteTimeout)
	assert.Equal(t, 7, opts.PoolSize)
	assert.Equal(t, 2, opts.MinIdleConns)
	assert.Equal(t, time.Hour, opts.MaxConnAge)
	assert.Equal(t, time.Minute*30, opts.PoolTimeout)
	assert.Equal(t, time.Minute*31, opts.IdleTimeout)
	assert.Equal(t, time.Minute*32, opts.IdleCheckFrequency)
}

// TestParseSentinelURL verifies sentinel-specific parsing: master name and DB
// index from the URL path, the sentinel password from the userinfo, and the
// sentinel-only query flags (slave_only, use_disconnected_slaves) alongside
// the shared option set.
func TestParseSentinelURL(t *testing.T) {
	opts, err := ParseSentinelURL("redis+sentinel://:SENTINEL_PASSWORD@localhost:26379,otherhost:26479/mymaster/3?" + (&url.Values{
		"slave_only":              {"true"},
		"use_disconnected_slaves": {"T"}, // "T" also parses as a boolean true
		"username":                {"USERNAME"},
		"password":                {"PASSWORD"},
		"max_retries":             {"11"},
		"min_retry_backoff":       {"31s"},
		"max_retry_backoff":       {"22m"},
		"dial_timeout":            {"3m"},
		"read_timeout":            {"4m"},
		"write_timeout":           {"5m"},
		"pool_size":               {"7"},
		"min_idle_conns":          {"2"},
		"max_conn_age":            {"1h"},
		"pool_timeout":            {"30m"},
		"idle_timeout":            {"31m"},
		"idle_check_frequency":    {"32m"},
	}).Encode())
	require.NoError(t, err)
	assert.Equal(t, "mymaster", opts.MasterName)
	assert.Equal(t, []string{"localhost:26379", "otherhost:26479"}, opts.SentinelAddrs)
	assert.Equal(t, "SENTINEL_PASSWORD", opts.SentinelPassword)
	assert.True(t, opts.SlaveOnly)
	assert.True(t, opts.UseDisconnectedSlaves)
	assert.Equal(t, "USERNAME", opts.Username)
	assert.Equal(t, "PASSWORD", opts.Password)
	assert.Equal(t, 3, opts.DB)
	assert.Equal(t, 11, opts.MaxRetries)
	assert.Equal(t, time.Second*31, opts.MinRetryBackoff)
	assert.Equal(t, time.Minute*22, opts.MaxRetryBackoff)
	assert.Equal(t, time.Minute*3, opts.DialTimeout)
	assert.Equal(t, time.Minute*4, opts.ReadTimeout)
	assert.Equal(t, time.Minute*5, opts.WriteTimeout)
	assert.Equal(t, 7, opts.PoolSize)
	assert.Equal(t, 2, opts.MinIdleConns)
	assert.Equal(t, time.Hour, opts.MaxConnAge)
	assert.Equal(t, time.Minute*30, opts.PoolTimeout)
	assert.Equal(t, time.Minute*31, opts.IdleTimeout)
	assert.Equal(t, time.Minute*32, opts.IdleCheckFrequency)
}
// Copyright 2017 Vlad Didenko. All rights reserved. // See the included LICENSE.md file for licensing information package slops // import "go.didenko.com/slops" // CollectVariety applies a Collector to every item in left and // right slices. Common items are collected as many times as // there are common occurrences by the related collector. Non-common // items each, with their duplicates which are extra to the common // items, are collected by related leftOnly and rightOnly Collectors. // Both left and right input slices are expected to be sorted. func CollectVariety(left, right []string, onlyLeft, common, onlyRight Collector) []string { collected := make([]string, 0) for l, r := 0, 0; l < len(left) || r < len(right); { if l < len(left) && (r == len(right) || left[l] < right[r]) { collected = onlyLeft(collected, left[l]) l++ continue } if r < len(right) && (l == len(left) || left[l] > right[r]) { collected = onlyRight(collected, right[r]) r++ continue } collected = common(collected, left[l]) l++ r++ } return collected }
package arena // RunFullTournament runs all possible 1-against-1 match combinations func RunFullTournament(arena *Arena) { programCount := arena.ProgramCount() match := make([]int, 2) for i := 0; i < programCount-1; i++ { for j := i + 1; j < programCount; j++ { match[0], match[1] = i, j arena.RunMatch(match) } } }
package http

import (
	"errors"
	"fmt"
	"github.com/whosonfirst/go-sanitize"
	"github.com/whosonfirst/go-whosonfirst-image"
	"github.com/whosonfirst/go-whosonfirst-readwrite/reader"
	"github.com/whosonfirst/go-whosonfirst-static/utils"
	gohttp "net/http"
)

// RasterSize names one output size bucket with its pixel bounds.
type RasterSize struct {
	Label     string
	MaxHeight int
	MaxWidth  int
}

// RasterOptions configures the raster handler: output format and the set of
// selectable sizes keyed by label.
type RasterOptions struct {
	Format string
	Sizes  map[string]RasterSize
}

// NewDefaultRasterOptions returns PNG output with the standard xsm/sm/med/lg
// size buckets.
func NewDefaultRasterOptions() (*RasterOptions, error) {

	xsm := RasterSize{
		Label:     "xsm",
		MaxHeight: 100,
		MaxWidth:  100,
	}

	sm := RasterSize{
		Label:     "sm",
		MaxHeight: 300,
		MaxWidth:  300,
	}

	med := RasterSize{
		Label:     "med",
		MaxHeight: 640,
		MaxWidth:  640,
	}

	lg := RasterSize{
		Label:     "lg",
		MaxHeight: 1024,
		MaxWidth:  1024,
	}

	sz := map[string]RasterSize{
		"xsm": xsm,
		"sm":  sm,
		"med": med,
		"lg":  lg,
	}

	opts := RasterOptions{
		Format: "png",
		Sizes:  sz,
	}

	return &opts, nil
}

// RasterHandler returns an HTTP handler that renders the requested feature as
// a raster image. The size is chosen via the ?size= query parameter
// (default "lg"). Only PNG output is supported.
func RasterHandler(r reader.Reader, opts *RasterOptions) (gohttp.Handler, error) {

	if opts.Format != "png" {
		return nil, errors.New("Invalid or unsupported raster format")
	}

	fn := func(rsp gohttp.ResponseWriter, req *gohttp.Request) {

		f, err, status := utils.FeatureFromRequest(req, r)

		if err != nil {
			gohttp.Error(rsp, err.Error(), status)
			return
		}

		sn_opts := sanitize.DefaultOptions()
		sz := "lg"

		query := req.URL.Query()
		query_sz := query.Get("size")

		req_sz, err := sanitize.SanitizeString(query_sz, sn_opts)

		if err != nil {
			// Bug fix: previously reused the stale status from the
			// successful FeatureFromRequest call; a bad query parameter
			// is the client's fault.
			gohttp.Error(rsp, err.Error(), gohttp.StatusBadRequest)
			return
		}

		if req_sz != "" {
			sz = req_sz
		}

		sz_info, ok := opts.Sizes[sz]

		if !ok {
			gohttp.Error(rsp, "Invalid output size", gohttp.StatusBadRequest)
			return
		}

		img_opts := image.NewDefaultOptions()
		img_opts.Writer = rsp

		img_opts.Height = sz_info.MaxHeight
		img_opts.Width = sz_info.MaxWidth

		content_type := fmt.Sprintf("image/%s", opts.Format)
		rsp.Header().Set("Content-Type", content_type)

		// Bug fix: the encode error was silently discarded. Headers may
		// already be partially written by the encoder, so this error
		// response is best-effort. NOTE(review): assumes FeatureToPNG
		// returns an error — confirm against the image package.
		err = image.FeatureToPNG(f, img_opts)

		if err != nil {
			gohttp.Error(rsp, err.Error(), gohttp.StatusInternalServerError)
			return
		}
	}

	h := gohttp.HandlerFunc(fn)
	return h, nil
}
package Gophp

import (
	"encoding/hex"
	"math"
	"math/rand"
	"strconv"
	"time"
)

// rng is seeded once at startup. The previous implementation re-seeded on
// every Rand call, so two calls within the same clock tick returned the same
// value.
var rng = rand.New(rand.NewSource(time.Now().UnixNano()))

// Rand rand()
// Range: [0, 2147483647]
// Returns a uniform random int in [min, max]. Panics when min > max or when
// max exceeds PHP's getrandmax() (2^31-1).
func Rand(min, max int) int {
	if min > max {
		panic("min: min cannot be greater than max")
	}
	// PHP: getrandmax()
	if int31 := 1<<31 - 1; max > int31 {
		panic("max: max can not be greater than " + strconv.Itoa(int31))
	}
	if min == max {
		return min
	}
	return rng.Intn(max+1-min) + min
}

// Round round()
// Rounds half away from zero, like PHP's round(). Bug fix: the previous
// Floor(value+0.5) form returned 0 for -0.5 (PHP returns -1).
func Round(value float64) float64 {
	return math.Round(value)
}

// Max max()
// Returns the largest of at least two numbers; panics on fewer.
func Max(nums ...float64) float64 {
	if len(nums) < 2 {
		panic("nums: the nums length is less than 2")
	}
	max := nums[0]
	for i := 1; i < len(nums); i++ {
		max = math.Max(max, nums[i])
	}
	return max
}

// Min min()
// Returns the smallest of at least two numbers; panics on fewer.
func Min(nums ...float64) float64 {
	if len(nums) < 2 {
		panic("nums: the nums length is less than 2")
	}
	min := nums[0]
	for i := 1; i < len(nums); i++ {
		min = math.Min(min, nums[i])
	}
	return min
}

// Hex to binary string.
// Hex2bin hex2bin()
// Returns "" when data is not valid hex (matching PHP's falsy return).
func Hex2bin(data string) string {
	b, err := hex.DecodeString(data)
	if err != nil {
		return ""
	}
	return string(b)
}

// Binary string to hex.
// Bin2hex bin2hex()
func Bin2hex(str string) string {
	return hex.EncodeToString([]byte(str))
}

// IsNan is_nan()
func IsNan(val float64) bool {
	return math.IsNaN(val)
}
package calc import "testing" func TestAdd(t *testing.T) { cases := []struct { ain, bin, want int }{ {1, 2, 3}, {0, 0, 0}, {100, -110, -10}, } for _, c := range cases { got := Add(c.ain, c.bin) if got != c.want { t.Errorf("Add(%d, %d) == %d, want %d", c.ain, c.bin, got, c.want) } } }
package models

import (
	"time"

	"github.com/sirupsen/logrus"
	"golang.org/x/crypto/bcrypt"
)

// User is the account model with many2many links to permissions and groups.
// The *Ids fields are API-only (`gorm:"-"`) mirrors of the relation tables.
type User struct {
	ID          uint       `gorm:"primaryKey" uri:"id" json:"id"`
	CreatedAt   time.Time  `json:"created_at"`
	UpdatedAt   time.Time  `json:"updated_at"`
	DeletedAt   *time.Time `gorm:"index" json:"deleted_at"`
	Username    string     `gorm:"type:varchar(50);not null;unique" description:"用户名" json:"username"`
	Password    string     `gorm:"type:varchar(100)" json:"password,omitempty"` // omitempty: drop from JSON when empty; a "-" name would omit it entirely
	ChineseName string     `gorm:"type:varchar(25)" description:"中文名" json:"chinese_name"`
	// UserDN string `gorm:"type:varchar(100)" description:"LDAP DN" json:"user_dn"`
	Active        bool          `gorm:"default:1" json:"active"`
	Superuser     bool          `gorm:"default:0" json:"superuser"`
	Phone         string        `gorm:"type:varchar(20)" description:"手机" json:"phone"`
	Permissions   []*Permission `gorm:"many2many:user_permission" json:"permissions"`
	PermissionIds []uint        `gorm:"-" json:"permission_ids"`
	Groups        []*Group      `gorm:"many2many:user_group" json:"groups"`
	GroupIds      []uint        `gorm:"-" json:"group_ids"`
}

// Permission links to users directly and to groups via group_permission.
type Permission struct {
	ID          uint     `gorm:"primaryKey" uri:"id" json:"id"`
	Name        string   `gorm:"type:varchar(30);not null;unique" json:"name"`
	Description string   `gorm:"type:text" json:"description"`
	Users       []*User  `gorm:"many2many:user_permission" json:"users"`
	UserIds     []uint   `gorm:"-" json:"user_ids"`
	Groups      []*Group `gorm:"many2many:group_permission" json:"groups"`
	GroupIds    []uint   `gorm:"-" json:"group_ids"`
}

// Group bundles users and permissions.
type Group struct {
	ID            uint          `gorm:"primaryKey" uri:"id" json:"id"`
	Name          string        `gorm:"type:varchar(30);not null" json:"name"`
	Description   string        `gorm:"type:text" json:"description"`
	Permissions   []*Permission `gorm:"many2many:group_permission" json:"permissions"`
	PermissionIds []uint        `gorm:"-" json:"permission_ids"`
	Users         []*User       `gorm:"many2many:user_group" json:"users"`
	UserIds       []uint        `gorm:"-" json:"user_ids"`
}

// func (s *User) AfterFind(tx *gorm.DB) (err error) {
// 	if s.Password != "" {
// 		s.Password = ""
// 	}
// 	return
// }

// EncryptPassword bcrypt-hashes s.Password in place.
func (s *User) EncryptPassword() error {
	hash, err := bcrypt.GenerateFromPassword([]byte(s.Password), bcrypt.DefaultCost)
	if err != nil {
		logrus.Error("encrypt password error:", err)
		return err
	}
	s.Password = string(hash)
	return nil
}

// SetPassword hashes tempPw and persists it for this user.
func (s *User) SetPassword(tempPw string) error {
	hash, err := bcrypt.GenerateFromPassword([]byte(tempPw), bcrypt.DefaultCost)
	if err != nil {
		logrus.Error("set password error:", err)
		return err
	}
	// Bug fix: the DB error from Update was previously discarded.
	result := db.Model(s).Update("Password", string(hash))
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}

// CheckPassword compares s.Password (plaintext) against the stored bcrypt
// hash for s.Username. Returns false on any lookup or comparison failure.
func (s *User) CheckPassword() bool {
	if s.Username == "" {
		return false
	}
	var userPassword string
	row := db.Table("user").Where("username = ?", s.Username).Select("password").Row()
	err := row.Scan(&userPassword)
	if err != nil {
		logrus.Warn("get user password error: ", s.Username)
		logrus.Error(err)
		return false
	}
	if err := bcrypt.CompareHashAndPassword([]byte(userPassword), []byte(s.Password)); err != nil {
		logrus.Warn("password error:", s.Username)
		return false
	}
	logrus.Info("password ok:", s.Username)
	return true
}

// LoadPermAssociationIds fills s.PermissionIds from the user_permission table.
func (s *User) LoadPermAssociationIds() error {
	var permissionIds []uint
	result := db.Table("user_permission").Where("user_id = ?", s.ID).Pluck("permission_id", &permissionIds)
	if result.Error != nil {
		// Bug fix: the error was logged twice here.
		logrus.Error(result.Error)
		return result.Error
	}
	s.PermissionIds = permissionIds
	return nil
}

// LoadGroupAssociationIds fills s.GroupIds from the user_group table.
func (s *User) LoadGroupAssociationIds() error {
	var groupIds []uint
	result := db.Table("user_group").Where("user_id = ?", s.ID).Pluck("group_id", &groupIds)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	s.GroupIds = groupIds
	return nil
}

// LoadAllAssociationIds fills both s.GroupIds and s.PermissionIds.
func (s *User) LoadAllAssociationIds() error {
	var groupIds []uint
	result := db.Table("user_group").Where("user_id = ?", s.ID).Pluck("group_id", &groupIds)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	s.GroupIds = groupIds

	var permissionIds []uint
	result = db.Table("user_permission").Where("user_id = ?", s.ID).Pluck("permission_id", &permissionIds)
	if result.Error != nil {
		// Bug fix: the error was logged twice here.
		logrus.Error(result.Error)
		return result.Error
	}
	s.PermissionIds = permissionIds
	return nil
}

// LoadAllAssociations preloads the Groups and Permissions relations.
func (s *User) LoadAllAssociations() error {
	result := db.Preload("Groups").Preload("Permissions").First(s)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}

// Create inserts the user together with relations built from the *Ids fields.
func (s *User) Create() error {
	// add relations
	var groups []*Group
	var permissions []*Permission
	for _, groupId := range s.GroupIds {
		group := Group{ID: groupId}
		groups = append(groups, &group)
	}
	s.Groups = groups
	for _, permId := range s.PermissionIds {
		perm := Permission{ID: permId}
		permissions = append(permissions, &perm)
	}
	s.Permissions = permissions
	result := db.Create(s)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}

// Delete removes the user after clearing its relation rows. A zero ID is a
// no-op to avoid deleting by empty primary key.
func (s *User) Delete() error {
	if s.ID == 0 {
		return nil
	}
	// clear relations (best-effort; errors intentionally ignored as before)
	db.Model(s).Association("Groups").Clear()
	db.Model(s).Association("Permissions").Clear()
	result := db.Delete(s)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}

// Update saves the user's fields and, when the *Ids fields are set, replaces
// the corresponding relation sets.
func (s *User) Update() error {
	// replace relations
	var groups []*Group
	var permissions []*Permission
	if s.GroupIds != nil {
		for _, groupId := range s.GroupIds {
			group := Group{ID: groupId}
			groups = append(groups, &group)
		}
		result := db.Model(s).Association("Groups").Replace(groups)
		if result.Error != nil {
			logrus.Error(result.Error)
			return result.Error
		}
	}
	if s.PermissionIds != nil {
		for _, permId := range s.PermissionIds {
			perm := Permission{ID: permId}
			permissions = append(permissions, &perm)
		}
		result := db.Model(s).Association("Permissions").Replace(permissions)
		if result.Error != nil {
			logrus.Error(result.Error)
			return result.Error
		}
	}
	result := db.Model(s).Updates(s)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}

// LoadAllAssociationIds fills s.UserIds and s.PermissionIds for the group.
func (s *Group) LoadAllAssociationIds() error {
	var userIds []uint
	result := db.Table("user_group").Where("group_id = ?", s.ID).Pluck("user_id", &userIds)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	s.UserIds = userIds

	var permissionIds []uint
	result = db.Table("group_permission").Where("group_id = ?", s.ID).Pluck("permission_id", &permissionIds)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	s.PermissionIds = permissionIds
	return nil
}

// LoadAllAssociations preloads the group's Users and Permissions relations.
func (s *Group) LoadAllAssociations() error {
	result := db.Preload("Users").Preload("Permissions").First(s)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}

// Create inserts the group together with relations built from the *Ids fields.
func (s *Group) Create() error {
	// add relations
	var users []*User
	var permissions []*Permission
	for _, uerId := range s.UserIds {
		user := User{ID: uerId}
		users = append(users, &user)
	}
	s.Users = users
	for _, permId := range s.PermissionIds {
		perm := Permission{ID: permId}
		permissions = append(permissions, &perm)
	}
	s.Permissions = permissions
	result := db.Create(s)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}

// Delete removes the group after clearing its relation rows.
func (s *Group) Delete() error {
	if s.ID == 0 {
		return nil
	}
	// clear relations (best-effort; errors intentionally ignored as before)
	db.Model(s).Association("Users").Clear()
	db.Model(s).Association("Permissions").Clear()
	result := db.Delete(s)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}

// Update saves the group's fields and, when the *Ids fields are set, replaces
// the corresponding relation sets.
func (s *Group) Update() error {
	// replace relations
	var users []*User
	var permissions []*Permission
	if s.UserIds != nil {
		for _, uerId := range s.UserIds {
			user := User{ID: uerId}
			users = append(users, &user)
		}
		result := db.Model(s).Association("Users").Replace(users)
		if result.Error != nil {
			logrus.Error(result.Error)
			return result.Error
		}
	}
	if s.PermissionIds != nil {
		for _, permId := range s.PermissionIds {
			perm := Permission{ID: permId}
			permissions = append(permissions, &perm)
		}
		result := db.Model(s).Association("Permissions").Replace(permissions)
		if result.Error != nil {
			logrus.Error(result.Error)
			return result.Error
		}
	}
	result := db.Model(s).Updates(s)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}

// LoadGroupAssociationIds fills s.GroupIds from the group_permission table.
func (s *Permission) LoadGroupAssociationIds() error {
	var groupIds []uint
	result := db.Table("group_permission").Where("permission_id = ?", s.ID).Pluck("group_id", &groupIds)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	s.GroupIds = groupIds
	return nil
}

// LoadAllAssociationIds fills s.GroupIds and s.UserIds for the permission.
func (s *Permission) LoadAllAssociationIds() error {
	var groupIds []uint
	result := db.Table("group_permission").Where("permission_id = ?", s.ID).Pluck("group_id", &groupIds)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	s.GroupIds = groupIds

	var userIds []uint
	result = db.Table("user_permission").Where("permission_id = ?", s.ID).Pluck("user_id", &userIds)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	s.UserIds = userIds
	return nil
}

// LoadAllAssociations preloads the permission's Users and Groups relations.
func (s *Permission) LoadAllAssociations() error {
	// Bug fix: previously preloaded "Permissions", which is not a field of
	// Permission (copy-paste from User); the struct's relations are Users
	// and Groups.
	result := db.Preload("Users").Preload("Groups").First(s)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}

// Create inserts the permission together with relations built from the *Ids
// fields.
func (s *Permission) Create() error {
	// add relations
	var groups []*Group
	var users []*User
	for _, groupId := range s.GroupIds {
		group := Group{ID: groupId}
		groups = append(groups, &group)
	}
	s.Groups = groups
	for _, userId := range s.UserIds {
		user := User{ID: userId}
		users = append(users, &user)
	}
	s.Users = users
	result := db.Create(s)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}

// Delete removes the permission after clearing its relation rows.
func (s *Permission) Delete() error {
	if s.ID == 0 {
		return nil
	}
	// clear relations (best-effort; errors intentionally ignored as before)
	db.Model(s).Association("Groups").Clear()
	db.Model(s).Association("Users").Clear()
	result := db.Delete(s)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}

// Update saves the permission's fields and, when the *Ids fields are set,
// replaces the corresponding relation sets.
func (s *Permission) Update() error {
	// replace relations
	var groups []*Group
	var users []*User
	if s.GroupIds != nil {
		for _, groupId := range s.GroupIds {
			group := Group{ID: groupId}
			groups = append(groups, &group)
		}
		result := db.Model(s).Association("Groups").Replace(groups)
		if result.Error != nil {
			logrus.Error(result.Error)
			return result.Error
		}
	}
	if s.UserIds != nil {
		for _, userId := range s.UserIds {
			user := User{ID: userId}
			users = append(users, &user)
		}
		result := db.Model(s).Association("Users").Replace(users)
		if result.Error != nil {
			logrus.Error(result.Error)
			return result.Error
		}
	}
	result := db.Model(s).Updates(s)
	if result.Error != nil {
		logrus.Error(result.Error)
		return result.Error
	}
	return nil
}
package board

import (
	"errors"
)

// Player identifies a cell owner: 0 empty, 1 X, 2 O.
type Player int

// String renders the player as "X", "O", or "_" for empty/unknown.
func (p Player) String() string {
	if p == 1 {
		return "X"
	} else if p == 2 {
		return "O"
	}
	return "_"
}

var (
	Empty   Player = 0
	PlayerX Player = 1
	PlayerO Player = 2
)

var PlaceTakenErr = errors.New("the place is already taken")

// Board is a Custom Type so that we can
// attach methods to it, that make it
// easier to interact with.
type Board [4][4]Player

// Place places the player on the Board. Returns an error
// if the place is already taken by either player.
func (b *Board) Place(column, row int, player Player) error {
	val := b[column][row]
	if val != 0 {
		return PlaceTakenErr
	}
	b[column][row] = player
	return nil
}

// HasNoSpaceLeft reports whether every cell on the board is occupied.
func (b *Board) HasNoSpaceLeft() bool {
	for _, column := range b {
		for _, row := range column {
			// since a place is still empty,
			// the game can't have ended yet.
			if row == Empty {
				return false
			}
		}
	}
	return true
}

// IsDraw reports a full board with no winner.
func (b *Board) IsDraw() bool {
	// A draw can only occur when all
	// places on the board are filled.
	if !b.HasNoSpaceLeft() {
		return false
	}
	// if there is no winner, even though
	// the game has ended, we have a draw.
	hasWinner, _ := b.HasWinner()
	if !hasWinner {
		return true
	}
	return false
}

// Get returns the element at this position
// but returning -1 if the place is outside the board.
// Makes it safer to access places on the board.
// Bug fix: bounds were checked against a hard-coded 2 (a 3x3 leftover),
// wrongly rejecting the valid index 3 on this 4x4 board; use the array
// lengths instead.
func (b *Board) Get(column, row int) Player {
	if column < 0 || column >= len(b) {
		return -1
	}
	if row < 0 || row >= len(b[column]) {
		return -1
	}
	return b[column][row]
}

func areSamePlayers(elements []Player) bool {
	var lastElement = elements[0]
	// Empty is not a real Player
	if lastElement == Empty {
		return false
	}
	for _, elem := range elements {
		if elem != lastElement {
			return false
		}
	}
	return true
}

// HasWinner reports whether any row, column, or diagonal is filled by a
// single player, and returns that player.
func (b *Board) HasWinner() (bool, Player) {
	// horizontal check
	for _, row := range b {
		// `row[:]` is a trick to convert a fixed size
		// array into a slice. That way I don't have to
		// hardcode the size of the array.
		if areSamePlayers(row[:]) {
			return true, row[0]
		}
	}

	// vertical check
	var inverted [4][4]Player
	for i := 0; i < len(b); i++ {
		for j := 0; j < len(b[i]); j++ {
			inverted[j][i] = b[i][j]
		}
	}
	for _, elems := range inverted {
		if areSamePlayers(elems[:]) {
			return true, elems[0]
		}
	}

	// diagonal check (implements the previous TODO): the main diagonal and
	// the anti-diagonal.
	var diag, anti [4]Player
	for i := 0; i < len(b); i++ {
		diag[i] = b[i][i]
		anti[i] = b[i][len(b)-1-i]
	}
	if areSamePlayers(diag[:]) {
		return true, diag[0]
	}
	if areSamePlayers(anti[:]) {
		return true, anti[0]
	}

	return false, 0
}
package cool

import (
	"encoding/json"
	"fmt"
	"strings"

	sdk "github.com/cosmos/cosmos-sdk/types"
)

// MsgSetTrend is a really cool msg type; these fields can be entirely
// arbitrary and custom to your message.
type MsgSetTrend struct {
	Sender sdk.AccAddress
	Cool   string
}

// Genesis state - specify genesis trend.
type Genesis struct {
	Trend string `json:"trend"`
}

// NewMsgSetTrend constructs a new cool message.
func NewMsgSetTrend(sender sdk.AccAddress, cool string) MsgSetTrend {
	return MsgSetTrend{
		Sender: sender,
		Cool:   cool,
	}
}

// enforce the msg type at compile time
var _ sdk.Msg = MsgSetTrend{}

// nolint
func (msg MsgSetTrend) Route() string { return "cool" }
func (msg MsgSetTrend) Type() string  { return "set_trend" }
func (msg MsgSetTrend) GetSigners() []sdk.AccAddress {
	return []sdk.AccAddress{msg.Sender}
}
func (msg MsgSetTrend) String() string {
	return fmt.Sprintf("MsgSetTrend{Sender: %v, Cool: %v}", msg.Sender, msg.Cool)
}

// ValidateBasic is used to quickly disqualify obviously invalid messages:
// the sender must be set and the trend must not contain "hot" or "warm".
func (msg MsgSetTrend) ValidateBasic() sdk.Error {
	if len(msg.Sender) == 0 {
		return sdk.ErrUnknownAddress(msg.Sender.String()).TraceSDK("")
	}
	if strings.Contains(msg.Cool, "hot") {
		return sdk.ErrUnauthorized("").TraceSDK("hot is not cool")
	}
	if strings.Contains(msg.Cool, "warm") {
		return sdk.ErrUnauthorized("").TraceSDK("warm is not very cool")
	}
	return nil
}

// GetSignBytes returns the canonical (sorted-JSON) bytes for the message
// signer to sign on.
func (msg MsgSetTrend) GetSignBytes() []byte {
	b, err := json.Marshal(msg)
	if err != nil {
		panic(err)
	}
	return sdk.MustSortJSON(b)
}

//_______________________________________________________________________

// MsgQuiz is a message type to quiz how cool you are. These fields can be
// entirely arbitrary and custom to your message.
type MsgQuiz struct {
	Sender     sdk.AccAddress
	CoolAnswer string
}

// NewMsgQuiz constructs a new cool message.
func NewMsgQuiz(sender sdk.AccAddress, coolerthancool string) MsgQuiz {
	return MsgQuiz{
		Sender:     sender,
		CoolAnswer: coolerthancool,
	}
}

// enforce the msg type at compile time
var _ sdk.Msg = MsgQuiz{}

// nolint
func (msg MsgQuiz) Route() string { return "cool" }
func (msg MsgQuiz) Type() string  { return "quiz" }
func (msg MsgQuiz) GetSigners() []sdk.AccAddress {
	return []sdk.AccAddress{msg.Sender}
}
func (msg MsgQuiz) String() string {
	return fmt.Sprintf("MsgQuiz{Sender: %v, CoolAnswer: %v}", msg.Sender, msg.CoolAnswer)
}

// ValidateBasic is used to quickly disqualify obviously invalid messages:
// only the sender address is checked here.
func (msg MsgQuiz) ValidateBasic() sdk.Error {
	if len(msg.Sender) == 0 {
		return sdk.ErrUnknownAddress(msg.Sender.String()).TraceSDK("")
	}
	return nil
}

// GetSignBytes returns the canonical (sorted-JSON) bytes for the message
// signer to sign on.
func (msg MsgQuiz) GetSignBytes() []byte {
	b, err := json.Marshal(msg)
	if err != nil {
		panic(err)
	}
	return sdk.MustSortJSON(b)
}
package worker import ( "testing" ) func Test_PingIP4_1(t *testing.T) { addr, num := "127.0.0.1", 5 result := PingIP4(&addr, &num, true) if result.Error != "" { t.Error(result.Error) return } t.Log("pass check: ping localhost") if result.SendSum != num { t.Errorf("%d icmp sent, expect %d icmp sent", result.SendSum, num) return } t.Log("pass check: SendSum") if result.RecvSum != num { t.Errorf("%d response receive, expect %d response receive", result.RecvSum, num) return } t.Log("pass check: RecvSum") if result.AvgTime == -1 { t.Errorf("average == -1, receive 0 response") return } t.Log("pass check: AvgTime") }
package mathematics const ( BCMax = 510000 BCMod = 1000000007 ) var fac, finv, inv [BCMax]int // BCInit initializes the binomial coefficient table func BCInit() { fac[0], fac[1] = 1, 1 finv[0], finv[1] = 1, 1 inv[1] = 1 for i := 2; i < BCMax; i++ { fac[i] = fac[i-1] * i % BCMod inv[i] = BCMod - inv[BCMod%i]*(BCMod/i)%BCMod finv[i] = finv[i-1] * inv[i] % BCMod } } // BC calculates the binomial coefficient nCr func BC(n, r int) int { if n < r { return 0 } if n < 0 || r < 0 { return 0 } // n!/(r!*(n-r)!) = n!*(r!)^{-1}*((n-r)!)^{-1} return fac[n] * (finv[r] * finv[n-r] % BCMod) % BCMod }
// Package log provides a level based logger in place of go's standard logger package log import ( "bytes" "fmt" "io" "os" "runtime" "strconv" "strings" "sync" "time" ) // Log flags const ( FTimestamp = 1 << iota FShowFile // Maybe some other time. ) // Log levels const ( TRACE = 10 * iota DEBUG INFO WARN ERROR CRIT PANIC ) func levelToString(level int) string { switch level { case TRACE: return "TRACE" case DEBUG: return "DEBUG" case INFO: return "INFO " case WARN: return "WARN " case ERROR: return "ERROR" case CRIT: return "CRIT " case PANIC: return "PANIC" } return "?????" } // Logger is a level based logging engine type Logger struct { flags int output io.Writer prefix string wMutex sync.Mutex minLevel int parent *Logger } // Flags is a getter for the flags the logger currently has func (l *Logger) Flags() int { return l.flags } // SetFlags is a setter for the logger's flags. It returns the same Logger it was run on for use in call chains func (l *Logger) SetFlags(flags int) *Logger { l.flags = flags return l } // Prefix is a getter for the prefix the logger currently has func (l *Logger) Prefix() string { return l.prefix } // SetPrefix is a setter for the logger's flags. 
It returns the same Logger it was run on for use in call chains func (l *Logger) SetPrefix(prefix string) *Logger { l.prefix = prefix return l } // MinLevel is a getter for the minimum log level on the logger func (l *Logger) MinLevel() int { return l.minLevel } // New creates a new logger with the set options func New(flags int, output io.Writer, prefix string, minLevel int) *Logger { return &Logger{flags: flags, output: output, prefix: prefix, minLevel: minLevel, wMutex: sync.Mutex{}} } func shortenFilename(filename string) string { out := filename for i := len(filename) - 1; i > 0; i-- { if filename[i] == '/' { out = filename[i+1:] break } } return out } const openBrace = '[' const closeBrace = ']' const space = ' ' func (l *Logger) writeMsg(msg string, level int) { if level < l.minLevel { return } outStr := bytes.Buffer{} if l.flags&FTimestamp != 0 { outStr.WriteRune(openBrace) outStr.WriteString(time.Now().Format("15:04:05.000")) outStr.WriteRune(closeBrace) outStr.WriteRune(space) } outStr.WriteRune(openBrace) outStr.WriteString(levelToString(level)) outStr.WriteRune(closeBrace) outStr.WriteRune(space) if l.flags&FShowFile != 0 { outStr.WriteRune(openBrace) _, file, line, ok := runtime.Caller(2) if !ok { outStr.WriteString("???") } else { outStr.WriteString(shortenFilename(file)) outStr.WriteRune(':') outStr.WriteString(strconv.Itoa(line)) } outStr.WriteRune(closeBrace) outStr.WriteRune(space) } if l.prefix != "" { outStr.WriteRune(openBrace) outStr.WriteString(l.prefix) outStr.WriteRune(closeBrace) outStr.WriteRune(space) } outStr.WriteString(strings.TrimRight(msg, "\r\n")) outStr.WriteRune('\n') if _, err := l.write(outStr.Bytes()); err != nil { fmt.Fprintf(os.Stderr, "Could not write log entry: %s", err) } } // Trace logs the passed data at the Trace level. 
The passed arguments are run through fmt.Sprintf before logging func (l *Logger) Trace(args ...interface{}) { l.writeMsg(fmt.Sprint(args...), TRACE) } // Tracef logs the passed data at the Tracef level using the format string passed as the first argument to // format the message. The passed arguments are run through fmt.Sprintf before logging func (l *Logger) Tracef(format string, args ...interface{}) { l.writeMsg(fmt.Sprintf(format, args...), TRACE) } // Debug logs the passed data at the Debug level. The passed arguments are run through fmt.Sprintf before logging func (l *Logger) Debug(args ...interface{}) { l.writeMsg(fmt.Sprint(args...), DEBUG) } // Debugf logs the passed data at the Debugf level using the format string passed as the first argument to // format the message. The passed arguments are run through fmt.Sprintf before logging func (l *Logger) Debugf(format string, args ...interface{}) { l.writeMsg(fmt.Sprintf(format, args...), DEBUG) } // Info logs the passed data at the Info level. The passed arguments are run through fmt.Sprintf before logging func (l *Logger) Info(args ...interface{}) { l.writeMsg(fmt.Sprint(args...), INFO) } // Infof logs the passed data at the Infof level using the format string passed as the first argument to // format the message. The passed arguments are run through fmt.Sprintf before logging func (l *Logger) Infof(format string, args ...interface{}) { l.writeMsg(fmt.Sprintf(format, args...), INFO) } // Warn logs the passed data at the Warn level. The passed arguments are run through fmt.Sprintf before logging func (l *Logger) Warn(args ...interface{}) { l.writeMsg(fmt.Sprint(args...), WARN) } // Warnf logs the passed data at the Warnf level using the format string passed as the first argument // to format the message. 
The passed arguments are run through fmt.Sprintf before logging func (l *Logger) Warnf(format string, args ...interface{}) { l.writeMsg(fmt.Sprintf(format, args...), WARN) } // Crit logs the passed data at the Crit level. The passed arguments are run through fmt.Sprintf before logging func (l *Logger) Crit(args ...interface{}) { l.writeMsg(fmt.Sprint(args...), CRIT) os.Exit(1) } // Critf logs the passed data at the Critf level using the format string passed as the first argument to // format the message. The passed arguments are run through fmt.Sprintf before logging func (l *Logger) Critf(format string, args ...interface{}) { l.writeMsg(fmt.Sprintf(format, args...), CRIT) os.Exit(1) } // Panic logs the passed data at the Panic level. The passed arguments are run through fmt.Sprintf before logging func (l *Logger) Panic(args ...interface{}) { msg := fmt.Sprint(args...) l.writeMsg(msg, PANIC) panic(msg) } // Panicf logs the passed data at the Panicf level using the format string passed as the first argument to // format the message. The passed arguments are run through fmt.Sprintf before logging func (l *Logger) Panicf(format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) l.writeMsg(msg, PANIC) panic(msg) } func (l *Logger) write(msg []byte) (int, error) { if l.parent != nil { return l.parent.write(msg) } l.wMutex.Lock() defer l.wMutex.Unlock() return l.output.Write(msg) } // Clone duplicates the logger it is run on, returning a clean version that can be modified func (l *Logger) Clone() *Logger { return &Logger{ flags: l.flags, output: l.output, prefix: l.prefix, minLevel: l.minLevel, parent: l, } }
package auction

// Auctioneer is a manager at auction house which handles concurrent bids.
// Bids for each item are serialized through that item's channel, which a
// dedicated chant goroutine drains, so House.Bid is never called
// concurrently for the same item.
type Auctioneer struct {
	house    *House
	bidChans map[string]chan Offer // item_name -> bid channel for that item
}

// NewAuctioneer instantiates a new auctioneer for an auction house
func NewAuctioneer(house *House) *Auctioneer {
	return &Auctioneer{
		house:    house,
		bidChans: make(map[string]chan Offer, 0),
	}
}

// List a new item and start a chant goroutine to serialize its bids.
func (a *Auctioneer) List(newItemName string) {
	_, exist := a.bidChans[newItemName]
	if !exist { // if item exists, new listing will be ignored.
		// Unbuffered channel: Hear blocks until chant receives the offer.
		a.bidChans[newItemName] = make(chan Offer)
		newItem := NewItem(newItemName)
		a.house.Add(newItem)
	}
	// NOTE(review): chant is started even when the item already existed, so
	// repeated List calls stack extra chant goroutines on the same channel —
	// confirm whether that is intended.
	go a.chant(newItemName)
}

// Hear a bid on specific item
// Returns if the bid is valid
// Returns failure with reason if any
func (a *Auctioneer) Hear(itemName string, offer *Offer) (bool, error) {
	item, err := a.house.getItem(itemName)
	if err == ErrItemNotExist {
		return false, err
	}
	if item.IsClosed() {
		return false, ErrAuctionClose
	}
	bidChan, exist := a.bidChans[itemName]
	if exist {
		// Buffered (capacity 1) so chant can reply without blocking even if
		// this caller is slow to read.
		offer.ReplyChan = make(chan BidResult, 1)
		bidChan <- *offer
	} else {
		return false, ErrAuctionClose // A safeguard on closed auction
	}
	// Block until chant processes this offer and posts the verdict.
	result := <-offer.ReplyChan
	// Map the textual reject reason carried in BidResult back to the
	// package's sentinel errors.
	var rejectReason error
	switch result.RejectReason {
	case ErrItemNotExist.Error():
		rejectReason = ErrItemNotExist
	case ErrAuctionClose.Error():
		rejectReason = ErrAuctionClose
	default:
		rejectReason = nil
	}
	return result.Accepted, rejectReason
}

// chant on bidding
// An indefinite executing process until an item auction is closed.
// It is the single consumer of the item's bid channel and answers each
// offer on its ReplyChan.
func (a *Auctioneer) chant(itemName string) {
	c := a.bidChans[itemName]
	for offer := range c {
		accepted, err := a.house.Bid(offer.Bidder, offer.Item, offer.Price)
		var result *BidResult
		if err != nil {
			result = NewBidResult(accepted, err.Error())
		} else {
			result = NewBidResult(accepted, "")
		}
		offer.ReplyChan <- *result
	}
	// This channel will be closed when auction has ended for this item
	// This indefinite loop will end
}

// Close auctioning on an item
// This function is not protected against concurrency.
// Therefore, it is DDoS vulnerable.
func (a *Auctioneer) Close(itemName string) {
	item, err := a.house.getItem(itemName)
	if err != ErrItemNotExist && !item.IsClosed() {
		auctionChan, exist := a.bidChans[itemName]
		if exist {
			// Closing the channel terminates the chant goroutine; removing
			// the map entry makes Hear fail fast with ErrAuctionClose.
			close(auctionChan)
			delete(a.bidChans, itemName)
			a.house.Close(itemName)
		}
		item.Close()
	}
}
package main

import (
	"fmt"
	"runtime"
	"sync"
)

// GOMAXPROCS demo: with a single processor the two goroutines tend to run
// to completion one after the other; with two they may interleave.

// wg synchronizes main with the two printer goroutines.
var wg sync.WaitGroup

// f1 prints "A: 0".."A: 8" and signals completion on wg.
func f1() {
	for i := 0; i < 9; i++ {
		fmt.Printf("A: %d\n", i)
	}
	defer wg.Done()
}

// f2 prints "B: 0".."B: 8" and signals completion on wg.
func f2() {
	for i := 0; i < 9; i++ {
		fmt.Printf("B: %d\n", i)
	}
	defer wg.Done()
}

// main pins the scheduler to one processor, starts both printers, and waits
// for them to finish.
func main() {
	// GOMAXPROCS defaults to the number of CPU cores. With 1, both
	// goroutines share a single processor, so one usually prints all of its
	// lines before the other starts; changing this to 2 may interleave the
	// A and B output.
	runtime.GOMAXPROCS(1)
	fmt.Println(runtime.NumCPU())
	wg.Add(2)
	go f1()
	go f2()
	wg.Wait()
}
package users import ( "github.com/cheynewallace/tabby" "github.com/sirupsen/logrus" "github.com/spf13/cobra" "github.com/spf13/viper" "github.com/foundriesio/fioctl/client" "github.com/foundriesio/fioctl/subcommands" ) func NewCommand() *cobra.Command { cmd := &cobra.Command{ Use: "users", Short: "List users with access to a factory", Run: func(cmd *cobra.Command, args []string) { doList(subcommands.Login(cmd), viper.GetString("factory")) }, } subcommands.RequireFactory(cmd) return cmd } func doList(api *client.Api, factory string) { logrus.Debugf("Listing factory users for %s", factory) users, err := api.UsersList(factory) subcommands.DieNotNil(err) t := tabby.New() t.AddHeader("ID", "NAME", "ROLE") for _, user := range users { t.AddLine(user.PolisId, user.Name, user.Role) } t.Print() }
package filesrv import ( "httpsrv" "io" "log" "net/http" "os" "path/filepath" "strconv" "strings" "time" "util" ) type File struct { Size int64 `json:"size"` Path string `json:"path"` } func Upload(w http.ResponseWriter, r *http.Request) { r.ParseMultipartForm(cfg.MaxSize) file, handler, err := r.FormFile("file") if err != nil { httpsrv.SendFailure(w, r, 400, httpsrv.Failure{Message: "上传错误"}) return } extname := filepath.Ext(handler.Filename) if isAllowedExt(extname) == false { httpsrv.SendFailure(w, r, 400, httpsrv.Failure{Message: "不允许的上传类型"}) return } filename := strconv.FormatInt(time.Now().Unix(), 10) + extname if filename, err = util.Md5FromReader(file); err != nil { defer file.Close() log.Printf("fail to sum md5: %q\n", err) httpsrv.SendFailure(w, r, 400, httpsrv.Failure{Message: "fail to sum md5"}) return } filename = util.Md5PathName(filename) + extname fileOfPath := filepath.Join(cfg.Root, filename) os.MkdirAll(fileOfPath[:strings.LastIndex(fileOfPath, "/")], os.ModePerm) f, err := os.OpenFile(fileOfPath, os.O_CREATE|os.O_WRONLY, 0660) if err != nil { httpsrv.SendFailure(w, r, 400, httpsrv.Failure{Message: "上传失败"}) return } file, _, _ = r.FormFile("file") defer file.Close() defer f.Close() _, err = io.Copy(f, file) if err != nil { httpsrv.SendFailure(w, r, 400, httpsrv.Failure{Message: "上传失败"}) return } httpsrv.SendSuccess(w, r, File{ Size: handler.Size, Path: filepath.Join(cfg.URI, filename), }) } func isAllowedExt(extname string) (isAllowedExt bool) { disallowed := []string{".exe", ".js"} isAllowedExt = true for _, v := range disallowed { if v == extname { isAllowedExt = false return } } return }
package handlers

import (
	"InkaTry/warehouse-storage-be/internal/http/admin/dtos"
	"InkaTry/warehouse-storage-be/internal/pkg/errs"
	"InkaTry/warehouse-storage-be/internal/pkg/stores"
	"InkaTry/warehouse-storage-be/mocks/mock_mysql"
	"context"
	"errors"

	"github.com/golang/mock/gomock"
	"github.com/stretchr/testify/assert"

	"testing"
)

// TestListProductTypes drives AdminHandler.ListProductTypes against a mocked
// MySQL client, table-style: a generic DB failure, the no-result sentinel,
// and a successful listing.
func TestListProductTypes(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	mockMysql := mock_mysql.NewMockClienter(ctrl)
	ctx := context.Background()
	handler := NewAdminHandler(&Params{
		DB: mockMysql,
	})

	var tts = []struct {
		caseName     string
		expectations func()                                                    // programs the mock for this case
		results      func(response *dtos.ListProductTypesResponse, err error) // asserts on the handler's output
	}{
		{
			// Arbitrary store error is passed through unchanged.
			caseName: "db error",
			expectations: func() {
				mockMysql.EXPECT().ListProductTypes(ctx).Return(nil, errors.New("any"))
			},
			results: func(response *dtos.ListProductTypesResponse, err error) {
				assert.NotNil(t, err)
				assert.Equal(t, errors.New("any"), err)
			},
		},
		{
			// The no-result sentinel is also surfaced to the caller as-is.
			caseName: "no result found",
			expectations: func() {
				mockMysql.EXPECT().ListProductTypes(ctx).Return(nil, errs.ErrNoResultFound)
			},
			results: func(response *dtos.ListProductTypesResponse, err error) {
				assert.NotNil(t, err)
				assert.Equal(t, errs.ErrNoResultFound, err)
			},
		},
		{
			// Store rows come back verbatim in the response's ProductTypes.
			caseName: "success",
			expectations: func() {
				mockMysql.EXPECT().ListProductTypes(ctx).
					Return(stores.Results{
						{
							ID:   uint16(1),
							Name: "test",
						},
					}, nil)
			},
			results: func(response *dtos.ListProductTypesResponse, err error) {
				assert.Nil(t, err)
				assert.Equal(t, stores.Results{
					{
						ID:   uint16(1),
						Name: "test",
					},
				}, response.ProductTypes)
			},
		},
	}

	for _, tt := range tts {
		t.Log(tt.caseName)
		tt.expectations()
		tt.results(handler.ListProductTypes(ctx))
	}
}
package main

/*
 * Quick validation
 *
 */

import (
	"fmt"
	"log"
	"reflect"
	"time"

	"github.com/blewater/rsaints/lib"
)

// assertBool aborts the run when a boolean result differs from expected.
func assertBool(funcName string, res, expected bool) {
	if res != expected {
		log.Fatalf("%s resulted in %t, expected %t\n", funcName, res, expected)
	}
}

// assertEqInt64 aborts the run when an integer result differs from expected.
func assertEqInt64(funcName string, res, expected lib.Integer) {
	if res != expected {
		log.Fatalf("%s resulted in %d, expected %d\n", funcName, res, expected)
	}
}

// assertEqFactors aborts the run when a factor list differs from expected
// (deep element-wise comparison).
func assertEqFactors(funcName string, res, expected lib.Factors) {
	if !reflect.DeepEqual(res, expected) {
		log.Fatalf("%s resulted in %v, expected %v\n", funcName, res, expected)
	}
}

// main exercises the rsaints lib end-to-end: primality checks (including a
// timed 10-digit prime), integer factorization, Euclid's GCD, modular
// multiplicative inverses, and RSA encrypt/decrypt round trips.
func main() {
	assertBool("IsPrimeOptimized", lib.IsPrimeOptimized(5), true)
	assertBool("IsPrimeOptimized", lib.IsPrimeOptimized(23), true)
	assertBool("IsPrimeOptimized", lib.IsPrimeOptimized(81), false)
	// non-prime number: 7 * 157 * 8365633
	assertBool("IsPrimeOptimized", lib.IsPrimeOptimized(9193830667), false)
	// known 10 digit prime number < 0.5 sec
	start := time.Now()
	res := lib.IsPrimeOptimized(9576890767)
	fmt.Printf("large prime number check took %v\n", time.Since(start))
	assertBool("IsPrimeOptimized", res, true)

	assertEqFactors("Factor", lib.Factor(23), lib.Factors{23})
	assertEqFactors("Factor", lib.Factor(26), lib.Factors{2, 13})
	assertEqFactors("Factor", lib.Factor(81), lib.Factors{3, 3, 3, 3})
	assertEqFactors("Factor", lib.Factor(150), lib.Factors{2, 3, 5, 5})
	assertEqFactors("Factor", lib.Factor(147), lib.Factors{3, 7, 7})
	assertEqFactors("Factor", lib.Factor(150), lib.Factors{2, 3, 5, 5})
	assertEqFactors("Factor", lib.Factor(330), lib.Factors{2, 3, 5, 11})
	// non-prime number: 7 * 157 * 8365633
	assertEqFactors("Factor", lib.Factor(9193830667), lib.Factors{7, 157, 8365633})
	// known 10 digit prime number
	assertEqFactors("Factor", lib.Factor(9576890767), lib.Factors{9576890767})

	assertEqInt64("Euclid", lib.CalcEuclid(499017086208, 676126714752), 93312)
	assertEqInt64("Euclid", lib.CalcEuclid(5988737349, 578354589), 9)

	assertEqInt64("Mod Mult Inverse", lib.CalcModInvByEuclid(15, 26), 7)
	assertEqInt64("Mod Mult Inverse", lib.CalcModInvByEuclid(342952340, 4230493243), 583739113)

	// CheckRSA(message, n, e) — boundary messages (1 and n-1) included.
	assertBool("Validate RSA Encryption and Decryption", lib.CheckRSA(654321, 937513, 638471), true)
	assertBool("Validate RSA Encryption and Decryption", lib.CheckRSA(10000, 937513, 638471), true)
	assertBool("Validate RSA Encryption and Decryption", lib.CheckRSA(937512, 937513, 638471), true)
	assertBool("Validate RSA Encryption and Decryption", lib.CheckRSA(1, 937513, 638471), true)

	fmt.Println("Successful completion of all tests.")
}
package cmd import ( "fmt" "github.com/spf13/cobra" ) var versionCmd = &cobra.Command{ Use: "version", Short: "Print the version of namedns", Run: func(cmd *cobra.Command, args []string) { fmt.Println("namedns", Version) }, } // Version should be set to actual version string in `go build -ldflags "-X github.com/aelindeman/namedns/cmd.Version="` var Version string func init() { rootCmd.AddCommand(versionCmd) }
package main import ( "encoding/json" "fmt" "io/ioutil" "log" "net/http" "github.com/gorilla/mux" _ "github.com/gorilla/mux" ) //Article stuct ... type Article struct { Id string `json:"Id"` Title string `json:"Title"` Author string `json:"Author"` Content string `json:"Content"` } type ErrorMessage struct { Message string `json:"Message"` } //Articles - local DataBase var Articles []Article //GET request for /articles func GetAllArticles(w http.ResponseWriter, r *http.Request) { fmt.Println("Hint: getAllArticles woked.....") json.NewEncoder(w).Encode(Articles) //ResponseWriter - место , куда пишем. Articles - кого пишем } //GET request for article with ID func GetArticleWithId(w http.ResponseWriter, r *http.Request) { vars := mux.Vars(r) find := false for _, article := range Articles { if article.Id == vars["id"] { find = true json.NewEncoder(w).Encode(article) } } if !find { w.WriteHeader(http.StatusNotFound) // Изменить статус код запроса на 404 var erM = ErrorMessage{Message: "Not found article with that ID"} json.NewEncoder(w).Encode(erM) } } //PostNewArticle func for create new article func PostNewArticle(w http.ResponseWriter, r *http.Request) { // { // "Id" : "3", // "Title" : "Title from json POST method", // "Author" : "Me", // "Content" : "Content from json POST method" // } reqBody, _ := ioutil.ReadAll(r.Body) var article Article json.Unmarshal(reqBody, &article) // Считываем все из тела зпроса в подготовленный пустой объект Article w.WriteHeader(http.StatusCreated) // Изменить статус код запроса на 201 Articles = append(Articles, article) json.NewEncoder(w).Encode(article) //После добавления новой статьи возвращает добавленную } //DeleterArticleWithId ... 
func DeleterArticleWithId(w http.ResponseWriter, r *http.Request) { vars := mux.Vars(r) id := vars["id"] find := false for index, article := range Articles { if article.Id == id { find = true w.WriteHeader(http.StatusAccepted) // Изменить статус код на 202 Articles = append(Articles[:index], Articles[index+1:]...) } } if !find { w.WriteHeader(http.StatusNotFound) // Изменить статус код на 404 var erM = ErrorMessage{Message: "Article with that id doesn't exist"} json.NewEncoder(w).Encode(erM) } } //PutExistsArticle .... func PutExistsArticle(w http.ResponseWriter, r *http.Request) { vars := mux.Vars(r) idKey := vars["id"] // СТРОКА finded := false for index, article := range Articles { if article.Id == idKey { finded = true reqBody, _ := ioutil.ReadAll(r.Body) w.WriteHeader(http.StatusAccepted) // Изменяем статус код на 202 json.Unmarshal(reqBody, &Articles[index]) // перезаписываем всю информацию для статьи с Id } } if !finded { w.WriteHeader(http.StatusNotFound) // Изменяем статус код на 404 var erM = ErrorMessage{Message: "Not found article with that ID. Try use POST first"} json.NewEncoder(w).Encode(erM) } } func main() { //Добавляю 2 статьи в свою базу Articles = []Article{ Article{Id: "1", Title: "First title", Author: "First author", Content: "First content"}, Article{Id: "2", Title: "Second title", Author: "Second author", Content: "Second content"}, } fmt.Println("REST API V2.0 worked....") //СОздаю свой маршрутизатор на основе либы mux myRouter := mux.NewRouter().StrictSlash(true) myRouter.HandleFunc("/articles", GetAllArticles).Methods("GET") myRouter.HandleFunc("/article/{id}", GetArticleWithId).Methods("GET") //Создадим запрос на добавление новой статьи myRouter.HandleFunc("/article", PostNewArticle).Methods("POST") //Создадим запрос на удаление статьи (гарантировано существует) myRouter.HandleFunc("/article/{id}", DeleterArticleWithId).Methods("DELETE") //ДЗ - Добавить PUT // Алгоритмы. Вводный курс (Т. Кормен) - это с нуля // Грокаем алгоритмы (А. 
Бхаргава) // // Кнут + Кормен (Алгоритмы. Построение и анализ) - это для ПРО myRouter.HandleFunc("/article/{id}", PutExistsArticle).Methods("PUT") log.Fatal(http.ListenAndServe(":8000", myRouter)) }
//go:generate protoc -I./protobuf -I$GOPATH/src -I$GOPATH/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis --grpc-gateway_out=logtostderr=true:./protobuf ./protobuf/your_service.proto
//protoc -I. -I$GOPATH/src -I$GOPATH/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis --grpc-gateway_out=logtostderr=true:. ./protobuf/your_service.proto

package main

import (
	"flag"

	"github.com/golang/glog"
	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"golang.org/x/net/context"
	"google.golang.org/grpc"

	"net/http"

	pb "./protobuf"
)

var (
	// echoEndpoint is the gRPC backend address the gateway proxies to.
	// NOTE(review): the flag name "echpo_endpoint" looks like a typo for
	// "echo_endpoint", but it is user-facing CLI surface — renaming would
	// break existing invocations, so it is only flagged here.
	echoEndpoint = flag.String("echpo_endpoint", "localhost:9090", "endpoint of YourService")
)

// run wires a grpc-gateway mux to the Greeter backend (plaintext dial) and
// serves the JSON/REST proxy on :8080 until the listener fails.
func run() error {
	ctx := context.Background()
	ctx, cancel := context.WithCancel(ctx)
	defer cancel()

	mux := runtime.NewServeMux()
	opts := []grpc.DialOption{grpc.WithInsecure()}
	err := pb.RegisterGreeterHandlerFromEndpoint(ctx, mux, *echoEndpoint, opts)
	if err != nil {
		return err
	}

	return http.ListenAndServe(":8080", mux)
}

// main parses flags, runs the gateway, and flushes glog buffers on exit.
func main() {
	flag.Parse()
	defer glog.Flush()

	if err := run(); err != nil {
		glog.Fatal(err)
	}
}
package handler

import (
	"net/http"

	miniprop "github.com/firefirestyle/engine-v01/prop"
	"google.golang.org/appengine"
)

// HandleUpdate is the HTTP entry point for updating an article: it extracts
// the request's MiniProp payload and delegates to HandleUpdateBase.
func (obj *ArticleHandler) HandleUpdate(w http.ResponseWriter, r *http.Request) {
	obj.HandleUpdateBase(w, r, obj.GetInputProp(w, r))
}

// HandleUpdateBase loads the article named by "articleId" in inputProp,
// copies every field that is present in the payload onto it (fields absent
// from the payload are left untouched), saves it, and on success writes a
// JSON body carrying the article's id and datastore key.
func (obj *ArticleHandler) HandleUpdateBase(w http.ResponseWriter, r *http.Request, inputProp *miniprop.MiniProp) {
	ctx := appengine.NewContext(r)
	propObj := miniprop.NewMiniProp()
	//
	// load param from json
	articleId := inputProp.GetString("articleId", "")
	ownerName := inputProp.GetString("userName", "")
	//
	//
	//
	if articleId == "" {
		obj.HandleError(w, r, miniprop.NewMiniProp(), ErrorCodeNotFoundArticleId, "Not Found Article")
		return
	}
	artObj, errGetArt := obj.GetManager().GetArticleFromPointer(ctx, articleId)
	if errGetArt != nil {
		// Any lookup failure is reported as "not found" to the caller.
		obj.HandleError(w, r, miniprop.NewMiniProp(), ErrorCodeNotFoundArticleId, "Not Found Article")
		return
	}
	// Overwrite only the fields the caller supplied.
	if inputProp.Contain("title") {
		title := inputProp.GetString("title", "")
		artObj.SetTitle(title)
	}
	if inputProp.Contain("userName") {
		artObj.SetUserName(ownerName)
	}
	if inputProp.Contain("content") {
		content := inputProp.GetString("content", "")
		artObj.SetCont(content)
	}
	if inputProp.Contain("info") {
		content := inputProp.GetString("info", "")
		artObj.SetInfo(content)
	}
	if inputProp.Contain("tags") {
		tags := inputProp.GetPropStringList("", "tags", make([]string, 0))
		artObj.SetTags(tags)
	}
	if inputProp.Contain("lat") {
		// -999.0 is the fallback default; presumably a "not set" sentinel
		// for coordinates — confirm against the article manager.
		lat := inputProp.GetFloat("lat", -999.0)
		artObj.SetLat(lat)
	}
	if inputProp.Contain("lng") {
		lng := inputProp.GetFloat("lng", -999.0)
		artObj.SetLng(lng)
	}
	if inputProp.Contain("iconUrl") {
		iconUrl := inputProp.GetString("iconUrl", "")
		artObj.SetIconUrl(iconUrl)
	}
	//
	// Arbitrary key/value properties: cleared, then rebuilt from the
	// parallel propKeys/propValues lists.
	// NOTE(review): ClearProp runs before the length check, so mismatched
	// lists wipe the existing props without setting new ones — confirm
	// whether that is intended.
	if inputProp.Contain("propKeys") {
		propKeys := inputProp.GetPropStringList("", "propKeys", make([]string, 0))
		propValues := inputProp.GetPropStringList("", "propValues", make([]string, 0))
		artObj.ClearProp()
		if len(propKeys) == len(propValues) {
			for i, kv := range propKeys {
				artObj.SetProp(kv, propValues[i])
			}
		}
	}
	//
	//
	_, errSave := obj.GetManager().SaveArticleWithImmutable(ctx, artObj)
	if errSave != nil {
		obj.HandleError(w, r, miniprop.NewMiniProp(), ErrorCodeFailedToSave, errSave.Error())
		return
	} else {
		propObj.SetPropString("", "articleId", artObj.GetArticleId())
		propObj.SetPropString("", "articleKey", artObj.GetStringId())
		w.WriteHeader(http.StatusOK)
		w.Write(propObj.ToJson())
	}
}
package main

// maxArea solves the "container with most water" problem with the classic
// two-pointer sweep: the pointer at the shorter line moves inward, since
// keeping it can never produce a larger area.
func maxArea(height []int) int {
	lo, hi := 0, len(height)-1
	best := 0
	for lo < hi {
		width := hi - lo
		if area := min11(height[lo], height[hi]) * width; area > best {
			best = area
		}
		if height[lo] < height[hi] {
			lo++
		} else {
			hi--
		}
	}
	return best
}

// min11 returns the smaller of a and b.
func min11(a, b int) int {
	if a < b {
		return a
	}
	return b
}

// max11 returns the larger of a and b.
func max11(a, b int) int {
	if a < b {
		return b
	}
	return a
}
package mut

import "github.com/lycying/log"

// logger is the package-wide logger used by mut.
var logger *log.Logger

// init builds a DEBUG-level logger with an empty prefix.
// NOTE(review): the second return value of log.New is discarded; if
// construction can fail, logger may be left nil here — confirm against the
// log package's contract.
func init() {
	logger, _ = log.New(log.DEBUG, "")
}
package main import "testing" func TestFib(t *testing.T) { if fib(1) != 1 { t.Fatal("fib(n)") } }
// ˅

package main

import "strconv"

// ˄

// Trouble is a numbered trouble report identified only by its id.
// NOTE(review): the ˅/˄ markers look like fold/region markers from a code
// generator or structured editor — left byte-identical so the tooling keeps
// working.
type Trouble struct {
	// ˅
	// ˄

	// Trouble number
	id int

	// ˅
	// ˄
}

// NewTrouble constructs a Trouble with the given id.
func NewTrouble(id int) *Trouble {
	// ˅
	trouble := &Trouble{id: id}
	return trouble
	// ˄
}

// ToString renders the trouble as "[Trouble <id>]".
func (self *Trouble) ToString() string {
	// ˅
	return "[Trouble " + strconv.Itoa(self.id) + "]"
	// ˄
}

// ˅

// ˄
// // Payload Injector // // Coder: zoid package main import ( "sync" "flag" "log" "bufio" "os" "fmt" "net/url" "net/http" "net/http/httputil" "runtime" "github.com/nu7hatch/gouuid" ) var Reset = "\033[0m" var Red = "\033[31m" var Green = "\033[32m" var Yellow = "\033[33m" var Blue = "\033[34m" var Purple = "\033[35m" var Cyan = "\033[36m" var Gray = "\033[37m" var White = "\033[97m" func init() { if runtime.GOOS == "windows" { Reset = "" Red = "" Green = "" Yellow = "" Blue = "" Purple = "" Cyan = "" Gray = "" White = "" } } func main() { // Banner banner() // Payload to be used. concurrPtr:= flag.Int("c", 20, "the concurrency") payloadPtr := flag.String("p", "", "the payload to be used") payloadsPtr := flag.String("pL", "", "the list of payloads to be used") // Parse the arguments flag.Parse() if (*payloadPtr == "" && *payloadsPtr == "") { flag.PrintDefaults() return }else{ // Create the output directory _,err := os.Stat("output") if os.IsNotExist(err) { errDir := os.Mkdir("output", 0755) if errDir != nil { log.Fatal(err) } } if *payloadsPtr != "" && *payloadPtr == "" { // Implement Concurrency var wg sync.WaitGroup for i := 0; i < *concurrPtr/2; i++ { wg.Add(1) go func() { // Run the scanner runWithMultiplePayload(*payloadsPtr) wg.Done() }() wg.Wait() } }else{ // Implement Concurrency var wg sync.WaitGroup for i := 0; i < *concurrPtr/2; i++ { wg.Add(1) go func() { // Run the scanner runWithSinglePayload(*payloadPtr) wg.Done() }() wg.Wait() } } } } // Print the banner func banner() { m1 := ` _ (_) _____ _ __ |_ / | '_ \ / /| | | | | /___|_|_| |_| ` m2 := ` May the bounties come ` fmt.Println(Red + m1 + Cyan + m2) } // Read the file containing the urls from stdin func runWithMultiplePayload(payloads string) { fmt.Println(White + "[" + Blue + "~" + White + "] Searching for URL(s)") fmt.Println(White + "[" + Green+ "~" + White + "]" + Red + " Multiple Payloads") fmt.Println(White + 
"========================================================================================\n") fmt.Println("Status Code\tBytes\t\tURL") fmt.Println("-----------\t-----\t\t---\n") // Create the 'NewScanner' object and print each line in the file scanner := bufio.NewScanner(os.Stdin) for scanner.Scan() { file,err := os.Open(payloads) if err != nil { log.Fatal(err) } // Parse the URL u,err := url.Parse(scanner.Text()) if err != nil{ log.Fatal(err) } // Fetch the URL Values qs := url.Values{} // Generate a unique UUID for the folder uID, err := uuid.NewV4() if err != nil { log.Fatal(err) } // Create the Unique Folder errDir := os.Mkdir("output/"+uID.String(), 0755) if errDir != nil { log.Fatal(errDir) } // Create the response file f,err := os.Create("output/"+uID.String()+"/responses.txt") if err != nil { log.Fatal(err) } // Dump the response resp,err := http.Get(scanner.Text()) if err != nil { log.Fatal(err) } dump,err := httputil.DumpResponse(resp, true) if err != nil { log.Fatal(err) } l,err := f.WriteString(string(dump)) if err != nil { log.Fatal(err) } defer resp.Body.Close() defer f.Close() pL := bufio.NewScanner(file) for pL.Scan() { // Get the url paraemters and set the newvalue (payload) for param,vv := range u.Query() { qs.Set(param, vv[0]+pL.Text()) } // Url encoding the url u.RawQuery = qs.Encode() // Print the values fmt.Printf("%s\t", resp.StatusCode) fmt.Printf("%d Bytes\t", l) fmt.Println(White + "[" + Green + "~" + White + "] " + White + u.String()) } defer file.Close() if err := pL.Err(); err != nil { log.Fatal(err) } } if err := scanner.Err(); err != nil { log.Fatal(err) } } // Read the file containing the urls from stdin func runWithSinglePayload(payload string) { fmt.Println(White + "[" + Blue + "~" + White + "] Searching for URL(s)") fmt.Println(White + "[" + Green+ "~" + White + "] Payload: " + payload) fmt.Println(White + "========================================================================================\n") fmt.Println("Status 
Code\tBytes\t\tURL") fmt.Println("-----------\t-----\t\t---\n") // Create the 'NewScanner' object and print each line in the file scanner := bufio.NewScanner(os.Stdin) for scanner.Scan() { // Parse the URL u,err := url.Parse(scanner.Text()) if err != nil{ log.Fatal(err) } // Fetch the URL Values qs := url.Values{} // Get the url paraemters and set the newvalue (payload) for param,vv := range u.Query() { qs.Set(param, vv[0]+payload) } // Url encoding the url u.RawQuery = qs.Encode() // Dump the response resp,err := http.Get(scanner.Text()) if err != nil { log.Fatal(err) } // Generate a unique UUID for the folder uID, err := uuid.NewV4() if err != nil { log.Fatal(err) } // Create the Unique Folder errDir := os.Mkdir("output/"+uID.String(), 0755) if errDir != nil { log.Fatal(errDir) } // Create the response file f,err := os.Create("output/"+uID.String()+"/responses.txt") if err != nil { log.Fatal(err) } dump,err := httputil.DumpResponse(resp, true) if err != nil { log.Fatal(err) } l,err := f.WriteString(string(dump)) if err != nil { log.Fatal(err) } defer resp.Body.Close() defer f.Close() // Print the values fmt.Printf("%s\t", resp.StatusCode) fmt.Printf("%d Bytes\t", l) fmt.Println(White + "[" + Green + "~" + White + "] " + White + u.String()) } if err := scanner.Err(); err != nil { log.Fatal(err) } }
package point // Stack ... type Stack struct { pts []Point } // Push ... func (s *Stack) Push(p Point) { s.pts = append(s.pts, p) } // Top ... func (s Stack) Top() Point { p := s.pts[len(s.pts)-1] return p } // NextToTop ... func (s Stack) NextToTop() Point { p := s.pts[len(s.pts)-2] return p } // Pop ... func (s *Stack) Pop() Point { p := s.Top() s.pts = s.pts[:len(s.pts)-1] return p } // Count ... func (s Stack) Count() int { return len(s.pts) } // Points ... func (s Stack) Points() []Point { return s.pts }
package main

import "fmt"

// regions computes 1 + C(n,2) + C(n,4) without overflow-prone factorials:
// n*(n-1) is always even, and n*(n-1)*(n-2)*(n-3) is always divisible by 24,
// so the integer divisions are exact.
// (Extracted from main so the formula is unit-testable; output unchanged.)
func regions(n int64) int64 {
	pairs := n * (n - 1)
	quads := pairs * (n - 2) * (n - 3)
	return 1 + pairs/2 + quads/24
}

// main reads n from stdin and prints 1 + C(n,2) + C(n,4).
func main() {
	var n int64
	fmt.Scanf("%d", &n)
	fmt.Println(regions(n))
}
// +build ignore

package main

import (
	"fmt"
	"image/png"
	"os"

	"github.com/viocle-kvanek/gotomation"
)

// main grabs the primary screen via gotomation, prints its id and
// dimensions, and writes the capture to capture.png in the working
// directory. Any failure panics (this is an example file, build-ignored).
func main() {
	fmt.Println("Capture Screen")
	screen, err := gotomation.GetMainScreen()
	if err != nil {
		panic(err)
	}
	fmt.Printf("id: %d\n", screen.ID())
	fmt.Printf("w: %d\n", screen.W())
	fmt.Printf("h: %d\n", screen.H())
	image, err := screen.Capture()
	if err != nil {
		panic(err)
	}
	file, err := os.Create("capture.png")
	if err != nil {
		panic(err)
	}
	// NOTE(review): file is never closed; add file.Close() (or defer it)
	// after the encode so the PNG is reliably flushed to disk.
	err = png.Encode(file, image)
	if err != nil {
		panic(err)
	}
}
package main import ( "io" "net/http" "os" "github.com/2509934810/dbserver" _ "github.com/go-sql-driver/mysql" "github.com/labstack/echo" ) func getUser(c echo.Context) error { id := c.Param("id") return c.String(http.StatusOK, id) } func login(c echo.Context) error { name := c.QueryParam("name") password := c.QueryParam("password") return c.String(http.StatusOK, name+password) } func loginForm(c echo.Context) error { name := c.FormValue("name") password := c.FormValue("password") return c.String(http.StatusOK, name+password) } type Person struct { name string age int8 } func show(c echo.Context) error { a := new(Person) if err := c.Bind(a); err != nil { return err } return c.JSON(http.StatusCreated, a) } func save(c echo.Context) error { name := c.FormValue("name") avator, err := c.FormFile("avator") if err != nil { panic(err.Error()) } src, err := avator.Open() if err != nil { panic(err.Error()) } defer src.Close() dst, err := os.Create("/Users/jlei-ext/go/" + avator.Filename) if err != nil { panic(err.Error()) } defer dst.Close() if _, err := io.Copy(dst, src); err != nil { return err } return c.HTML(http.StatusOK, "<b>Thanks"+name+"contribute your file</b>") } func main() { e := echo.New() e.GET("/", func(c echo.Context) error { return c.String(http.StatusOK, "hello world") }) e.GET("/login", login) e.GET("/user/:id", getUser) e.POST("/loginform", loginForm) e.POST("/save", save) e.POST("/show", show) e.GET("/createUser", dbserver.CreateUser) e.GET("/database/create", dbserver.CreateDb) // e.Use(middleware.LoggerWithConfig(middleware.LoggerConfig{ // Format: "method=${method}, uri=${uri}, status=${status}\n", // })) e.Start(":1234") }
// Copyright 2017 European Digital Reading Lab. All rights reserved. // Licensed to the Readium Foundation under one or more contributor license agreements. // Use of this source code is governed by a BSD-style license // that can be found in the LICENSE file exposed on Github (readium) in the project repository. package license // GenerateUserKey function prepares the user key func GenerateUserKey(key UserKey) []byte { var userKey []byte userKey = key.Value return userKey }
//
// Copyright 2021 IBM Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

package controllers

import (
	"context"
	"os"

	batchv1 "k8s.io/api/batch/v1"

	operatorv1alpha1 "github.com/IBM/ibm-auditlogging-operator/api/v1alpha1"

	appsv1 "k8s.io/api/apps/v1"

	"github.com/IBM/ibm-auditlogging-operator/controllers/constant"

	rbacv1 "k8s.io/api/rbac/v1"

	certmgr "github.com/jetstack/cert-manager/pkg/apis/certmanager/v1alpha1"
	corev1 "k8s.io/api/core/v1"

	res "github.com/IBM/ibm-auditlogging-operator/controllers/resources"
	opversion "github.com/IBM/ibm-auditlogging-operator/version"

	"k8s.io/apimachinery/pkg/types"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	testdata "github.com/IBM/ibm-auditlogging-operator/controllers/testutil"
)

// Integration-style test: creates an AuditLogging CR in a fresh namespace and
// then asserts, via Eventually polling, that the controller reconciles every
// secondary resource (job, deployment, configmaps, certs, RBAC, service,
// daemonset).
var _ = Describe("AuditLogging controller", func() {
	const requestName = "example-auditlogging"
	var (
		ctx              context.Context
		requestNamespace string
		auditLogging     *operatorv1alpha1.AuditLogging
		namespacedName   types.NamespacedName
	)

	// Each spec gets its own randomized namespace (createNSName) so specs
	// cannot interfere with each other's resources.
	BeforeEach(func() {
		ctx = context.Background()
		requestNamespace = createNSName(namespace)
		By("Creating the Namespace")
		Expect(k8sClient.Create(ctx, testdata.NamespaceObj(requestNamespace))).Should(Succeed())
		// The controller reads its own namespace from this env var.
		Expect(os.Setenv(constant.OperatorNamespaceKey, requestNamespace)).Should(Succeed())

		auditLogging = testdata.AuditLoggingObj(requestName)
		// AuditLogging is cluster scoped and does not have a namespace
		namespacedName = types.NamespacedName{Name: requestName, Namespace: ""}
		By("Creating a new AuditLogging")
		Expect(k8sClient.Create(ctx, auditLogging)).Should(Succeed())
	})

	AfterEach(func() {
		By("Deleting the AuditLogging")
		Expect(k8sClient.Delete(ctx, auditLogging)).Should(Succeed())
		By("Deleting the Namespace")
		Expect(k8sClient.Delete(ctx, testdata.NamespaceObj(requestNamespace))).Should(Succeed())
	})

	Context("When creating an AuditLogging instance", func() {
		It("Should create all secondary resources", func() {
			createdAuditLogging := &operatorv1alpha1.AuditLogging{}
			Eventually(func() error {
				return k8sClient.Get(ctx, namespacedName, createdAuditLogging)
			}, timeout, interval).Should(BeNil())

			By("Check status of AuditLogging")
			audit := &operatorv1alpha1.AuditLogging{}
			// Reconciled version reaching the operator version signals a
			// completed reconcile pass.
			Eventually(func() string {
				Expect(k8sClient.Get(ctx, namespacedName, audit)).Should(Succeed())
				return audit.Status.Versions.Reconciled
			}, timeout, interval).Should(Equal(opversion.Version))

			By("Check if Job was created")
			foundJob := &batchv1.Job{}
			Eventually(func() error {
				return k8sClient.Get(ctx, types.NamespacedName{Name: res.JobName, Namespace: requestNamespace}, foundJob)
			}, timeout, interval).Should(Succeed())

			By("Check if Policy Controller deployment was created")
			foundDeploy := &appsv1.Deployment{}
			Eventually(func() error {
				return k8sClient.Get(ctx, types.NamespacedName{Name: res.AuditPolicyControllerDeploy, Namespace: requestNamespace}, foundDeploy)
			}, timeout, interval).Should(Succeed())

			By("Check if ConfigMaps were created")
			foundCM := &corev1.ConfigMap{}
			// One configmap per fluentd config fragment.
			for _, cm := range res.FluentdConfigMaps {
				Eventually(func() error {
					return k8sClient.Get(ctx, types.NamespacedName{Name: cm, Namespace: requestNamespace}, foundCM)
				}, timeout, interval).Should(Succeed())
			}

			By("Check if Certificates were created")
			foundHTTPSCert := &certmgr.Certificate{}
			Eventually(func() error {
				return k8sClient.Get(ctx, types.NamespacedName{Name: res.AuditLoggingHTTPSCertName, Namespace: requestNamespace}, foundHTTPSCert)
			}, timeout, interval).Should(Succeed())
			foundCert := &certmgr.Certificate{}
			Eventually(func() error {
				return k8sClient.Get(ctx, types.NamespacedName{Name: res.AuditLoggingCertName, Namespace: requestNamespace}, foundCert)
			}, timeout, interval).Should(Succeed())

			By("Check if SA was created")
			foundSA := &corev1.ServiceAccount{}
			Eventually(func() error {
				return k8sClient.Get(ctx, types.NamespacedName{Name: res.OperandServiceAccount, Namespace: requestNamespace}, foundSA)
			}, timeout, interval).Should(Succeed())

			By("Check if Role was created")
			foundRole := &rbacv1.Role{}
			Eventually(func() error {
				return k8sClient.Get(ctx, types.NamespacedName{Name: res.FluentdDaemonSetName + "-role", Namespace: requestNamespace}, foundRole)
			}, timeout, interval).Should(Succeed())

			By("Check if RoleBinding was created")
			foundRB := &rbacv1.RoleBinding{}
			Eventually(func() error {
				return k8sClient.Get(ctx, types.NamespacedName{Name: res.FluentdDaemonSetName + "-rolebinding", Namespace: requestNamespace}, foundRB)
			}, timeout, interval).Should(Succeed())

			By("Check if Service was created")
			foundSvc := &corev1.Service{}
			Eventually(func() error {
				return k8sClient.Get(ctx, types.NamespacedName{Name: constant.AuditLoggingComponentName, Namespace: requestNamespace},
					foundSvc)
			}, timeout, interval).Should(Succeed())

			By("Check if DaemonSet was created")
			foundDaemonset := &appsv1.DaemonSet{}
			Eventually(func() error {
				return k8sClient.Get(ctx, types.NamespacedName{Name: res.FluentdDaemonSetName, Namespace: requestNamespace}, foundDaemonset)
			}, timeout, interval).Should(Succeed())
		})
	})
})
// DRUNKWATER TEMPLATE(add description and prototypes) // Question Title and Description on leetcode.com // Function Declaration and Function Prototypes on leetcode.com //318. Maximum Product of Word Lengths //Given a string array words, find the maximum value of length(word[i]) * length(word[j]) where the two words do not share common letters. You may assume that each word will contain only lower case letters. If no such two words exist, return 0. //Example 1: //Given ["abcw", "baz", "foo", "bar", "xtfn", "abcdef"] //Return 16 //The two words can be "abcw", "xtfn". //Example 2: //Given ["a", "ab", "abc", "d", "cd", "bcd", "abcd"] //Return 4 //The two words can be "ab", "cd". //Example 3: //Given ["a", "aa", "aaa", "aaaa"] //Return 0 //No such pair of words. //Credits: //Special thanks to @dietpepsi for adding this problem and creating all test cases. //func maxProduct(words []string) int { //} // Time Is Money
package main

import (
	"context"
	"database/sql"
	"os"

	"github.com/operator-framework/operator-registry/pkg/sqlite"
)

// checkErr aborts on any error; fine for a throwaway data-prep tool.
func checkErr(err error) {
	if err != nil {
		panic(err)
	}
}

// main builds a fresh bundles.db registry database from the kiali manifests
// and points the 1.4.2 bundle at a bundle image so that a bundle-image
// lookup is performed later.
func main() {
	dbFile := "bundles.db"
	bundleImage := "quay.io/olmtest/installplan_e2e-bundle-image:latest"
	dataPath := "../kiali-manifests"

	// start with a clean slate (error deliberately ignored: the file may
	// simply not exist yet)
	os.Remove(dbFile)

	// create database
	db, err := sql.Open("sqlite3", dbFile)
	checkErr(err)
	// previously the handle was never closed
	defer db.Close()

	dbLoader, err := sqlite.NewSQLLiteLoader(db)
	checkErr(err)
	err = dbLoader.Migrate(context.TODO())
	checkErr(err)

	// populate database with data
	loader := sqlite.NewSQLLoaderForDirectory(dbLoader, dataPath)
	err = loader.Populate()
	checkErr(err)

	// add a bundlepath for kiali 1.4.2 so that later a bundle image lookup is
	// performed; placeholders replace the fmt.Sprintf string-building (and its
	// mixed '…'/"…" quoting) so values need no manual escaping
	_, err = db.Exec(`UPDATE operatorbundle SET bundlepath = ? WHERE version = ?;`, bundleImage, "1.4.2")
	checkErr(err)
}
package main

import (
	"bytes"
	"fmt"
	"log"
	"os"
)

// captchaSum solves Advent of Code 2017 day 1: it sums the numeric value of
// every byte in dat that equals the byte `offset` positions ahead, wrapping
// around the end. dat is assumed to contain only ASCII digits.
//
// part1 uses offset 1 (next digit); part2 uses offset len(dat)/2 (digit
// halfway around). This replaces the duplicated container/ring walks in the
// original part1/part2 with one shared helper.
func captchaSum(dat []byte, offset int) int64 {
	n := len(dat)
	if n == 0 {
		return 0 // guard: avoids modulo-by-zero on empty input
	}
	var sum int64
	for i, c := range dat {
		if c == dat[(i+offset)%n] {
			sum += int64(c - '0')
		}
	}
	return sum
}

// part1 prints the sum of digits matching their immediate successor.
func part1(dat []byte) {
	fmt.Println("[part1]", captchaSum(dat, 1))
}

// part2 prints the sum of digits matching the digit halfway around the ring.
func part2(dat []byte) {
	fmt.Println("[part2]", captchaSum(dat, len(dat)/2))
}

func main() {
	// os.ReadFile replaces the deprecated ioutil.ReadFile.
	dat, err := os.ReadFile("./input.txt")
	if err != nil {
		log.Fatal(err)
	}
	dat = bytes.TrimSpace(dat)
	part1(dat)
	part2(dat)
}
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/spf13/viper"
	clientv3 "go.etcd.io/etcd/client/v3"
)

// cli is the shared etcd client, initialised from app.toml in init.
var cli *clientv3.Client

func init() {
	// Read the configuration file (app.toml in the working directory).
	viper.AddConfigPath(".")
	viper.SetConfigName("app")
	viper.SetConfigType("toml")
	// Best-effort like the original, but at least surface the failure
	// instead of dropping it with `_ =`.
	if err := viper.ReadInConfig(); err != nil {
		log.Printf("read config: %v (continuing with defaults)", err)
	}
	etcdCluster := viper.GetStringSlice("etcd.cluster")

	// Previously the error was discarded, so a bad endpoint list produced a
	// nil client and a panic on first use.
	var err error
	cli, err = clientv3.New(clientv3.Config{
		Endpoints:   etcdCluster,
		DialTimeout: 5 * time.Second,
	})
	if err != nil {
		log.Fatalf("connect etcd: %v", err)
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.TODO(), time.Second*5)
	defer cancel()
	res, err := cli.Get(ctx, "/mysql/host")
	if err != nil {
		log.Fatalf("get /mysql/host: %v", err)
	}
	fmt.Println(res)
	sample()
}

// sample demonstrates classifying etcd client errors for a simple Get.
func sample() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Second*5)
	// resp, err := cli.Put(ctx, "sample_key", "sample_value")
	resp, err := cli.Get(ctx, "sample_key")
	cancel()
	if err != nil {
		switch err {
		case context.Canceled:
			log.Fatalf("ctx is canceled by another routine: %v", err)
		case context.DeadlineExceeded:
			log.Fatalf("ctx is attached with a deadline is exceeded: %v", err)
		// case rpctypes.ErrEmptyKey:
		// 	log.Fatalf("client-side error: %v", err)
		default:
			log.Fatalf("bad cluster endpoints, which are not etcd servers: %v", err)
		}
	}
	// use the response
	log.Default().Println(resp)
}
package tools import ( "github.com/PagerDuty/go-pagerduty" "github.com/jeevatkm/go-model" ) // GetMappedEscalationPolicies - model.Copy returns a slice of errors if any occur in case we want to do something with them. func GetMappedEscalationPolicies(policies []pagerduty.EscalationPolicy) []EscalationsPolicy { escalationPoliciesToPersist := []EscalationsPolicy{} for i := range policies { nextPolicy := EscalationsPolicy{} model.Copy(&nextPolicy, policies[i]) escalationPoliciesToPersist = append(escalationPoliciesToPersist, nextPolicy) } return escalationPoliciesToPersist } func GetMappedEscalationRules(rules []pagerduty.EscalationRule, policyID string) []EscalationsRule { escalationRulesToPersist := []EscalationsRule{} for i := range rules { nextRule := EscalationsRule{} model.Copy(&nextRule, rules[i]) nextRule.PolicyID = policyID nextRule.LevelIndex = i escalationRulesToPersist = append(escalationRulesToPersist, nextRule) } return escalationRulesToPersist } func GetMappedUsers(users []pagerduty.User) []User { usersToPersist := []User{} for i := range users { nextUser := User{} model.Copy(&nextUser, users[i]) usersToPersist = append(usersToPersist, nextUser) } return usersToPersist } func GetMappedSchedules(schedules []pagerduty.Schedule) []Schedule { schedulesToPersist := []Schedule{} for i := range schedules { nextSchedule := Schedule{} model.Copy(&nextSchedule, schedules[i]) schedulesToPersist = append(schedulesToPersist, nextSchedule) } return schedulesToPersist } func GetMappedServices(services []pagerduty.Service) []Service { servicesToPersist := []Service{} for i := range services { nextService := Service{} model.Copy(&nextService, services[i]) servicesToPersist = append(servicesToPersist, nextService) } return servicesToPersist } func GetMappedIncidents(incidents []pagerduty.Incident) []Incident { incidentsToPersist := []Incident{} for i := range incidents { nextIncident := Incident{} model.Copy(&nextIncident, incidents[i]) incidentsToPersist = 
append(incidentsToPersist, nextIncident) } return incidentsToPersist } func GetMappedLogEntries(logEntries []pagerduty.LogEntry) []LogEntry { logEntriesToPersist := []LogEntry{} for i := range logEntries { nextLogEntry := LogEntry{} model.Copy(&nextLogEntry, logEntries[i]) logEntriesToPersist = append(logEntriesToPersist, nextLogEntry) } return logEntriesToPersist }
/* Given a string containing the English word for one of the single-digit numbers, return the number without using any of the words in your code. Examples: eng_to_dec('zero') # => 0 eng_to_dec('four') # => 4 Note: there is no right or wrong way to complete this challenge. Be creative with your solutions! Thanks to HazierPhonics for posting this idea on r/dailyprogrammer_ideas! If you have a problem that you think would be good for us, head over there and contribute! */ package main import ( "crypto/sha512" "fmt" ) func main() { tab := []string{"zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"} for i, s := range tab { assert(digit(s) == i) } } func assert(x bool) { if !x { panic("assertion failed") } } func digit(s string) int { m := map[string]int{ "2dc48a941c39b33a018574c517260c7887c7ba528c4ad68d7b792c4b2037a0ceb0f8d8c166d4a9f2d0b92ec0246df0a2f936a9f6e9da2e03a37cc9600abc3c7b": 0, "05f70341078acf6a06d423d21720f9643d5f953626d88a02636dc3a9e79582aeb0c820857fd3f8dc502aa8360d2c8fa97a985fda5b629b809cad18ffb62d3899": 1, "928d50d1e24dab7cca62cfe84fcdcf9fc695160a278f91b5c0af22b709d82f8aa3b4955b3de9ba6d0a0eb7d932dc64c4d5c63fc2de87441ad2e5b929f9b67c5e": 2, "62758e4a57b76feb1fc878387c4d2639e4bfbded06afb5f22227835d9fc63bfbd30df34447ae21ece6b1659c0abebd716f350e81c3439a3799172dca9b05f78f": 3, "39a52bfc3af78ae6cb5a2c110014a610134bb7c3d67678a5bee8b5ab86a733509fe50a69d0d4d5e8a84eef546713d6334f1d5207112f1140de025e6b77413d32": 4, "71471f1accd118982f790f9503c368c72914b099b69a5333579dc8edb4378080fba9e67707c276a179301ead88b70cf618fb015fe62272c1d5580a31e844cefa": 5, "6c02bea31ac9996e6da800d38796ec5278b6dd151dd53d30fcc3333f26c49f97c54dae012cb57eab95d6831def50694505bb617b127621a4d09fa7fbc4b52b6d": 6, "35ad15b2d691a1f51ce3fc7205e31dc0632587d910952c9e3cd4bef9340d1919b2778841b0e72ae22228d50967595f0e8a0fa5cadb6c80a4981cb9d47b536898": 7, "b26ca1695872a06fc11268e609464dff4b98377a81a7513b05187b90d3f314ba3b74c7d3aa70a8865267bd2454322c4d64179e6400ecde2c27eeca5671a8e69c": 
8, "aa84334bdf3487c9fcb3f6cffb9425aa234c52b3dc3e1626d40972911c2c1dde69504ecda7f08bae5ee86ca028eb1e560bf0870b8779d0a01421050f4198135d": 9, } k := fmt.Sprintf("%x", sha512.Sum512([]byte(s))) if v, f := m[k]; f { return v } return -1 }
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"os"
)

// SiteList wraps the filtered sites for the JSON response.
type SiteList struct {
	Sites []Site `json:"sites"`
}

// handler parses nsf.xml on each request and filters the sites by the
// request's "artist" and "keyword" form values, writing the result as JSON.
// Failures now produce HTTP error responses instead of panicking inside the
// handler goroutine.
func handler(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseForm(); err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	xml, err := os.Open("nsf.xml")
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer xml.Close()
	sites := ParseSites(xml)
	if r.Form["artist"] != nil {
		fmt.Println("artists!", r.Form["artist"])
		// Collect every state each requested artist is showing in, then keep
		// only sites in those states.
		var states []string
		for _, artist := range r.Form["artist"] {
			states = append(states, StatesShowingIn(artist)...)
		}
		sites = SitesInState(sites, states...)
	}
	if r.Form["keyword"] != nil {
		fmt.Println("keywords!", r.Form["keyword"])
		sites = SitesWithKeyword(sites, r.Form["keyword"]...)
	}
	j, err := json.MarshalIndent(SiteList{Sites: sites}, "", " ")
	if err != nil {
		// Previously the error was printed and an empty body written anyway.
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	fmt.Println(r.Form, len(sites))
	fmt.Fprint(w, string(j))
}

func main() {
	http.HandleFunc("/query", handler)
	// ListenAndServe only returns on failure; report it instead of exiting
	// silently as the original did.
	log.Fatal(http.ListenAndServe(":8080", nil))
}
package msgsystem

import (
	"context"
)

// MsgSystem abstracts a message transport: one method to publish a raw
// payload, one to consume payloads through a callback, and one to release
// the underlying resources. Concrete semantics (ordering, retries, ack
// behaviour) are implementation-defined.
type MsgSystem interface {
	// Close releases whatever connections/resources the implementation holds.
	Close() error
	// SendMessage publishes a single raw message payload.
	SendMessage(ctx context.Context, message []byte) error
	// ReceiveMessages consumes messages, invoking processFunc for each
	// received payload. How a processFunc error affects consumption
	// (requeue, stop, skip) is up to the implementation — confirm per backend.
	ReceiveMessages(ctx context.Context, processFunc func(ctx context.Context, message []byte) error) error
}
// Package entity contains all the entities of the project
//go:generate go run github.com/tinylib/msgp -tests=false
package entity

import "time"

// User is the entity of the user.
type User struct {
	ID       int64     `json:"id"`
	Name     string    `json:"name"`
	Password string    `json:"-"`       // excluded from JSON output on purpose
	Created  time.Time `json:"created"` // creation timestamp
	Updated  time.Time `json:"updated"` // last-update timestamp
}
package scroll

import (
	"golang.org/x/net/context"
	"reflect"
	"sync"
)

// memLog is an in-memory Log implementation guarded by an embedded mutex.
type memLog struct {
	Objects []interface{} // append-only list of logged values
	// LastIndex maps a Unique key to the offset of its most recent entry,
	// letting cursors skip superseded entries for the same key.
	LastIndex map[string]int
	sync.Mutex
}

// memCursor is a read position into a memLog.
type memCursor struct {
	Log    *memLog
	Offset int
}

// A MemoryLog is an in-memory representation of a log. Primarily it is useful
// for unit tests.
func MemoryLog() Log {
	return &memLog{
		Objects:   make([]interface{}, 0),
		LastIndex: make(map[string]int),
	}
}

// Cursor returns a new cursor positioned at the start of the log.
func (m *memLog) Cursor() Cursor {
	return &memCursor{m, 0}
}

// Next copies the next live entry into x (which must be a pointer to the
// entry's type) and advances the cursor. Entries implementing Unique are
// skipped unless they are the latest entry for their key. Returns Done when
// the log is exhausted.
func (c *memCursor) Next(ctx context.Context, x interface{}) error {
	m := c.Log
	vx := reflect.ValueOf(x)
	m.Lock()
	defer m.Unlock()
	for c.Offset < len(m.Objects) {
		if y, ok := m.Objects[c.Offset].(Unique); ok {
			// A newer entry with the same key exists; skip this one.
			if m.LastIndex[y.Key()] != c.Offset {
				c.Offset += 1
				continue
			}
		}
		vx.Elem().Set(reflect.ValueOf(m.Objects[c.Offset]))
		c.Offset += 1
		return nil
	}
	return Done
}

// Append adds x to the log; if x is Unique, its key now points at this entry,
// superseding any earlier entry with the same key for future cursor reads.
func (m *memLog) Append(ctx context.Context, x interface{}) error {
	m.Lock()
	defer m.Unlock()
	if uniq, ok := x.(Unique); ok {
		m.LastIndex[uniq.Key()] = len(m.Objects)
	}
	m.Objects = append(m.Objects, x)
	return nil
}
package mr

import (
	"fmt"
	"log"
	"net"
	"net/http"
	"net/rpc"
	"os"
	"strconv"
	"sync"
	"time"
)

const (
	// Master states.
	MAPPING = iota
	REDUCING
	DONE

	// MAXJOB is the maximum number of map files handed out per GetJob call.
	MAXJOB = 2
	// REJOBTIME is how long a handed-out job may run before it is reassigned.
	REJOBTIME = 10 * time.Second
)

type Master struct {
	// Your definitions here.
	mu     sync.Mutex
	status int // 0 mapping; 1 reducing; 2 done
	jid    int // next map-job id to hand out
	// nReduce is the number of reduce partitions.
	nReduce int
	// Map phase bookkeeping: files waiting to be mapped vs. in flight.
	needMapFile map[string]bool
	mappingFile map[string]bool
	// Reduce phase bookkeeping: partition -> intermediate files, waiting vs. in flight.
	needReduceFile map[int][]string
	reducingFile   map[int][]string
	// channels carries one completion channel per outstanding job id.
	channels map[int]chan bool
}

// Your code here -- RPC handlers for the worker to call.

//
// start a thread that listens for RPCs from worker.go
//
func (m *Master) server() {
	rpc.Register(m)
	rpc.HandleHTTP()
	//l, e := net.Listen("tcp", ":1234")
	sockname := masterSock()
	os.Remove(sockname)
	l, e := net.Listen("unix", sockname)
	if e != nil {
		log.Fatal("listen error:", e)
	}
	go http.Serve(l, nil)
}

//
// worker.go calls GetJob to get a current job from master
//
func (m *Master) GetJob(args *JobArgs, reply *JobReply) error {
	m.mu.Lock()
	defer m.mu.Unlock()
	switch m.status {
	case MAPPING:
		if len(m.needMapFile) > 0 {
			// Hand out up to MAXJOB pending files as one map job.
			var i = 0
			for key := range m.needMapFile {
				i++
				if i > MAXJOB {
					break
				}
				reply.Jfiles = append(reply.Jfiles, key)
				delete(m.needMapFile, key)
				m.mappingFile[key] = true
			}
			reply.Jtype = MAP
			reply.Jid = m.jid
			reply.Jnum = m.nReduce
			// Watch for completion or timeout of this job in the background.
			m.channels[m.jid] = make(chan bool)
			go m.waitFinish(m.channels[m.jid], reply)
			m.jid++
			// go wait for resopnse
		} else {
			// Everything is in flight; tell the worker to wait.
			reply.Jtype = WAIT
		}
	case REDUCING:
		if len(m.needReduceFile) > 0 {
			// Hand out one pending reduce partition (job id = partition index).
			for key, value := range m.needReduceFile {
				reply.Jfiles = value
				reply.Jid = key
				delete(m.needReduceFile, key)
				m.reducingFile[key] = value
				break
			}
			reply.Jnum = m.nReduce
			reply.Jtype = REDUCE
			m.channels[reply.Jid] = make(chan bool)
			go m.waitFinish(m.channels[reply.Jid], reply)
		} else {
			reply.Jtype = WAIT
		}
	case DONE:
		reply.Jtype = QUIT
	}
	return nil
}

// waitFinish waits for the job's completion signal or a REJOBTIME timeout.
// On completion it commits the job's outputs and possibly advances the
// master's phase; on timeout it returns the job's inputs to the pending pool.
func (m *Master) waitFinish(ch chan bool, reply *JobReply) {
	select {
	case <-ch:
		// finished
		m.mu.Lock()
		if reply.Jtype == MAP && m.status == MAPPING {
			fmt.Printf("Finish map job #%v\n", reply.Jid)
			for _, file := range reply.Jfiles {
				delete(m.mappingFile, file)
				delete(m.needMapFile, file)
			}
			// Register the job's mr-<jid>-<partition>.tmp outputs for reduce.
			for i := 0; i < m.nReduce; i++ {
				fileName := "mr-" + strconv.Itoa(reply.Jid) + "-" + strconv.Itoa(i) + ".tmp"
				m.needReduceFile[i] = append(m.needReduceFile[i], fileName)
			}
			fmt.Printf("Last %v files\n", len(m.needMapFile)+len(m.mappingFile))
			if len(m.needMapFile) == 0 && len(m.mappingFile) == 0 {
				fmt.Printf("Swich status mapping to reducing\n")
				m.status = REDUCING
			}
		} else if reply.Jtype == REDUCE && m.status == REDUCING {
			fmt.Printf("Finish reduce job #%v\n", reply.Jid)
			delete(m.reducingFile, reply.Jid)
			delete(m.needReduceFile, reply.Jid)
			if len(m.needReduceFile) == 0 && len(m.reducingFile) == 0 {
				fmt.Printf("Swich status reducing to done\n")
				m.status = DONE
			}
		} else {
			// A stale completion (e.g. after a timeout reassigned the job).
			fmt.Printf("Job #%v finished, but is dropped.\n", reply.Jid)
		}
		delete(m.channels, reply.Jid)
		m.mu.Unlock()
		return
	case <-time.After(REJOBTIME):
		m.mu.Lock()
		if reply.Jtype == MAP {
			fmt.Printf("Map job #%v timeout!\n", reply.Jid)
			for _, file := range reply.Jfiles {
				delete(m.mappingFile, file)
				m.needMapFile[file] = true
			}
			delete(m.channels, reply.Jid)
		} else if reply.Jtype == REDUCE {
			fmt.Printf("Reduce job #%v timeout!\n", reply.Jid)
			delete(m.reducingFile, reply.Jid)
			m.needReduceFile[reply.Jid] = reply.Jfiles
			// NOTE(review): unlike the MAP branch, the reduce-timeout path does
			// not delete m.channels[reply.Jid] — confirm whether that is
			// intentional (a late FinishJob would still find the channel).
		}
		m.mu.Unlock()
	}
}

//
// worker.go calls FinishJob to indicate it has finish the job
//
// NOTE(review): m.channels is read here without holding m.mu while waitFinish
// deletes entries under the lock — this map access looks racy; verify with
// `go test -race`.
func (m *Master) FinishJob(args *JobReply, reply *EmptyReply) error {
	m.channels[args.Jid] <- true
	return nil
}

//
// main/mrmaster.go calls Done() periodically to find out
// if the entire job has finished.
//
func (m *Master) Done() bool {
	return m.status == 2
}

//
// create a Master.
// main/mrmaster.go calls this function.
// nReduce is the number of reduce tasks to use.
//
func MakeMaster(files []string, nReduce int) *Master {
	m := Master{}
	m.status = MAPPING
	m.jid = 0
	m.nReduce = nReduce
	m.needMapFile = make(map[string]bool)
	m.mappingFile = make(map[string]bool)
	m.needReduceFile = make(map[int][]string)
	m.reducingFile = make(map[int][]string)
	m.channels = make(map[int]chan bool)
	for _, file := range files {
		m.needMapFile[file] = true
	}
	// Your code here.

	m.server()
	return &m
}
package searcher

import (
	"github.com/emicklei/go-restful"
	api "github.com/emicklei/go-restful-openapi"
	"grm-searcher/dbcentral/es"
	"grm-searcher/dbcentral/etcd"
	"grm-searcher/dbcentral/pg"
	. "grm-searcher/types"
	// . "grm-service/dbcentral/pg"
	. "grm-service/util"
)

// SearcherSvc bundles the databases and directories the search REST service
// needs and exposes them through a go-restful WebService.
type SearcherSvc struct {
	SysDB     *pg.SystemDB    // system database
	MetaDB    *pg.MetaDB      // metadata database
	DynamicDB *etcd.DynamicDB // dynamic configuration store (etcd)
	EsUtil    *es.ESUtil      // Elasticsearch helper for keyword/spatial queries
	DataDir   string
	ConfigDir string
	// DataIdConns caches a connection string per data id — presumably keyed
	// by data_id; confirm against the handlers.
	DataIdConns map[string]string
}

// WebService creates a new service that can handle REST requests for resources.
func (s SearcherSvc) WebService() *restful.WebService {
	s.DataIdConns = make(map[string]string, 0)

	ws := new(restful.WebService)
	ws.Path("/").
		//Consumes(restful.MIME_JSON, restful.MIME_JSON).
		Produces(restful.MIME_JSON, restful.MIME_JSON)
	tags := []string{TR("searcher")}

	// Filter the incoming data.
	ws.Route(ws.POST("/data/_filter").To(s.dataFilter).
		Doc(TR("data fileter")).
		Metadata(api.KeyOpenAPITags, tags).
		Reads(DataFilterRequest{}).
		Writes(MetaInfosTotalReply{}))

	// Query the content of the data.
	ws.Route(ws.POST("/data/_search").To(s.dataSearch).
		Doc(TR("data search")).
		Metadata(api.KeyOpenAPITags, tags).
		Reads(SearchInfo{}).
		Writes(TableData{}))

	// Query metadata info, filtering only by the geom field.
	// ws.Route(ws.POST("/meta/geometry").To(s.geoSearch).
	// 	Doc(TR("meta geometry search")).
	// 	Metadata(api.KeyOpenAPITags, tags).
	// 	Writes(MetaInfosTotalReply{}))

	ws.Route(ws.GET("/meta/id/{data_id}").To(s.metaIdSearch).
		Doc(TR("meta id search")).
		Param(ws.PathParameter("data_id", "data id").DataType("string")).
		Metadata(api.KeyOpenAPITags, tags).
		Writes(TypeMeta{}))

	// This uses Elasticsearch: keyword-based queries plus spatial queries.
	ws.Route(ws.GET("/meta/key/{key}").To(s.keySearch).
		Doc(TR("meta key search")).
		Param(ws.PathParameter("key", "key words").DataType("string")).
		Param(ws.QueryParameter("order", "order").DataType("string")).
		Param(ws.QueryParameter("limit", "limit").DataType("string")).
		Param(ws.QueryParameter("sort", "sort").DataType("string")).
		Param(ws.QueryParameter("offset", "offset").DataType("string")).
		Metadata(api.KeyOpenAPITags, tags).
		Writes(MetaInfosTotalReply{}))

	ws.Route(ws.GET("/dataset/{id}/_search").To(s.datasetIdSearch).
		Doc(TR("dataset id search")).
		Param(ws.PathParameter("id", "dataset id").DataType("string")).
		Param(ws.QueryParameter("key", "key words").DataType("string")).
		Param(ws.QueryParameter("type", "type name").DataType("string")).
		Param(ws.QueryParameter("order", "order").DataType("string")).
		Param(ws.QueryParameter("limit", "limit").DataType("string")).
		Param(ws.QueryParameter("sort", "sort").DataType("string")).
		Param(ws.QueryParameter("offset", "offset").DataType("string")).
		Metadata(api.KeyOpenAPITags, tags).
		Writes(MetaInfosTotalReply{}))

	ws.Route(ws.GET("/marketplace/{id}/_search").To(s.marketIdSearch).
		Doc(TR("marketplace dataset id search")).
		Param(ws.PathParameter("id", "dataset id").DataType("string")).
		Param(ws.QueryParameter("key", "key words").DataType("string")).
		Param(ws.QueryParameter("order", "order").DataType("string")).
		Param(ws.QueryParameter("limit", "limit").DataType("string")).
		Param(ws.QueryParameter("sort", "sort").DataType("string")).
		Param(ws.QueryParameter("offset", "offset").DataType("string")).
		Metadata(api.KeyOpenAPITags, tags).
		Writes(MetaInfosTotalReply{}))

	ws.Route(ws.POST("/meta/type/{type_name}").To(s.typeSearch).
		Doc(TR("meta type search")).
		Param(ws.PathParameter("type_name", "type name").DataType("string")).
		Metadata(api.KeyOpenAPITags, tags).
		Reads(SearchInfo{}).
		Writes(MetaInfosTotalReply{}))

	return ws
}
package restapi

import (
	"bytes"
	"encoding/json"
	"net/http"
	"net/http/httptest"
	"testing"

	"go-openapi/models"
	"go-openapi/restapi/operations"

	"github.com/go-openapi/loads"
)

// getAPI builds the API object from the embedded swagger spec.
func getAPI() (*operations.GoOpenapiAPI, error) {
	swaggerSpec, err := loads.Embedded(SwaggerJSON, FlatSwaggerJSON)
	if err != nil {
		// Return the error as the signature promises; the original called
		// log.Fatalln here, killing the whole test binary and making the
		// error return unreachable.
		return nil, err
	}
	api := operations.NewGoOpenapiAPI(swaggerSpec)
	return api, nil
}

// GetAPIHandler configures and validates the API and returns its HTTP handler.
func GetAPIHandler() (http.Handler, error) {
	api, err := getAPI()
	if err != nil {
		return nil, err
	}
	h := configureAPI(api)
	if err := api.Validate(); err != nil {
		return nil, err
	}
	return h, nil
}

// TestCreateUser posts a user to /v2/user and expects 501 Not Implemented
// (the operation has no implementation yet).
func TestCreateUser(t *testing.T) {
	handler, err := GetAPIHandler()
	if err != nil {
		t.Fatal("get api handler", err)
	}
	ts := httptest.NewServer(handler)
	defer ts.Close()

	user := models.User{
		Email:     "foo@bar.com",
		FirstName: "alex",
		LastName:  "jones",
		Password:  "test",
		Phone:     "555-555",
		Username:  "alex",
	}
	jsonValue, err := json.Marshal(user)
	if err != nil {
		t.Fatal("marshal user:", err)
	}
	resp, err := http.Post(ts.URL+"/v2/user", "application/json", bytes.NewBuffer(jsonValue))
	if err != nil {
		// Bare t.Fatal() gave no diagnostics; include the error.
		t.Fatal("post user:", err)
	}
	// The body was previously never closed, leaking the connection.
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusNotImplemented { // 501
		t.Fatalf("unexpected status: got %d, want %d", resp.StatusCode, http.StatusNotImplemented)
	}
}
package main

import (
	"fmt"
	"ibgame/actions/getui_action"
	"ibgame/actions/user_action"
	"ibgame/logs"
	"net/http"
)

// main registers the HTTP handlers and serves on port 12356.
func main() {
	http.HandleFunc("/login", user_action.Login)
	http.HandleFunc("/register", user_action.Register)
	http.HandleFunc("/pushsingle", getui_action.PushSingle)
	http.HandleFunc("/parseauthtoken", user_action.ParseAuthToken)

	// Announce before serving: ListenAndServe blocks until it fails, so the
	// original "success" branch after it was dead code.
	fmt.Println("监听12356")
	err := http.ListenAndServe(":12356", nil) // set the port to listen on
	if err != nil {
		logs.Error.Println("ListenAndServe:error")
	}
}
package main

import (
	"context"
	"strings"

	"github.com/Masterminds/squirrel"
	"github.com/jmoiron/sqlx"
)

// CollectJoinMethod incorporates calculated checksums by collecting them into
// a scratch table batch-by-batch, then applying them to the files table with
// two bulk join statements (write warnings, then update files).
var CollectJoinMethod = Method{
	Name:                 "collectjoin",
	SupportsBatching:     true,
	SupportsTransactions: false,
	// NOTE(review): 32767 matches a common driver placeholder limit —
	// presumably why this cap was chosen; confirm for the target driver.
	MaxBatchSize: 32767,
	CreateRunnerFunc: func(runnerConfig *RunnerConfig) Runner {
		// One incorporater is shared by all processors so the scratch tables
		// are created/dropped exactly once per run.
		incorporater := &CollectJoinMethodIncorporater{
			runnerConfig: runnerConfig,
		}
		return &RunnerBase{
			RunnerConfig:   runnerConfig,
			ProcessingSize: runnerConfig.BatchSize,
			CreateProcessor: func(nestedRunnerConfig *RunnerConfig) ProcessBatcher {
				return &CollectJoinMethodProcessor{
					runnerConfig: nestedRunnerConfig,
					incorporater: incorporater,
				}
			},
			Prepare:  incorporater.Prepare,
			Finalise: incorporater.Finalise,
			TearDown: incorporater.TearDown,
		}
	},
}

// CollectJoinMethodIncorporater owns the scratch tables and the final
// join/update statements.
type CollectJoinMethodIncorporater struct {
	runnerConfig *RunnerConfig
	replacer     *strings.Replacer // substitutes scratch-table placeholders in queries
}

// Prepare creates both scratch tables; if the second creation fails, the
// first table is torn down again before returning the error.
func (c *CollectJoinMethodIncorporater) Prepare(ctx context.Context) error {
	var err error

	c.replacer = c.createReplacer()

	err = c.calculatedChecksumsCreateTable(ctx, c.runnerConfig.DB)
	if err != nil {
		return err
	}

	err = c.checksumWarningsCreateTable(ctx, c.runnerConfig.DB)
	if err != nil {
		_ = c.TearDown(ctx)
		return err
	}

	return nil
}

// createReplacer builds the placeholder → concrete table name replacer for
// this run's table prefix.
func (c *CollectJoinMethodIncorporater) createReplacer() *strings.Replacer {
	return strings.NewReplacer(
		"{CHECKSUM_WARNINGS}", c.checksumWarningsTableName(),
		"{CALCULATED_CHECKSUMS}", c.calculatedChecksumsTableName(),
	)
}

// substituteAll applies the query's own substitutions (e.g. {FILES} — handled
// by GenericQuery.SubstituteAll) and then this incorporater's scratch-table
// placeholders.
func (c *CollectJoinMethodIncorporater) substituteAll(query GenericQuery) string {
	return c.replacer.Replace(string(query.SubstituteAll(c.runnerConfig.DB)))
}

// writeChecksumWarningsQuery records one warning row for each compared file
// whose stored checksum differs from the newly calculated one. The single
// `?` is bound to the run id (written to the `discovered` column).
const writeChecksumWarningsQuery = GenericQuery(`
INSERT INTO {CHECKSUM_WARNINGS} (
	file_id, path, modification_time, file_size, expected_checksum, actual_checksum, discovered, last_read, created
)
SELECT
	{CALCULATED_CHECKSUMS}.file_id,
	{FILES}.path,
	{FILES}.modification_time,
	{FILES}.file_size,
	{FILES}.checksum,
	{CALCULATED_CHECKSUMS}.checksum,
	?,
	{FILES}.last_read,
	NOW()
FROM {CALCULATED_CHECKSUMS}
LEFT JOIN {FILES} ON {CALCULATED_CHECKSUMS}.file_id = {FILES}.id
WHERE
	{FILES}.to_be_compared = 1
	AND {FILES}.checksum <> {CALCULATED_CHECKSUMS}.checksum
;
`)

// updateFilesQuery stores every calculated checksum on its file row, clears
// the to_be_read/to_be_compared flags, and stamps last_read with the bound
// run id.
const updateFilesQuery = GenericQuery(`
UPDATE {FILES}
RIGHT JOIN {CALCULATED_CHECKSUMS} ON {CALCULATED_CHECKSUMS}.file_id = {FILES}.id
SET
	{FILES}.to_be_read = 0,
	{FILES}.to_be_compared = 0,
	{FILES}.checksum = {CALCULATED_CHECKSUMS}.checksum,
	{FILES}.last_read = ?
;
`)

// Finalise runs the two bulk statements: first record checksum mismatches as
// warnings, then persist the calculated checksums onto the files table.
func (c *CollectJoinMethodIncorporater) Finalise(ctx context.Context) error {
	var err error

	_, err = c.runnerConfig.DB.ExecContext(ctx, c.substituteAll(writeChecksumWarningsQuery), c.runnerConfig.RunID)
	if err != nil {
		return err
	}

	_, err = c.runnerConfig.DB.ExecContext(ctx, c.substituteAll(updateFilesQuery), c.runnerConfig.RunID)
	if err != nil {
		return err
	}

	return nil
}

// TearDown drops both scratch tables, attempting the second drop even when
// the first one fails.
func (c *CollectJoinMethodIncorporater) TearDown(ctx context.Context) error {
	var err error

	err = c.checksumWarningsDropTable(ctx, c.runnerConfig.DB)
	if err != nil {
		_ = c.calculatedChecksumsDropTable(ctx, c.runnerConfig.DB)
		return err
	}

	err = c.calculatedChecksumsDropTable(ctx, c.runnerConfig.DB)
	if err != nil {
		return err
	}

	return nil
}

const calculatedChecksumsTableNameBase = "calculated_checksums"

// calculatedChecksumsTableName returns the prefixed scratch-table name.
func (c *CollectJoinMethodIncorporater) calculatedChecksumsTableName() string {
	return c.runnerConfig.DB.Config.TablePrefix + calculatedChecksumsTableNameBase
}

// Scratch table holding one calculated checksum per file id.
const calculatedChecksumsCreateTableQuery = GenericQuery(`
CREATE TABLE IF NOT EXISTS {CALCULATED_CHECKSUMS} (
	file_id bigint(20) unsigned NOT NULL,
	checksum varbinary(64) NOT NULL,
	PRIMARY KEY (file_id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
`)

func (c *CollectJoinMethodIncorporater) calculatedChecksumsCreateTable(ctx context.Context, execer sqlx.ExecerContext) error {
	_, err := execer.ExecContext(ctx, c.substituteAll(calculatedChecksumsCreateTableQuery))
	return err
}

const calculatedChecksumsDropTableQuery = GenericQuery(`
DROP TABLE IF EXISTS {CALCULATED_CHECKSUMS};
`)

func (c *CollectJoinMethodIncorporater) calculatedChecksumsDropTable(ctx context.Context, execer sqlx.ExecerContext) error {
	_, err := execer.ExecContext(ctx, c.substituteAll(calculatedChecksumsDropTableQuery))
	return err
}

const checksumWarningsTableNameBase = "checksum_warnings"

// checksumWarningsTableName returns the prefixed warnings-table name.
func (c *CollectJoinMethodIncorporater) checksumWarningsTableName() string {
	return c.runnerConfig.DB.Config.TablePrefix + checksumWarningsTableNameBase
}

// Table recording files whose stored and calculated checksums disagree.
const checksumWarningsCreateTableQuery = GenericQuery(`
CREATE TABLE IF NOT EXISTS {CHECKSUM_WARNINGS} (
	id bigint(20) unsigned NOT NULL AUTO_INCREMENT,
	file_id bigint(20) unsigned NOT NULL,
	path varbinary(4096) NOT NULL,
	modification_time datetime(6) NOT NULL,
	file_size bigint(20) unsigned NOT NULL,
	expected_checksum varbinary(64) NOT NULL,
	actual_checksum varbinary(64) NOT NULL,
	discovered bigint(20) unsigned NOT NULL,
	last_read bigint(20) unsigned NOT NULL,
	created datetime(6) NOT NULL,
	PRIMARY KEY (id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
`)

func (c *CollectJoinMethodIncorporater) checksumWarningsCreateTable(ctx context.Context, execer sqlx.ExecerContext) error {
	_, err := execer.ExecContext(ctx, c.substituteAll(checksumWarningsCreateTableQuery))
	return err
}

const checksumWarningsDropTableQuery = GenericQuery(`
DROP TABLE IF EXISTS {CHECKSUM_WARNINGS};
`)

func (c *CollectJoinMethodIncorporater) checksumWarningsDropTable(ctx context.Context, execer sqlx.ExecerContext) error {
	_, err := execer.ExecContext(ctx, c.substituteAll(checksumWarningsDropTableQuery))
	return err
}

// Compile-time check that the processor satisfies ProcessBatcher.
var _ ProcessBatcher = &CollectJoinMethodProcessor{}

// CollectJoinMethodProcessor inserts one batch of calculated checksums into
// the scratch table inside a transaction.
type CollectJoinMethodProcessor struct {
	runnerConfig *RunnerConfig
	incorporater *CollectJoinMethodIncorporater
}

// ProcessBatch bulk-inserts the batch's (file_id, checksum) pairs into the
// scratch table as a single multi-row INSERT.
func (c *CollectJoinMethodProcessor) ProcessBatch(ctx context.Context, batch Batch) error {
	var err error

	calculatedChecksums := batch.Checksums

	tx, err := c.runnerConfig.DB.BeginTxx(ctx, nil)
	if err != nil {
		return err
	}
	// Rollback is a no-op after a successful Commit.
	defer tx.Rollback()

	insert := c.buildInsertBase(tx)
	for id, checksum := range calculatedChecksums {
		insert = insert.Values(
			id,
			checksum,
		)
	}

	_, err = insert.ExecContext(ctx)
	if err != nil {
		return err
	}

	err = tx.Commit()
	if err != nil {
		return err
	}

	return nil
}

// Finalise is a no-op; the incorporater performs the run-level finalisation.
func (_ *CollectJoinMethodProcessor) Finalise(_ context.Context) error {
	return nil
}

// buildInsertBase returns the squirrel INSERT builder targeting the scratch
// table, bound to the given runner (here: the batch transaction).
func (c *CollectJoinMethodProcessor) buildInsertBase(runner squirrel.BaseRunner) squirrel.InsertBuilder {
	return squirrel.Insert(c.incorporater.calculatedChecksumsTableName()).
		Columns(
			"file_id",
			"checksum",
		).
		PlaceholderFormat(squirrel.Question).
		RunWith(runner)
}
package game type Choice struct { Label string `json:"label"` Value string `json:"value"` Correct bool `json:"correct"` } type Question struct { Id int `json:"id"` Сhoices []Choice `json:"choices"` Value string `json:"value"` } type Level struct { Questions []Question `json:"questions"` Prize int `json:"prize"` } type SingleGameLevel struct { Question Question `json:"question"` Prize int `json:"prize"` } type GameConfigs = map[string]Level type SingleGameData = map[string]SingleGameLevel
package main

import (
	"fmt"
)

// main reads (volume, diameter) float pairs from stdin until input ends or is
// malformed, printing the height and base area of a cylinder of that volume.
func main() {
	for {
		var vol float32
		if _, err := fmt.Scanf("%f", &vol); err != nil {
			return
		}
		var diam float32
		if _, err := fmt.Scanf("%f", &diam); err != nil {
			return
		}
		// h = V / (pi * (d/2)^2), written as 4V / (pi * d^2); pi stays 3.14
		// to keep the output identical to the expected judge answer.
		h := 4 * vol / (3.14 * diam * diam)
		fmt.Printf("ALTURA = %.2f\n", h)
		fmt.Printf("AREA = %.2f\n", vol/h)
	}
}
package config

import (
	"reflect"
	"testing"
)

// Tests GetClientDisasterConfig too
// TestGetClientConfig checks that Config.GetClientConfig copies the
// client-visible fields, drops the server-only ones (MaxSeasons, Period), and
// wraps CommonpoolThreshold in SelectivelyVisibleResources according to
// CommonpoolThresholdVisible.
func TestGetClientConfig(t *testing.T) {
	cases := []struct {
		name   string
		config Config
		want   ClientConfig
	}{
		{
			// Threshold visible: Value is carried over and Valid is true.
			name: "all visible",
			config: Config{
				CostOfLiving:                1,
				MinimumResourceThreshold:    2,
				MaxCriticalConsecutiveTurns: 3,
				MaxSeasons:                  4, // not visible
				DisasterConfig: DisasterConfig{
					CommonpoolThreshold:        6,
					CommonpoolThresholdVisible: true,
					Period:                     4, // not visible
				},
			},
			want: ClientConfig{
				CostOfLiving:                1,
				MinimumResourceThreshold:    2,
				MaxCriticalConsecutiveTurns: 3,
				DisasterConfig: ClientDisasterConfig{
					CommonpoolThreshold: SelectivelyVisibleResources{
						Value: 6,
						Valid: true,
					},
				},
			},
		},
		{
			// Threshold hidden: the client sees only Valid == false
			// (Value stays at its zero value).
			name: "all selectively visible invisible",
			config: Config{
				CostOfLiving:                1,
				MinimumResourceThreshold:    2,
				MaxCriticalConsecutiveTurns: 3,
				MaxSeasons:                  4, // not visible
				DisasterConfig: DisasterConfig{
					CommonpoolThreshold:        6,
					CommonpoolThresholdVisible: false,
					Period:                     4, // not visible
				},
			},
			want: ClientConfig{
				CostOfLiving:                1,
				MinimumResourceThreshold:    2,
				MaxCriticalConsecutiveTurns: 3,
				DisasterConfig: ClientDisasterConfig{
					CommonpoolThreshold: SelectivelyVisibleResources{
						Valid: false,
					},
				},
			},
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got := tc.config.GetClientConfig()
			// DeepEqual compares the whole nested structure in one shot.
			if !reflect.DeepEqual(tc.want, got) {
				t.Errorf("want '%v' got '%v'", tc.want, got)
			}
		})
	}
}
package ranged

import (
	"github.com/realm/realm-server/items"
	"github.com/realm/realm-server/items/weapons"
)

// ERangeType defines ERangeType enums underlying type.
type ERangeType string

// ERangeType enums.
const (
	SHORT ERangeType = "Short Range"
	MID   ERangeType = "Mid Range"
	LONG  ERangeType = "Long Range"
)

// rangeTypes maps the short config/JSON labels to their ERangeType values.
var rangeTypes = map[string]ERangeType{
	"Short": SHORT,
	"Mid":   MID,
	"Long":  LONG,
}

// Accuracy is structure for ranged weapon accuracy bonuses.
// S - size Small
// M - size Medium
// L - size Large
type Accuracy struct {
	S int64 `json:"small"`
	M int64 `json:"medium"`
	L int64 `json:"large"`
}

// Range is structure for minimum and maximum usage distances.
type Range struct {
	Min int64 `json:"min"`
	Max int64 `json:"max"`
}

// Ranges is structure for ranged weapon ranges based on size.
// S - size Small
// M - size Medium
// L - size Large
type Ranges struct {
	S Range `json:"small"`
	M Range `json:"medium"`
	L Range `json:"large"`
}

// Munition is structure for all ranged weapon ammunitions.
// It embeds items.Item, so common item fields live on the embedded struct.
type Munition struct {
	items.Item
	Dmg     items.DieRoll       `json:"damage"`
	DmgType weapons.EDamageType `json:"damageType"`
	Amount  int64               `json:"amount"`
}

// Ranged is structure for all ranged weaponry.
// It embeds items.Item; Acc/Rng are per-size tables keyed by wielder size.
type Ranged struct {
	items.Item
	Acc      Accuracy   `json:"accuracy"`
	Rng      Ranges     `json:"range"`
	RngType  ERangeType `json:"rangeType"`
	AmmoType string     `json:"ammoType"`
	LoadType string     `json:"loadType"`
}
package main

import (
	"fmt"
	"grm-service/util"
)

// main classifies a sample string with util.IsNum and prints the verdict.
func main() {
	sample := "1 2345 "
	msg := "not num"
	if util.IsNum(sample) {
		msg = "is num"
	}
	fmt.Println(msg)
}
// Copyright (C) 2019 rameshvk. All rights reserved.
// Use of this source code is governed by a MIT-style license
// that can be found in the LICENSE file.

package code_test

import (
	"github.com/tvastar/gogo/pkg/code"

	"bytes"
	"fmt"
	"go/format"
	"go/token"
)

// Example demonstrates building a Go source file with the code package: a
// function with an if-with-initializer that returns strconv.Itoa(z). The
// generated AST is rendered with go/format and compared against the Output
// comment below (the comment is the test oracle — do not edit it casually).
func Example() {
	x := code.Ident("x")
	y := code.Ident("y")
	z := code.Ident("z")
	n := code.Ident("n")
	// Referencing code.Import("strconv") is what causes the import clause to
	// appear in the generated file.
	strz := code.Return(code.Import("strconv").Dot("Itoa").Call(z))
	file := code.File(
		"example",
		code.Func("testfn").
			WithParam(x, code.Ident("int"), nil).
			WithParam(y, code.Ident("int"), nil).
			WithResult(nil, code.Ident("string"), nil).
			WithBody(code.If2(n.Assign(":=", x), n.Op("<", y)).Then(strz)),
	)
	var buf bytes.Buffer
	node := file.MarshalNode(code.RootScope())
	if err := format.Node(&buf, &token.FileSet{}, node); err != nil {
		fmt.Println("Unexpected error", err)
	}
	fmt.Println(buf.String())

	// Output:
	// package example
	//
	// import "strconv"
	//
	// func () testfn(x int, y int) (string) {
	//	if n := x; n < y {
	//		return strconv.Itoa(z)
	//	}
	// }
}
package main

import (
	"crypto/rand"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"math/big"
	"os"
	"strconv"
	"strings"
	"time"

	"github.com/docopt/docopt-go"
	"github.com/nsf/termbox-go"
)

const (
	usage = `Short 1.0, short term memory tester.

Usage:
    ./short [options]

Options:
    -f <file>    use specified file as database [default: ~/.config/short-term].
    -n <number>  show specified count of tests [default: 20].
    -c <count>   show specified count of numbers in tests [default: 7].
    -i <min>     use specified number as minimum value of number [default: 10]
    -a <max>     use specified number as maximum value of number [default: 99]
`
)

// test result
// Result is one test round: how many leading numbers were recalled correctly,
// how long the numbers were displayed, and how many numbers were shown.
type Result struct {
	Score    int     `json:"score"`
	Duration float64 `json:"duration"`
	Count    int     `json:"count"`
}

// main parses CLI flags, runs the requested number of memory tests inside a
// termbox session, prints the average score/duration, and appends the run to
// the JSON database file.
func main() {
	args, _ := docopt.Parse(usage, nil, true, "1.0", false)

	file := args["-f"].(string)
	// Expand a leading "~/" to the user's home directory.
	if file[:2] == "~/" {
		file = os.Getenv("HOME") + file[1:]
	}

	var (
		testsCount, _   = strconv.Atoi(args["-n"].(string))
		numbersCount, _ = strconv.Atoi(args["-c"].(string))
		minNumber, _    = strconv.Atoi(args["-i"].(string))
		maxNumber, _    = strconv.Atoi(args["-a"].(string))
	)

	err := termbox.Init()
	if err != nil {
		panic(err)
	}

	clearScreen()

	results := []Result{}
	for i := 0; i < testsCount; i++ {
		result := runTest(minNumber, maxNumber, numbersCount)
		results = append(results, result)
	}

	var (
		sumScore    int
		sumDuration float64
	)

	for _, result := range results {
		sumScore += result.Score
		sumDuration += result.Duration
	}

	avgDuration := sumDuration / float64(len(results))
	avgScore := float64(sumScore) / float64(len(results))

	termbox.Close()

	fmt.Printf("Score: %.2f (%.2f sec)\n", avgScore, avgDuration)

	saveResults(file, results, sumScore, avgDuration)
}

// saveResults appends this run to the JSON array stored in file, creating the
// file if needed.
//
// NOTE(review): the new content is written with WriteAt(content, 0) without
// truncating the file first — if the new JSON is ever shorter than the old
// contents, stale trailing bytes would corrupt the file. In practice the array
// only grows, but os.File.Truncate before writing would be safer; confirm.
func saveResults(
	file string,
	results []Result,
	totalScore int,
	avgDuration float64,
) {
	type DatabaseItem struct {
		Date        string   `json:"date"`
		AvgDuration float64  `json:"avg_duration"`
		TotalScore  int      `json:"total_score"`
		Results     []Result `json:"results"`
	}

	fd, err := os.OpenFile(file, os.O_RDWR|os.O_CREATE, 0600)
	if err != nil {
		panic(err)
	}

	defer fd.Close()

	content, err := ioutil.ReadAll(fd)
	if err != nil {
		panic(err)
	}

	database := []DatabaseItem{}

	// Unmarshal error deliberately ignored: a missing/empty file just leaves
	// database empty.
	json.Unmarshal(content, &database)

	database = append(database, DatabaseItem{
		Date:        time.Now().String(),
		AvgDuration: avgDuration,
		TotalScore:  totalScore,
		Results:     results,
	})

	content, err = json.Marshal(database)
	if err != nil {
		panic(err)
	}

	fd.WriteAt(content, 0)
}

// runTest shows numbersCount random numbers centered on screen, times how
// long the user looks at them (until Enter), then hides them and scores the
// user's typed recall against the originals.
func runTest(minNumber, maxNumber, numbersCount int) Result {
	validNumbers := generateRandomNumbers(
		minNumber, maxNumber, numbersCount,
	)

	numberStrings := []string{}
	for _, number := range validNumbers {
		numberStrings = append(numberStrings, strconv.Itoa(number))
	}

	wholeTest := strings.Join(numberStrings, " ")

	width, height := termbox.Size()

	timeStart := time.Now()

	x := width/2 - len(wholeTest)/2
	y := height / 2

	termbox.SetCursor(x, y)

	for _, symbol := range wholeTest {
		x += 1
		termbox.SetCell(
			x, y,
			symbol,
			termbox.ColorDefault, termbox.ColorDefault,
		)
	}

	termbox.HideCursor()
	termbox.Flush()

	wait() //wait for input 'Enter'

	timeFinish := time.Now()

	clearScreen()

	termbox.SetCursor(x-len(wholeTest)+1, y)
	termbox.Flush()

	userNumbers := getNumbers(x-len(wholeTest), y)

	clearScreen()

	score := compare(validNumbers, userNumbers)
	duration := timeFinish.Sub(timeStart).Seconds()

	return Result{
		score,
		duration,
		numbersCount,
	}
}

// generateRandomNumbers returns count random ints in [min, max).
//
// NOTE(review): rand.Int(..., big.NewInt(int64(max))) yields values in
// [0, max), so max itself is never produced; values below min are retried.
// If the inclusive upper bound from the usage text is intended, the argument
// should be max+1 — confirm.
func generateRandomNumbers(min, max, count int) []int {
	numbers := []int{}
	for i := 0; i < count; i++ {
		bigNumber, _ := rand.Int(rand.Reader, big.NewInt(int64(max)))
		number := int(bigNumber.Int64())
		if number < min {
			i--
			continue
		}

		numbers = append(numbers, number)
	}

	return numbers
}

// getNumbers reads a space-separated line of digits from the user at (x, y)
// and parses it into ints (unparsable pieces become 0 via the ignored error).
func getNumbers(x, y int) []int {
	numbers := []int{}

	text := readText(x, y)
	pieces := strings.Split(text, " ")
	for _, piece := range pieces {
		number, _ := strconv.Atoi(piece)
		numbers = append(numbers, number)
	}

	return numbers
}

// readText is a tiny line editor on top of termbox: digits and space append,
// Backspace deletes, Enter submits, Ctrl+C/Ctrl+Z exit the program.
func readText(x, y int) string {
	text := ""

	for {
		event := termbox.PollEvent()
		if event.Type != termbox.EventKey {
			continue
		}

		if event.Ch >= '0' && event.Ch <= '9' {
			text += string(event.Ch)
		}

		switch event.Key {
		case termbox.KeySpace:
			text += " "

		case termbox.KeyBackspace2:
			// break here leaves only the switch; the redraw below still runs.
			if len(text) == 0 {
				break
			}

			text = text[0 : len(text)-1]

			clearScreen()
			printText(text, x, y)

		case termbox.KeyEnter:
			return text

		case termbox.KeyCtrlC, termbox.KeyCtrlZ:
			termbox.Close()
			os.Exit(0)
		}

		printText(text, x, y)
	}
}

// compare scores the recall: one point per position that matches, counting
// only the leading run — scoring stops at the first mismatch by design.
func compare(validNumbers, inputNumbers []int) (score int) {
	length := len(inputNumbers)
	if len(validNumbers) < length {
		length = len(validNumbers)
	}

	for index := 0; index < length; index++ {
		if validNumbers[index] == inputNumbers[index] {
			score++
		} else {
			break
		}
	}

	return score
}

// clearScreen wipes the termbox buffer and flushes it to the terminal.
func clearScreen() {
	termbox.Clear(termbox.ColorDefault, termbox.ColorDefault)
	err := termbox.Flush()
	if err != nil {
		panic(err)
	}
}

// just wait for any user input (like 'Press Enter to continue')
func wait() {
	for {
		event := termbox.PollEvent()
		if event.Type != termbox.EventKey {
			continue
		}

		switch event.Key {
		case termbox.KeyEnter:
			return

		case termbox.KeyCtrlC, termbox.KeyCtrlZ:
			termbox.Close()
			os.Exit(0)
		}
	}
}

// printText renders text starting just right of (x, y) and parks the cursor
// after the last character.
func printText(text string, x, y int) {
	termbox.SetCursor(x, y)

	for _, symbol := range text {
		x += 1
		termbox.SetCell(
			x, y,
			symbol,
			termbox.ColorDefault, termbox.ColorDefault,
		)
	}

	termbox.SetCursor(x+1, y)

	termbox.Flush()
}
package main

import (
	"bytes"
	"crypto/tls"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"image"
	"image/jpeg"
	"io/ioutil"
	"log"
	"net/http"
	"os"
	"strings"
	"time"
)

// main starts an HTTPS server on :8888 that accepts a base64-encoded JPEG,
// saves it to disk, forwards it to a local TensorFlow Serving instance, and
// returns the top detected class (mapped to a product name) as JSON.
func main() {
	conf := &tls.Config{
		// MinVersion:       tls.VersionTLS12,
		MinVersion: tls.VersionTLS11,
		// MinVersion:       tls.VersionTLS10, // weak, only for xp
		CurvePreferences:         []tls.CurveID{tls.CurveP521, tls.CurveP384, tls.CurveP256},
		PreferServerCipherSuites: true,
		InsecureSkipVerify:       true,
		CipherSuites: []uint16{
			tls.TLS_RSA_WITH_RC4_128_SHA,
			tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA,
			tls.TLS_RSA_WITH_AES_128_CBC_SHA,
			tls.TLS_RSA_WITH_AES_256_CBC_SHA,
			tls.TLS_RSA_WITH_AES_128_CBC_SHA256,
			tls.TLS_RSA_WITH_AES_128_GCM_SHA256,
			tls.TLS_RSA_WITH_AES_256_GCM_SHA384,
			tls.TLS_ECDHE_ECDSA_WITH_RC4_128_SHA,
			tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
			tls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
			tls.TLS_ECDHE_RSA_WITH_RC4_128_SHA,
			tls.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA,
			tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
			tls.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
			tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
			tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
			tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
			tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
			tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
			tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
			tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,
			tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,
		},
	}
	server := &http.Server{
		Addr:         ":8888",
		TLSConfig:    conf,
		TLSNextProto: make(map[string]func(*http.Server, *tls.Conn, http.Handler), 0),
		WriteTimeout: time.Duration(10) * time.Second,
		ReadTimeout:  time.Duration(10) * time.Second,
	}

	router := http.NewServeMux()
	server.Handler = router

	router.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		// NOTE(review): log.Fatalln/log.Fatal inside a handler kills the
		// whole server on one bad request — http.Error + return would be the
		// resilient choice; left unchanged here to preserve behavior.
		body, err := ioutil.ReadAll(r.Body)
		if err != nil {
			log.Fatalln(err)
		}
		var instances Instance
		json.Unmarshal(body, &instances)

		// Decode the first instance's base64 payload into an image.
		reader := base64.NewDecoder(base64.StdEncoding, strings.NewReader(instances.Instances[0].B64))
		m, _, err := image.Decode(reader)
		if err != nil {
			log.Fatal(err)
		}

		// outputFile is a File type which satisfies Writer interface
		t := time.Now()
		fname := t.Format("20060102150405")
		outputFile, err := os.Create("images/" + fname + ".jpg")
		if err != nil {
			// Handle error
		}

		// Encode takes a writer interface and an image interface
		// We pass it the File and the RGBA
		jpeg.Encode(outputFile, m, &jpeg.Options{Quality: 80})

		// Don't forget to close files
		outputFile.Close()

		// Forward the original request body to TensorFlow Serving.
		res, err := tfxRequest(body)
		if err != nil {
			log.Fatal(err)
		}
		tfres := &TFResult{}
		json.Unmarshal(res, tfres)
		w.Header().Add("Access-Control-Allow-Origin", "*")
		// class := fmt.Sprintf("%f", tfres.Predictions[0].Detection_classes[0])
		// score := fmt.Sprintf("%f", tfres.Predictions[0].Detection_scores[0])
		// fmt.Println(class, score)

		class := tfres.Predictions[0].Detection_classes[0]
		score := tfres.Predictions[0].Detection_scores[0] * 100
		className := "Unknown"
		fmt.Printf("%v, %v\n", class, score)
		// Map the detected class id to a product name, but only above a 70%
		// confidence threshold.
		if score > 70 {
			if class == 91 {
				className = "핑크가전놀이(세탁기)"
			} else if class == 92 {
				className = "백양나무슈즈스트레쳐(남성화용)"
			} else if class == 93 {
				className = "핑크가전놀이(전자렌지)"
			} else if class == 94 {
				className = "핑크가전놀이(전기밥솥)"
			} else if class == 95 {
				className = "핑크가전놀이(커피포트)"
			} else if class == 96 {
				className = "핑크가전놀이(청소기)"
			} else if class == 97 {
				className = "핑크가전놀이(반죽기)"
			} else if class == 98 {
				className = "PU퍼프4PSET(물방울)"
			} else if class == 99 {
				className = "PU퍼프4PSET(블렌딩)"
			} else if class == 100 {
				className = "에끌라깨끗한물티슈150매(캡형)"
			}
		}

		itm := &Item{
			ClassName: className,
			Class:     class,
			Score:     score,
		}
		resBody, err := json.Marshal(itm)
		if err != nil {
			log.Fatal(err)
		}
		w.Write(resBody)
	})

	log.Fatal(server.ListenAndServeTLS("certs/server.pem", "certs/server.key"))
}

// tfxRequest POSTs the raw request body to the local TensorFlow Serving
// predict endpoint and returns the response body.
func tfxRequest(data []byte) ([]byte, error) {
	buff := bytes.NewBuffer(data)
	// resp, err := http.Post("http://127.0.0.1:8501/v1/models/adgds/versions/2:predict", "application/json", buff)
	resp, err := http.Post("http://127.0.0.1:8501/v1/models/adgds:predict", "application/json", buff)
	if err != nil {
		// BUG FIX: previously the error was only logged and execution fell
		// through to `defer resp.Body.Close()` with resp == nil, causing a
		// nil-pointer panic whenever TF Serving was unreachable. Return the
		// error instead of dereferencing a nil response.
		return nil, err
	}
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	return body, nil
}

// var payload = {'instances': [{'b64': inputString}]}

// Instance mirrors the TF Serving request envelope.
type Instance struct {
	Instances []B64EncodedImageString `json:"instances"`
}

// B64EncodedImageString holds one base64-encoded image payload.
type B64EncodedImageString struct {
	B64 string `json:"b64"`
}

// Item is the JSON response returned to the browser client.
type Item struct {
	ClassName string  `json:"class_name"`
	Class     float64 `json:"class"`
	Score     float64 `json:"score"`
}

// TFResult mirrors the TF Serving predict response.
type TFResult struct {
	Predictions PredictionList `json:"predictions"`
}

// Prediction is one detection result from the model.
type Prediction struct {
	Raw_detection_boxes [][]float64 `json:"raw_detection_boxes"`
	Detection_scores    []float64   `json:"detection_scores"`
	Detection_classes   []float64   `json:"detection_classes"`
}

// PredictionList is the ordered list of predictions for one request.
type PredictionList []Prediction
package audit

import (
	"encoding/json"
	"testing"
)

// TestSchemaVersionMarshal verifies that a marshalled audit record always
// round-trips with SchemaVersion normalised to CurrentSchemaVersion, even
// when the in-memory value started out wrong.
func TestSchemaVersionMarshal(t *testing.T) {
	record := AuditLog{
		SchemaVersion: -1000,
	}

	encoded, err := json.Marshal(record)
	if err != nil {
		t.Fatalf("could not marshal audit log as JSON: %s", err)
	}

	if err := json.Unmarshal(encoded, &record); err != nil {
		t.Fatalf("could not unmarshal audit log from JSON: %s", err)
	}

	if int(record.SchemaVersion) != CurrentSchemaVersion {
		t.Errorf("expected all audit log records to marshal with schema version %d, but this one had %d", CurrentSchemaVersion, record.SchemaVersion)
	}
}
package cvetool

import (
	"glsamaker/pkg/app/handler/authentication"
	"glsamaker/pkg/app/handler/authentication/utils"
	"glsamaker/pkg/database/connection"
	"glsamaker/pkg/logger"
	"glsamaker/pkg/models/cve"
	"net/http"
	"strconv"
	"time"
)

// Show renders a template to show the landing page of the application

// New handles the "add CVE" page: GET (or any invalid submission) renders the
// form; a valid POST upserts a manually-created CVE record and redirects back
// to the tool. Requires the AddCVE permission.
func New(w http.ResponseWriter, r *http.Request) {
	user := utils.GetAuthenticatedUser(r)
	if !user.Permissions.CVETool.AddCVE {
		authentication.AccessDenied(w, r)
		return
	}

	id, baseScore, summary, err := getNewCVEParams(r)
	// FIX: local renamed from the typo `baseScorErr`.
	parsedBaseScore, baseScoreErr := strconv.ParseFloat(baseScore, 64)

	// Fall back to the form on GET or on any invalid/missing input.
	if r.Method == "GET" || err != nil || baseScoreErr != nil || id == "" {
		renderNewCVETemplate(w, user)
		return
	}

	newCVE := &cve.DefCveItem{
		Id:             id,
		State:          "New",
		Configurations: nil,
		Cve: cve.CVE{
			Affects:     nil,
			CVEDataMeta: nil,
			DataFormat:  "",
			DataType:    "",
			DataVersion: "",
			Description: nil,
			Problemtype: nil,
			References:  &cve.References{ReferenceData: []*cve.Reference{}},
		},
		Description: summary,
		Impact: &cve.DefImpact{
			BaseMetricV3: cve.BaseMetricV3{
				CvssV3: cve.CvssV3{
					BaseScore: parsedBaseScore,
				},
			},
		},
		LastModifiedDate: time.Now().String(),
		PublishedDate:    time.Now().String(),
		ManuallyCreated:  true,
		Comments:         nil,
		Packages:         nil,
		Bugs:             nil,
	}

	// NOTE(review): insert failures are only logged and the redirect still
	// happens — the user gets no feedback; confirm this best-effort behavior
	// is intended.
	_, err = connection.DB.Model(newCVE).OnConflict("(id) DO UPDATE").Insert()
	if err != nil {
		logger.Error.Println("Err during CVE insert")
		logger.Error.Println(err)
	}

	// FIX: named status constant instead of magic 301 (same value/behavior).
	http.Redirect(w, r, "/cve/tool", http.StatusMovedPermanently)
}

// getNewCVEParams extracts the id, base score and summary form fields from
// the request, returning the ParseForm error if parsing failed.
func getNewCVEParams(r *http.Request) (string, string, string, error) {
	err := r.ParseForm()
	if err != nil {
		return "", "", "", err
	}
	id := r.Form.Get("id")
	basescore := r.Form.Get("basescore")
	summary := r.Form.Get("summary")
	return id, basescore, summary, err
}
package sql import "testing" func TestGetBulkInsertSqlStr(t *testing.T) { var columns []string var values [][]interface{} columns = append(columns, "`user_name`") columns = append(columns, "`user_age`") columns = append(columns, "`user_sex`") var oneColumn []interface{} oneColumn = append(oneColumn, "trump") oneColumn = append(oneColumn, 18.1) oneColumn = append(oneColumn, 0) var twoColumn []interface{} twoColumn = append(twoColumn, "trump2") twoColumn = append(twoColumn, 18.2) twoColumn = append(twoColumn, 1) var threeColumn []interface{} threeColumn = append(threeColumn, "trump3") threeColumn = append(threeColumn, 18.3) threeColumn = append(threeColumn, 2) values = append(values, oneColumn) values = append(values, twoColumn) values = append(values, threeColumn) type args struct { table string columns []string values [][]interface{} } tests := []struct { name string args args want string }{ {"case1", args{ table: "user", columns: columns, values: values, }, ""}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := GetBulkInsertSqlStr(tt.args.table, tt.args.columns, tt.args.values...); got == tt.want { t.Errorf("GetBulkInsertSqlStr() got empty") } }) } }
package main

import (
	"fmt"
	"github.com/go-chef/chef"
	"io/ioutil"
	"os"
)

// main fetches one key from an encrypted Chef data bag. It takes a single
// path argument (parsed by parsePath into bag/item/key), authenticates with
// credentials taken from CHEF_* environment variables, downloads the item,
// decrypts the requested key, and prints the plaintext to stdout.
func main() {
	if len(os.Args) != 2 {
		printAndExit(fmt.Errorf("No argument given"))
	}

	variablePath := os.Args[1]

	bagName, bagItem, keyName, err := parsePath(variablePath)
	if err != nil {
		printAndExit(fmt.Errorf("Path '%s' is invalid", variablePath))
	}

	// Required environment: node name, client key, server URL and the
	// data-bag decryption key path (readEnvVar is defined elsewhere in this
	// package and presumably exits when the variable is missing — confirm).
	nodeName := readEnvVar("CHEF_NODE_NAME")
	clientKeyPath := readEnvVar("CHEF_CLIENT_KEY_PATH")
	serverUrl := readEnvVar("CHEF_SERVER_URL")
	decryptionKeyPath := readEnvVar("CHEF_DECRYPTION_KEY_PATH")

	key, err := ioutil.ReadFile(clientKeyPath)
	if err != nil {
		printAndExit(err)
	}

	decryptionKey, err := ioutil.ReadFile(decryptionKeyPath)
	if err != nil {
		printAndExit(err)
	}

	client, err := chef.NewClient(&chef.Config{
		Name:    nodeName,
		Key:     string(key),
		BaseURL: fmt.Sprintf("%s/foo", serverUrl), // /foo is needed here because of how URLs are parsed by go-chef
		SkipSSL: (os.Getenv("CHEF_SKIP_SSL") == "1"),
	})
	if err != nil {
		printAndExit(err)
	}

	item, err := client.DataBags.GetItem(bagName, bagItem)
	if err != nil {
		printAndExit(err)
	}

	encrypted := NewEncryptedDataBagItem(item)

	unencrypted, err := encrypted.DecryptKey(keyName, decryptionKey)
	if err != nil {
		printAndExit(err)
	}

	// Print without a trailing newline so the output can be captured as-is.
	fmt.Print(unencrypted)
}
// Copyright (C) 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package dependencygraph2

import (
	"context"
	"testing"

	"github.com/google/gapid/core/assert"
	"github.com/google/gapid/core/memory/arena"
	"github.com/google/gapid/core/os/device"
	"github.com/google/gapid/gapis/api"
	"github.com/google/gapid/gapis/capture"
	"github.com/google/gapid/gapis/replay/builder"
)

// TestCmd is a minimal no-op api.Cmd stub used to drive the graph builder.
type TestCmd struct{}

func (TestCmd) API() api.API                                              { return nil }
func (TestCmd) Caller() api.CmdID                                         { return 0 }
func (TestCmd) SetCaller(api.CmdID)                                       {}
func (TestCmd) Thread() uint64                                            { return 0 }
func (TestCmd) SetThread(uint64)                                          {}
func (TestCmd) CmdName() string                                           { return "TestCmd" }
func (TestCmd) CmdFlags(context.Context, api.CmdID, *api.GlobalState) api.CmdFlags { return 0 }
func (TestCmd) Extras() *api.CmdExtras                                    { return &api.CmdExtras{} }
func (TestCmd) Mutate(context.Context, api.CmdID, *api.GlobalState, *builder.Builder, api.StateWatcher) error {
	return nil
}
func (TestCmd) Clone(arena.Arena) api.Cmd  { return TestCmd{} }
func (TestCmd) Alive() bool                { return false }
func (TestCmd) CmdParams() api.Properties  { return api.Properties{} }
func (TestCmd) CmdResult() *api.Property   { return nil }

// TestRef is a reference stub carrying only a unique RefID.
type TestRef struct {
	refID api.RefID
}

func (ref TestRef) RefID() api.RefID {
	return ref.refID
}

// newTestRef allocates a TestRef with a fresh unique RefID.
func newTestRef() TestRef {
	return TestRef{api.NewRefID()}
}

// FIELD_A_B names the fragment "field B of class A" for fragment accesses.
type FIELD_A_B struct{}

func (FIELD_A_B) ClassName() string { return "A" }
func (FIELD_A_B) FieldName() string { return "B" }

// FIELD_B_C names the fragment "field C of class B" for fragment accesses.
type FIELD_B_C struct{}

func (FIELD_B_C) ClassName() string { return "B" }
func (FIELD_B_C) FieldName() string { return "C" }

// TestBuilder replays a sequence of fragment reads/writes through the
// dependency graph builder and checks, after each stage, that the computed
// command dependencies equal a hand-built expected graph.
func TestBuilder(t *testing.T) {
	ctx := context.Background()
	// cmds := make([]api.Cmd, 6)
	c := &capture.Capture{
		Name: "test",
		Header: &capture.Header{
			ABI: device.LinuxX86_64,
		},
		Commands:     []api.Cmd{TestCmd{}, TestCmd{}, TestCmd{}, TestCmd{}, TestCmd{}, TestCmd{}},
		InitialState: &capture.InitialState{},
	}
	b := newDependencyGraphBuilder(ctx, DependencyGraphConfig{}, c, []api.Cmd{})
	// eg is the expected graph the builder's output is compared against.
	eg := newDependencyGraph(ctx, DependencyGraphConfig{}, c, []api.Cmd{})
	// root := api.RefID(1)
	// b.AddRefRoot("R", root)
	getNodeID := func(cmdID uint64) NodeID {
		return b.graph.GetNodeID(CmdNode{api.SubCmdIdx{cmdID}})
	}
	refA, refB, refC := newTestRef(), newTestRef(), newTestRef()

	// Cmd 0 writes A.B = refB; Cmd 1 writes B.C = refC.
	b.OnBeginCmd(ctx, 0, TestCmd{})
	b.OnSet(ctx, refA, api.FieldFragment{FIELD_A_B{}}, api.NilReference{}, refB)
	b.OnEndCmd(ctx, 0, TestCmd{})

	b.OnBeginCmd(ctx, 1, TestCmd{})
	b.OnSet(ctx, refB, api.FieldFragment{FIELD_B_C{}}, api.NilReference{}, refC)
	b.OnEndCmd(ctx, 1, TestCmd{})

	// Cmd 2 reads both fragments, so it must depend on both writers.
	b.OnBeginCmd(ctx, 2, TestCmd{})
	b.OnGet(ctx, refA, api.FieldFragment{FIELD_A_B{}}, refB)
	b.OnGet(ctx, refB, api.FieldFragment{FIELD_B_C{}}, refC)
	b.OnEndCmd(ctx, 2, TestCmd{})

	eg.setDependencies(getNodeID(2), []NodeID{getNodeID(0), getNodeID(1)})
	// eg.Paths = b.graph.Paths
	assert.To(t).For("Reading struct should depend on writes to fields after last write to struct (0)").That(
		b.graph).DeepEquals(eg)

	// Cmd 3 reads only B.C, so it depends only on cmd 1.
	b.OnBeginCmd(ctx, 3, TestCmd{})
	b.OnGet(ctx, refB, api.FieldFragment{FIELD_B_C{}}, refC)
	b.OnEndCmd(ctx, 3, TestCmd{})

	eg.setDependencies(getNodeID(3), []NodeID{getNodeID(1)})
	// eg.Paths = b.graph.Paths
	assert.To(t).For("Reading field should not depend on write to struct before last write to field").That(
		b.graph).DeepEquals(eg)

	// Cmd 4 rewrites A.B; cmd 5 reads it and must depend on cmd 4 only.
	b.OnBeginCmd(ctx, 4, TestCmd{})
	b.OnSet(ctx, refA, api.FieldFragment{FIELD_A_B{}}, refB, refB)
	b.OnEndCmd(ctx, 4, TestCmd{})

	b.OnBeginCmd(ctx, 5, TestCmd{})
	b.OnGet(ctx, refA, api.FieldFragment{FIELD_A_B{}}, refB)
	b.OnEndCmd(ctx, 5, TestCmd{})

	eg.setDependencies(getNodeID(5), []NodeID{getNodeID(4)})
	// eg.Paths = b.graph.Paths
	assert.To(t).For("Reading field should only depend on write to struct after list write to field").That(
		b.graph).DeepEquals(eg)
}
package database

import (
	"time"
)

// DB is the minimal key/value store contract used by this package.
//
// NOTE(review): boolean results are assumed to report operation success, and
// the Duration values are assumed to be time-to-live spans — confirm against
// the concrete implementations.
type DB interface {
	// Connect opens the underlying store connection.
	Connect() bool
	// Disconnect closes the connection.
	Disconnect() bool
	// Find returns the value stored under key.
	Find(key string) string
	// Set stores val under key.
	Set(key string, val interface{}) bool
	// Delete removes key.
	Delete(key string) bool
	// Expire sets a time-to-live on key.
	Expire(key string, val time.Duration) bool
	// TTL reports the remaining time-to-live of key.
	TTL(key string) time.Duration
}
package migration

import (
	"context"
	"reflect"
	"testing"

	providerv1alpha1 "github.com/giantswarm/apiextensions/pkg/apis/provider/v1alpha1"
	"github.com/giantswarm/micrologger/microloggertest"
)

// Test_migrateSpec verifies that Resource.migrateSpec fills in defaultable
// fields (credential secret, hosted zones derived from the API domain,
// scaling bounds) while leaving already-populated fields untouched.
func Test_migrateSpec(t *testing.T) {
	testCases := []struct {
		name         string
		spec         providerv1alpha1.AWSConfigSpec
		expectedSpec providerv1alpha1.AWSConfigSpec
		errorMatcher func(err error) bool
	}{
		{
			name: "case 0: fill missing fields",
			spec: providerv1alpha1.AWSConfigSpec{
				AWS: providerv1alpha1.AWSConfigSpecAWS{
					Workers: []providerv1alpha1.AWSConfigSpecAWSNode{
						providerv1alpha1.AWSConfigSpecAWSNode{},
					},
				},
				Cluster: providerv1alpha1.Cluster{
					Kubernetes: providerv1alpha1.ClusterKubernetes{
						API: providerv1alpha1.ClusterKubernetesAPI{
							Domain: "api.eggs2.k8s.gauss.eu-central-1.aws.gigantic.io",
						},
					},
				},
			},
			expectedSpec: providerv1alpha1.AWSConfigSpec{
				AWS: providerv1alpha1.AWSConfigSpecAWS{
					CredentialSecret: providerv1alpha1.CredentialSecret{
						Name:      "credential-default",
						Namespace: "giantswarm",
					},
					HostedZones: providerv1alpha1.AWSConfigSpecAWSHostedZones{
						API: providerv1alpha1.AWSConfigSpecAWSHostedZonesZone{
							Name: "gauss.eu-central-1.aws.gigantic.io",
						},
						Etcd: providerv1alpha1.AWSConfigSpecAWSHostedZonesZone{
							Name: "gauss.eu-central-1.aws.gigantic.io",
						},
						Ingress: providerv1alpha1.AWSConfigSpecAWSHostedZonesZone{
							Name: "gauss.eu-central-1.aws.gigantic.io",
						},
					},
					Workers: []providerv1alpha1.AWSConfigSpecAWSNode{
						providerv1alpha1.AWSConfigSpecAWSNode{},
					},
				},
				Cluster: providerv1alpha1.Cluster{
					Kubernetes: providerv1alpha1.ClusterKubernetes{
						API: providerv1alpha1.ClusterKubernetesAPI{
							Domain: "api.eggs2.k8s.gauss.eu-central-1.aws.gigantic.io",
						},
					},
					Scaling: providerv1alpha1.ClusterScaling{
						Max: 1,
						Min: 1,
					},
				},
			},
			errorMatcher: nil,
		},
		{
			name: "case 1: not mess with fields already set",
			spec: providerv1alpha1.AWSConfigSpec{
				AWS: providerv1alpha1.AWSConfigSpecAWS{
					CredentialSecret: providerv1alpha1.CredentialSecret{
						Name:      "test-credential",
						Namespace: "test-credential-namespace",
					},
					HostedZones: providerv1alpha1.AWSConfigSpecAWSHostedZones{
						API: providerv1alpha1.AWSConfigSpecAWSHostedZonesZone{
							Name: "test-api.gigantic.io",
						},
						Etcd: providerv1alpha1.AWSConfigSpecAWSHostedZonesZone{
							Name: "test-etcd.gigantic.io",
						},
						Ingress: providerv1alpha1.AWSConfigSpecAWSHostedZonesZone{
							Name: "test-ingress.gigantic.io",
						},
					},
					Workers: []providerv1alpha1.AWSConfigSpecAWSNode{
						providerv1alpha1.AWSConfigSpecAWSNode{},
					},
				},
				Cluster: providerv1alpha1.Cluster{
					Kubernetes: providerv1alpha1.ClusterKubernetes{
						API: providerv1alpha1.ClusterKubernetesAPI{
							Domain: "api.eggs5.k8s.gauss.eu-central-1.aws.gigantic.io",
						},
					},
					Scaling: providerv1alpha1.ClusterScaling{
						Max: 4,
						Min: 2,
					},
				},
			},
			expectedSpec: providerv1alpha1.AWSConfigSpec{
				AWS: providerv1alpha1.AWSConfigSpecAWS{
					CredentialSecret: providerv1alpha1.CredentialSecret{
						Name:      "test-credential",
						Namespace: "test-credential-namespace",
					},
					HostedZones: providerv1alpha1.AWSConfigSpecAWSHostedZones{
						API: providerv1alpha1.AWSConfigSpecAWSHostedZonesZone{
							Name: "test-api.gigantic.io",
						},
						Etcd: providerv1alpha1.AWSConfigSpecAWSHostedZonesZone{
							Name: "test-etcd.gigantic.io",
						},
						Ingress: providerv1alpha1.AWSConfigSpecAWSHostedZonesZone{
							Name: "test-ingress.gigantic.io",
						},
					},
					Workers: []providerv1alpha1.AWSConfigSpecAWSNode{
						providerv1alpha1.AWSConfigSpecAWSNode{},
					},
				},
				Cluster: providerv1alpha1.Cluster{
					Kubernetes: providerv1alpha1.ClusterKubernetes{
						API: providerv1alpha1.ClusterKubernetesAPI{
							Domain: "api.eggs5.k8s.gauss.eu-central-1.aws.gigantic.io",
						},
					},
					Scaling: providerv1alpha1.ClusterScaling{
						Max: 4,
						Min: 2,
					},
				},
			},
			errorMatcher: nil,
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			r := Resource{
				logger: microloggertest.New(),
			}

			err := r.migrateSpec(context.Background(), &tc.spec)

			switch {
			case err == nil && tc.errorMatcher == nil:
				// correct; carry on
			case err != nil && tc.errorMatcher == nil:
				t.Fatalf("error == %#v, want nil", err)
			case err == nil && tc.errorMatcher != nil:
				t.Fatalf("error == nil, want non-nil")
			case !tc.errorMatcher(err):
				t.Fatalf("error == %#v, want matching", err)
			}

			if tc.errorMatcher != nil {
				return
			}

			if !reflect.DeepEqual(tc.spec, tc.expectedSpec) {
				t.Errorf("spec == %q, want %q", tc.spec, tc.expectedSpec)
			}
		})
	}
}

// Test_zoneFromAPIDomain checks extraction of the hosted-zone name from a
// Kubernetes API domain, including the too-short error case.
func Test_zoneFromAPIDomain(t *testing.T) {
	testCases := []struct {
		name         string
		apiDomain    string
		expectedZone string
		errorMatcher func(err error) bool
	}{
		{
			name:         "case 0: normal case",
			apiDomain:    "api.eggs2.k8s.gauss.eu-central-1.aws.gigantic.io",
			expectedZone: "gauss.eu-central-1.aws.gigantic.io",
		},
		{
			name:         "case 1: domain too short",
			apiDomain:    "api.eggs2.k8s.gigantic",
			errorMatcher: IsMalformedDomain,
		},
		{
			// FIX: was a duplicate "case 1" label, which made the two subtest
			// names collide (go test disambiguates with #01, hiding which
			// case failed).
			name:         "case 2: minimal length domain",
			apiDomain:    "api.eggs2.k8s.gigantic.io",
			expectedZone: "gigantic.io",
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			zone, err := zoneFromAPIDomain(tc.apiDomain)

			switch {
			case err == nil && tc.errorMatcher == nil:
				// correct; carry on
			case err != nil && tc.errorMatcher == nil:
				t.Fatalf("error == %#v, want nil", err)
			case err == nil && tc.errorMatcher != nil:
				t.Fatalf("error == nil, want non-nil")
			case !tc.errorMatcher(err):
				t.Fatalf("error == %#v, want matching", err)
			}

			if tc.errorMatcher != nil {
				return
			}

			if zone != tc.expectedZone {
				t.Fatalf("zone == %q, want %q", zone, tc.expectedZone)
			}
		})
	}
}
package main import "exchange_websocket/bitmex_websocket" func main() { bm := bitmex_websocket.BmWebsocketInit() bm.BmDepthFundingInstrumentWebsocket() for true { bm.WsConnect() go func() { bm.Ping() }() bm.Subscribe() bm.ReadMessage() } }
package sol import ( "testing" ) func TestPow(t *testing.T) { testcases := []struct { base, pos, want int }{ {base: 10, pos: 1, want: 10}, {base: 10, pos: 2, want: 100}, {base: -1, pos: 2, want: 1}, {base: -1, pos: 3, want: -1}, } for _, testcase := range testcases { result := pow(testcase.base, testcase.pos) if result != testcase.want { t.Fatal("expected:", testcase.want, "but got:", result) } } t.Log("passed") // intToRoman(1551) } func TestBasic(t *testing.T) { testcases := []struct { input int want string }{ {input: 3, want: "III"}, {input: 4, want: "IV"}, {input: 9, want: "IX"}, {input: 8, want: "VIII"}, {input: 58, want: "LVIII"}, {input: 1994, want: "MCMXCIV"}, {input: 1551, want: "MDLI"}, {input: 21, want: "XXI"}, } for i, testcase := range testcases { result := intToRoman(testcase.input) if result != testcase.want { t.Fatal("case:", i, "expected:", testcase.want, "but got:", result) } } }
package main

import (
	"context"
	"fmt"

	"github.com/gaurang2001/go-realtime-chat/client"
	"github.com/gaurang2001/go-realtime-chat/server"
)

// main is an interactive launcher: "1" starts the chat server, "2" starts a
// client; both run until their done/termination channel (or the context)
// fires.
//
// NOTE(review): fmt.Scan leaves the trailing newline in the input buffer, so
// the following fmt.Scanln calls may read an empty line instead of prompting —
// confirm the "leave empty for default" prompts actually wait for input.
func main() {
	var arguments string
	ctx, cancel := context.WithCancel(context.Background())
	fmt.Println("To start the server press 1 or to use the client press 2")
	fmt.Scan(&arguments)
	if arguments == "1" {
		defer cancel()
		var pass, addr string
		fmt.Printf("Enter the Server Password (leave empty for no password) : ")
		fmt.Scanln(&pass)
		fmt.Printf("Enter the Server Port (leave empty for default port) : ")
		fmt.Scanln(&addr)
		s := server.Server(pass, addr)
		done := make(chan bool)
		go s.Run(ctx, done)
		// Block until the server signals completion.
		select {
		case <-done:
		}
	} else if arguments == "2" {
		var pass, name, addr string
		fmt.Printf("Enter the Client username (leave empty for default username) : ")
		fmt.Scanln(&name)
		fmt.Printf("Enter the Server Password : ")
		fmt.Scanln(&pass)
		fmt.Printf("Enter the Server Port you want the Client to attach to : ")
		fmt.Scan(&addr)
		term := make(chan bool)
		cli := client.Client(pass, addr, name)
		go cli.Run(ctx, term)
		// Wait for either context cancellation or the client's own
		// termination signal (break here only exits the select).
		select {
		case <-ctx.Done():
			break
		case <-term:
			break
		}
	}
}
// problem 12.14 package chapter12 import ( "container/list" "math" ) type MatchValue struct { kw string offset int } func SmallestSubarrayCoveringSet(A, Q []string) []string { bag := make(map[string]bool) for _, q := range Q { bag[q] = true } szBag := len(bag) queue := list.New() occ := make(map[string]int) min, i, j := math.MaxInt32, 0, -1 for p, w := range A { if bag[w] { occ[w] += 1 queue.PushBack(MatchValue{w, p}) for len(occ) == szBag { front := queue.Remove(queue.Front()).(MatchValue) if szCover := p - front.offset + 1; szCover < min { i, j = front.offset, p min = j - i + 1 } occ[front.kw] -= 1 if occ[front.kw] == 0 { delete(occ, front.kw) } } } } return A[i : j+1] }
package models type Address struct { Id uint `json:"id" gorm:"primaryKey;not null;autoIncrement;comment:'主键'"` Uid uint `json:"uid" gorm:"type:bigint(20);not null;comment:'userid'"` ReceiverName string `json:"receiver_name" gorm:"type:varchar(50);default:'';comment:'收货人姓名'" binding:"required" ` ReceiverMobile string `json:"receiver_mobile" gorm:"type:varchar(20);default:'';comment:'收货手机'" binding:"required,mobile" ` ReceiverProvince string `json:"receiver_province" gorm:"type:varchar(50);default:'';comment:'省份'" binding:"required" ` ReceiverCity string `json:"receiver_city" gorm:"type:varchar(50);default:'';comment:'市'" binding:"required" ` ReceiverDistrict string `json:"receiver_district" gorm:"type:varchar(50);default:'';comment:'区'" binding:"required" ` ReceiverAddress string `json:"receiver_address" gorm:"type:varchar(100);default:'';comment:'详细地址'" binding:"required" ` } // 根据条件获取用户详情 func GetAddressByWhere(where ...interface{}) (address Address,err error) { err = Db.First(&address, where...).Error return } func GetAddressBy(where ...interface{})(ads []Address,err error){ err = Db.Find(&ads, where...).Error return } func (a *Address)CreateAddress()error{ err := Db.Save(a).Error return err } func (a *Address)Count(uid int)(count int64){ Db.Model(a).Where("uid=?",uid).Count(&count) return } func (a *Address)DeleteAddressById()error{ err := Db.Delete(a).Error return err }
package realm

import (
	"encoding/json"
	"fmt"

	"golang.org/x/mod/semver"
)

// Toggle is a feature switch/toggle structure for holding
// its name, value, type and any overrides to be parsed by the applicable realm sdk
type Toggle struct {
	Type  string      `json:"type"`
	Value interface{} `json:"value"`
}

// toggleAlias mirrors Toggle so UnmarshalJSON can decode into it without
// recursing back into Toggle.UnmarshalJSON.
type toggleAlias Toggle

// UnmarshalJSON decodes a toggle while capturing the raw bytes of the
// "value" field, then validates/coerces them against the declared "type"
// via assertType.
func (t *Toggle) UnmarshalJSON(b []byte) error {
	var raw json.RawMessage
	// Aliasing Value to &raw makes the decoder leave the undecoded value
	// bytes in raw for the type assertion below.
	alias := toggleAlias{
		Value: &raw,
	}
	if err := json.Unmarshal(b, &alias); err != nil {
		return err
	}
	*t = Toggle(alias)
	if t.Value == nil || len(raw) == 0 {
		return fmt.Errorf("value cannot be empty/nil with type specified as: %q", t.Type)
	}
	if err := t.assertType(raw); err != nil {
		return fmt.Errorf("%q of the specified type %q is incompatible: %w", string(raw), t.Type, err)
	}
	return nil
}

// assertType checks that data unmarshals as the declared Type and, for the
// built-in types, replaces t.Value with the decoded Go value. "custom"
// values stay as raw JSON; any other type yields an UnsupportedTypeError.
func (t *Toggle) assertType(data json.RawMessage) error {
	var err error
	switch t.Type {
	case "string":
		var s string
		if err = json.Unmarshal(data, &s); err != nil {
			return err
		}
		t.Value = s
		return nil
	case "number":
		var n float64
		if err = json.Unmarshal(data, &n); err != nil {
			return err
		}
		t.Value = n
		return nil
	case "boolean":
		var b bool
		if err = json.Unmarshal(data, &b); err != nil {
			return err
		}
		t.Value = b
		return nil
	case "custom":
		// keep value as json.RawMessage for unmarshaling later
		return nil
	}
	return &UnsupportedTypeError{t.Type}
}

// OverrideableToggle is a Toggle plus optional version-ranged overrides that
// replace the default value for matching semver versions.
type OverrideableToggle struct {
	*Toggle
	Overrides []*Override `json:"overrides,omitempty"`
}

// UnsupportedTypeError reports a toggle "type" this package does not handle.
type UnsupportedTypeError struct {
	ToggleType string
}

func (ut *UnsupportedTypeError) Error() string {
	return fmt.Sprintf("type %q is currently not supported", ut.ToggleType)
}

// UnmarshalJSON Custom UnmarshalJSON method for validating toggle Value to the ToggleType
func (t *OverrideableToggle) UnmarshalJSON(b []byte) error {
	// Decode the embedded Toggle first (runs Toggle.UnmarshalJSON, which
	// validates the value against the declared type).
	var toggle Toggle
	err := json.Unmarshal(b, &toggle)
	if err != nil {
		return err
	}
	t.Toggle = &toggle

	// Re-decode the envelope just to extract the optional "overrides" key.
	var m map[string]json.RawMessage
	if err := json.Unmarshal(b, &m); err != nil {
		return err
	}
	if v, ok := m["overrides"]; ok {
		var overrides []*Override
		if err := json.Unmarshal(v, &overrides); err != nil {
			return err
		}
		t.Overrides = overrides
	}

	// NOTE(review): this overlap check only compares each override with its
	// immediate predecessor, so it presumably assumes overrides are listed
	// in ascending version order — confirm with the document producers.
	var previous *Override
	for _, override := range t.Overrides {
		// overrides should not overlap
		if previous != nil && semver.Compare(previous.MaximumVersion, override.MinimumVersion) == 1 {
			return fmt.Errorf("an override with maximum version %v is semantically greater than the next override's minimum version (%v) ", previous.MaximumVersion, override.MinimumVersion)
		}
		previous = override
	}
	return nil
}

// GetValueAt returns the value at the given version.
// Will return default value if version is empty string or no override is present for the specified version
func (t *OverrideableToggle) GetValueAt(version string) interface{} {
	v := t.Value
	if version != "" {
		// First override whose [MinimumVersion, MaximumVersion] range
		// contains version wins.
		for _, override := range t.Overrides {
			if semver.Compare(override.MinimumVersion, version) <= 0 && semver.Compare(override.MaximumVersion, version) >= 0 {
				v = override.Value
				break
			}
		}
	}
	return v
}

// StringValue retrieves a string value of the toggle
// and returns the default value if it does not exist and a bool on whether or not the toggle exists
func (t *OverrideableToggle) StringValue(version string, defaultValue string) (string, bool) {
	v, ok := t.GetValueAt(version).(string)
	if !ok {
		return defaultValue, ok
	}
	return v, ok
}

// BoolValue retrieves a bool value of the toggle
// and returns the default value if it does not exist and a bool on whether or not the toggle exists
func (t *OverrideableToggle) BoolValue(version string, defaultValue bool) (bool, bool) {
	v, ok := t.GetValueAt(version).(bool)
	if !ok {
		return defaultValue, ok
	}
	return v, ok
}

// Float64Value retrieves a float64 value of the toggle
// and returns the default value if it does not exist and a bool on whether or not the toggle exists
func (t *OverrideableToggle) Float64Value(version string, defaultValue float64) (float64, bool) {
	v, ok := t.GetValueAt(version).(float64)
	if !ok {
		return defaultValue, ok
	}
	return v, ok
}

// CustomValue unmarshals v into the value of the toggle
func (t *OverrideableToggle) CustomValue(version string, v any) error {
	// Only "custom"-typed toggles keep their value as *json.RawMessage
	// (see Toggle.UnmarshalJSON/assertType); anything else is an error here.
	raw, ok := t.GetValueAt(version).(*json.RawMessage)
	if !ok {
		return fmt.Errorf("toggle with type %q could not be converted for unmarshalling", t.Type)
	}
	return json.Unmarshal(*raw, v)
}
package leetcode

import (
	"reflect"
	"testing"
)

// TestMinimumAbsDifference checks that minimumAbsDifference returns every
// pair of values at the minimum absolute difference, in ascending order.
func TestMinimumAbsDifference(t *testing.T) {
	cases := []struct {
		arr  []int
		want [][]int
	}{
		{
			arr:  []int{4, 2, 1, 3},
			want: [][]int{{1, 2}, {2, 3}, {3, 4}},
		},
		{
			arr:  []int{1, 3, 6, 10, 15},
			want: [][]int{{1, 3}},
		},
		{
			arr:  []int{3, 8, -10, 23, 19, -4, -14, 27},
			want: [][]int{{-14, -10}, {19, 23}, {23, 27}},
		},
	}
	for _, c := range cases {
		if got := minimumAbsDifference(c.arr); !reflect.DeepEqual(got, c.want) {
			t.Fatal()
		}
	}
}
package open_resource_discovery_test import ( "encoding/json" "fmt" "strings" "testing" "github.com/kyma-incubator/compass/components/director/internal/open_resource_discovery" "github.com/kyma-incubator/compass/components/director/pkg/str" "github.com/stretchr/testify/require" ) const ( invalidOpenResourceDiscovery = "invalidOpenResourceDiscovery" invalidUrl = "invalidUrl" invalidOrdID = "invalidOrdId" invalidDescriptionLength = 256 invalidVersion = "invalidVersion" invalidPolicyLevel = "invalidPolicyLevel" invalidVendor = "wrongVendor!" invalidType = "invalidType" invalidCustomType = "wrongCustomType" invalidMediaType = "invalid/type" unknownVendorOrdID = "vendor2" unknownProductOrdID = "ns:UNKNOWN_PRODUCT_ID" unknownPackageOrdID = "ns:package:UNKNOWN_PACKAGE_ID:v1" unknownBundleOrdID = "ns:consumptionBundle:UNKNOWN_BUNDLE_ID:v1" ) var ( invalidJson = `[ { foo: bar, } ]` invalidPackageLinkDueToMissingType = `[ { "url": "https://example.com/en/legal/terms-of-use.html" }, { "type": "client-registration", "url": "https://example2.com/en/legal/terms-of-use.html" } ]` invalidPackageLinkDueToWrongType = `[ { "type": "wrongType", "url": "https://example.com/en/legal/terms-of-use.html" }, { "type": "client-registration", "url": "https://example2.com/en/legal/terms-of-use.html" } ]` invalidPackageLinkDueToMissingURL = `[ { "type": "payment" }, { "type": "client-registration", "url": "https://example2.com/en/legal/terms-of-use.html" } ]` invalidPackageLinkDueToWrongURL = `[ { "type": "payment", "url": "wrongUrl" }, { "type": "client-registration", "url": "https://example2.com/en/legal/terms-of-use.html" } ]` invalidPackageLinkTypeWhenProvidedCustomType = `[ { "type": "payment", "url": "https://example2.com/en/legal/terms-of-use.html", "customType": "myCustomType" } ]` invalidPackageLinkCustomTypeWhenCustomTypeNotProvided = `[ { "type": "custom", "url": "https://example2.com/en/legal/terms-of-use.html", } ]` invalidLinkDueToMissingTitle = `[ { "url": 
"https://example2.com/en/legal/terms-of-use.html", "description": "foo bar" } ]` invalidLinkDueToMissingURL = `[ { "title": "myTitle" } ]` invalidLinkDueToWrongURL = `[ { "url": "wrongURL", "title": "myTitle" } ]` invalidPartOfProductsElement = `["invalidValue"]` invalidPartOfProductsIntegerElement = `["sap:S4HANA_OD", 992]` invalidTagsValue = `["invalid!@#"]` invalidTagsValueIntegerElement = `["storage", 992]` invalidLabelsWhenValueIsNotArray = `{ "label-key-1": "label-value-1" }` invalidLabelsWhenValuesAreNotArrayOfStrings = `{ "label-key-1": [ "label-value-1", 992 ] }` invalidLabelsWhenKeyIsWrong = `{ "invalidKey!@#": [ "label-value-1", "label-value-2" ] }` invalidCountriesElement = `["DE", "wrongCountry"]` invalidCountriesNonStringElement = `["DE", 992]` invalidLineOfBusinessElement = `["sales", "wrongLineOfBusiness!@#"]` invalidLineOfBusinessNonStringElement = `["sales", 992]` invalidIndustryElement = `["banking", "wrongIndustry!@#"]` invalidIndustryNonStringElement = `["banking", 992]` invalidBundleLinksDueToMissingTitle = `[ { "description": "foo bar", "url": "https://example.com/2018/04/11/testing/" } ]` invalidBundleLinksDueToMissingURL = `[ { "description": "foo bar", "title": "myTitle" } ]` invalidBundleLinksDueToWrongURL = `[ { "description": "foo bar", "title": "myTitle", "url": "wrongURL" } ]` invalidCredentialsExchangeStrategyDueToMissingType = `[ { "callbackUrl": "http://localhost:8080/credentials/relative" } ]` invalidCredentialsExchangeStrategyDueToWrongType = `[ { "type": "wrongType", "callbackUrl": "http://localhost:8080/credentials/relative" } ]` invalidCredentialsExchangeStrategyDueToMissingCustomType = `[ { "type": "wrongType", "customType": "ns:credential-exchange:v1", "customDescription": "foo bar" } ]` invalidCredentialsExchangeStrategyDueToWrongCustomType = `[ { "type": "custom", "customType": "wrongCustomType" } ]` invalidCredentialsExchangeStrategyDueToWrongCallbackURL = `[ { "type": "custom", "callbackUrl": "wrongURL" } ]` 
invalidApiResourceLinksDueToMissingType = `[ { "url": "https://example.com/shell/discover" }, { "type": "console", "url": "%s/shell/discover/relative" } ]` invalidApiResourceLinksDueToWrongType = `[ { "type": "wrongType", "url": "https://example.com/shell/discover" } ]` invalidApiResourceLinksDueToMissingCustomValueOfType = `[ { "type": "console", "customType": "foo", "url": "https://example.com/shell/discover" } ]` invalidApiResourceLinksDueToMissingURL = `[ { "type": "console" } ]` invalidApiResourceLinksDueToWrongURL = `[ { "type": "console", "url": "wrongURL" } ]` invalidChangeLogEntriesDueToMissingVersion = `[ { "date": "2020-04-29", "description": "lorem ipsum dolor sit amet", "releaseStatus": "active", "url": "https://example.com/changelog/v1" } ]` invalidChangeLogEntriesDueToWrongVersion = `[ { "date": "2020-04-29", "description": "lorem ipsum dolor sit amet", "releaseStatus": "active", "url": "https://example.com/changelog/v1", "version": "wrongValue" } ]` invalidChangeLogEntriesDueToMissingReleaseStatus = `[ { "date": "2020-04-29", "description": "lorem ipsum dolor sit amet", "url": "https://example.com/changelog/v1", "version": "1.0.0" } ]` invalidChangeLogEntriesDueToWrongReleaseStatus = `[ { "date": "2020-04-29", "description": "lorem ipsum dolor sit amet", "releaseStatus": "wrongValue", "url": "https://example.com/changelog/v1", "version": "1.0.0" } ]` invalidChangeLogEntriesDueToMissingDate = `[ { "description": "lorem ipsum dolor sit amet", "releaseStatus": "active", "url": "https://example.com/changelog/v1", "version": "1.0.0" } ]` invalidChangeLogEntriesDueToWrongDate = `[ { "date": "0000-00-00", "description": "lorem ipsum dolor sit amet", "releaseStatus": "active", "url": "https://example.com/changelog/v1", "version": "1.0.0" } ]` invalidChangeLogEntriesDueToWrongURL = `[ { "date": "2020-04-29", "description": "lorem ipsum dolor sit amet", "releaseStatus": "active", "url": "wrongValue", "version": "1.0.0" } ]` ) func 
TestDocuments_ValidatePackage(t *testing.T) { var tests = []struct { Name string DocumentProvider func() []*open_resource_discovery.Document ExpectedToBeValid bool }{ { Name: "Valid document", DocumentProvider: func() []*open_resource_discovery.Document { return []*open_resource_discovery.Document{fixORDDocument()} }, ExpectedToBeValid: true, }, { Name: "Missing `openResourceDiscovery` field for a Document", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.OpenResourceDiscovery = "" return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `openResourceDiscovery` field for a Document", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.OpenResourceDiscovery = invalidOpenResourceDiscovery return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `baseUrl` of describedSystemInstance Document field", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.DescribedSystemInstance.BaseURL = str.Ptr(invalidUrl) return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `ordID` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].OrdID = "" return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `ordID` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].OrdID = invalidOrdID return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `title` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Title = "" return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `shortDescription` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].ShortDescription = "" return []*open_resource_discovery.Document{doc} }, }, 
{ Name: "Exceeded length of `shortDescription` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].ShortDescription = strings.Repeat("a", invalidDescriptionLength) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid empty `shortDescription` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].ShortDescription = "" return []*open_resource_discovery.Document{doc} }, }, { Name: "New lines in `shortDescription` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].ShortDescription = `newLine\n` return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `description` filed for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Description = "" return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `version` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Version = "" return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `version` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Version = invalidVersion return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `policyLevel` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PolicyLevel = "" return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `policyLevel` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PolicyLevel = invalidPolicyLevel return []*open_resource_discovery.Document{doc} }, }, { Name: "`policyLevel` field for Package is not of type `custom` when 
`customPolicyLevel` is set", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].CustomPolicyLevel = str.Ptr("myCustomPolicyLevel") doc.Packages[0].PolicyLevel = policyLevel return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `type` from `PackageLinks` for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PackageLinks = json.RawMessage(invalidPackageLinkDueToMissingType) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `type` key in `PackageLinks` for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PackageLinks = json.RawMessage(invalidPackageLinkDueToWrongType) return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `url` from `PackageLinks` for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PackageLinks = json.RawMessage(invalidPackageLinkDueToMissingURL) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `url` key in `PackageLinks` for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PackageLinks = json.RawMessage(invalidPackageLinkDueToWrongURL) return []*open_resource_discovery.Document{doc} }, }, { Name: "Field `type` in `PackageLinks` is not set to `custom` when `customType` field is provided", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PackageLinks = json.RawMessage(invalidPackageLinkTypeWhenProvidedCustomType) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `type` set to `custom` in `PackageLinks` when `customType` field is not provided", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PackageLinks = 
json.RawMessage(invalidPackageLinkCustomTypeWhenCustomTypeNotProvided) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `PackageLinks` field when it is invalid JSON for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PackageLinks = json.RawMessage(invalidJson) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `PackageLinks` field when it isn't a JSON array for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PackageLinks = json.RawMessage("{}") return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `PackageLinks` field when it is an empty JSON array for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PackageLinks = json.RawMessage("[]") return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `title` field in `Links` for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Links = json.RawMessage(invalidLinkDueToMissingTitle) return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `url` field in `Links` for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Links = json.RawMessage(invalidLinkDueToMissingURL) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `links` field when it is invalid JSON for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Links = json.RawMessage(invalidJson) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `links` field when it isn't a JSON array for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Links = json.RawMessage("{}") return []*open_resource_discovery.Document{doc} 
}, }, { Name: "Invalid `links` field when it is an empty JSON array for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Links = json.RawMessage("[]") return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `url` field in `Links` for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Links = json.RawMessage(invalidLinkDueToWrongURL) return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `vendor` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Vendor = nil return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `vendor` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Vendor = str.Ptr(invalidVendor) return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `partOfProducts` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PartOfProducts = nil return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `partOfProducts` field when the JSON array is empty", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PartOfProducts = json.RawMessage("[]") return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid element of `partOfProducts` array field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PartOfProducts = json.RawMessage(invalidPartOfProductsElement) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `partOfProducts` field when it is invalid JSON for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PartOfProducts = json.RawMessage(invalidJson) 
return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `partOfProducts` field when it isn't a JSON array for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PartOfProducts = json.RawMessage("{}") return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `partOfProducts` field when it contains non string value", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].PartOfProducts = json.RawMessage(invalidPartOfProductsIntegerElement) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `tags` field element for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Tags = json.RawMessage(invalidTagsValue) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `tags` field when it is invalid JSON for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Tags = json.RawMessage(invalidJson) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `tags` field when it isn't a JSON array for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Tags = json.RawMessage("{}") return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `tags` field when the JSON array is empty", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Tags = json.RawMessage("[]") return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `tags` field when it contains non string value", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Tags = json.RawMessage(invalidTagsValueIntegerElement) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid JSON `Labels` field for Package", DocumentProvider: 
func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Labels = json.RawMessage(invalidJson) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid JSON object `Labels` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Labels = json.RawMessage(`[]`) return []*open_resource_discovery.Document{doc} }, }, { Name: "`Labels` values are not array for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Labels = json.RawMessage(invalidLabelsWhenValueIsNotArray) return []*open_resource_discovery.Document{doc} }, }, { Name: "`Labels` values are not array of strings for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Labels = json.RawMessage(invalidLabelsWhenValuesAreNotArrayOfStrings) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid key for JSON `Labels` field for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Labels = json.RawMessage(invalidLabelsWhenKeyIsWrong) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `countries` field element for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Countries = json.RawMessage(invalidCountriesElement) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `countries` field when JSON array contains non string element for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Countries = json.RawMessage(invalidCountriesNonStringElement) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `countries` field when it is invalid JSON for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() 
doc.Packages[0].Countries = json.RawMessage(invalidJson) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `countries` field when it isn't a JSON array for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Countries = json.RawMessage("{}") return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `countries` field when the JSON array is empty", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].Countries = json.RawMessage("[]") return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `lineOfBusiness` field element for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].LineOfBusiness = json.RawMessage(invalidLineOfBusinessElement) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `lineOfBusiness` field when JSON array contains non string element for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].LineOfBusiness = json.RawMessage(invalidLineOfBusinessNonStringElement) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `lineOfBusiness` field when it is invalid JSON for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].LineOfBusiness = json.RawMessage(invalidJson) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `lineOfBusiness` field when it isn't a JSON array for Package", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].LineOfBusiness = json.RawMessage("{}") return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `lineOfBusiness` field when the JSON array is empty", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Packages[0].LineOfBusiness = 
json.RawMessage("[]")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `industry` field element for Package",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Packages[0].Industry = json.RawMessage(invalidIndustryElement)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `industry` field when JSON array contains non string element for Package",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Packages[0].Industry = json.RawMessage(invalidIndustryNonStringElement)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `industry` field when it is invalid JSON for Package",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Packages[0].Industry = json.RawMessage(invalidJson)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `industry` field when it isn't a JSON array for Package",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Packages[0].Industry = json.RawMessage("{}")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `industry` field when the JSON array is empty",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Packages[0].Industry = json.RawMessage("[]")
				return []*open_resource_discovery.Document{doc}
			},
		},

		// Test invalid entity relations
		{
			Name: "Package has a reference to unknown Vendor",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Packages[0].Vendor = str.Ptr(unknownVendorOrdID)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Package has a reference to unknown Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Packages[0].PartOfProducts = json.RawMessage(fmt.Sprintf(`["%s"]`, unknownProductOrdID))
				return []*open_resource_discovery.Document{doc}
			},
		},
	}

	for _, test := range tests {
		t.Run(test.Name, func(t *testing.T) {
			docs := open_resource_discovery.Documents{test.DocumentProvider()[0]}
			err := docs.Validate(baseURL)
			if test.ExpectedToBeValid {
				require.NoError(t, err)
			} else {
				require.Error(t, err)
			}
		})
	}
}

// TestDocuments_ValidateBundle verifies that Documents.Validate rejects an ORD
// document for every known way a consumption bundle entry can violate the ORD
// spec. Every case is expected to be invalid: ExpectedToBeValid stays at its
// zero value (false) throughout.
func TestDocuments_ValidateBundle(t *testing.T) {
	// ordDoc shortens the document type used by every mutation closure below.
	type ordDoc = open_resource_discovery.Document

	// modifiedDoc builds a provider that yields a single fixture document with
	// the given mutation applied — the shared shape of every case in the table.
	modifiedDoc := func(mutate func(doc *ordDoc)) func() []*open_resource_discovery.Document {
		return func() []*open_resource_discovery.Document {
			doc := fixORDDocument()
			mutate(doc)
			return []*open_resource_discovery.Document{doc}
		}
	}

	tests := []struct {
		Name              string
		DocumentProvider  func() []*open_resource_discovery.Document
		ExpectedToBeValid bool
	}{
		{Name: "Missing `ordID` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].OrdID = nil })},
		{Name: "Invalid `ordID` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].OrdID = str.Ptr(invalidOrdID) })},
		{Name: "Missing `title` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Name = "" })},
		{Name: "Missing `shortDescription` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].ShortDescription = nil })},
		{Name: "Exceeded length of `shortDescription` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].ShortDescription = str.Ptr(strings.Repeat("a", invalidDescriptionLength)) })},
		{Name: "Invalid empty `shortDescription` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].ShortDescription = str.Ptr("") })},
		{Name: "New lines in `shortDescription` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].ShortDescription = str.Ptr(`newLine\n`) })},
		{Name: "Missing `description` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Description = nil })},
		{Name: "Exceeded length of `description` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Description = str.Ptr(strings.Repeat("a", invalidDescriptionLength)) })},
		{Name: "Invalid empty `description` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Description = str.Ptr("") })},
		{Name: "New lines in `description` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Description = str.Ptr(`newLine\n`) })},
		{Name: "Missing `title` field in `Links` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Links = json.RawMessage(invalidBundleLinksDueToMissingTitle) })},
		{Name: "Missing `url` field in `Links` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Links = json.RawMessage(invalidBundleLinksDueToMissingURL) })},
		{Name: "Invalid `url` field in `Links` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Links = json.RawMessage(invalidBundleLinksDueToWrongURL) })},
		{Name: "Invalid `Links` field when it is invalid JSON for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Links = json.RawMessage(invalidJson) })},
		{Name: "Invalid `Links` field when it isn't a JSON array for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Links = json.RawMessage("{}") })},
		{Name: "Invalid `Links` field when it is an empty JSON array for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Links = json.RawMessage("[]") })},
		{Name: "Invalid JSON `Labels` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Labels = json.RawMessage(invalidJson) })},
		{Name: "Invalid JSON object `Labels` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Labels = json.RawMessage(`[]`) })},
		{Name: "`Labels` values are not array for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Labels = json.RawMessage(invalidLabelsWhenValueIsNotArray) })},
		{Name: "`Labels` values are not array of strings for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Labels = json.RawMessage(invalidLabelsWhenValuesAreNotArrayOfStrings) })},
		{Name: "Invalid key for JSON `Labels` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].Labels = json.RawMessage(invalidLabelsWhenKeyIsWrong) })},
		{Name: "Missing `type` field of `CredentialExchangeStrategies` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].CredentialExchangeStrategies = json.RawMessage(invalidCredentialsExchangeStrategyDueToMissingType) })},
		{Name: "Invalid `type` field of `CredentialExchangeStrategies` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].CredentialExchangeStrategies = json.RawMessage(invalidCredentialsExchangeStrategyDueToWrongType) })},
		{Name: "`type` field is not with value `custom` when `customType` field is provided for `CredentialExchangeStrategies` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].CredentialExchangeStrategies = json.RawMessage(invalidCredentialsExchangeStrategyDueToWrongCustomType) })},
		// NOTE(review): this case reuses the same WrongCustomType fixture as the
		// previous one — looks like copy-paste; confirm a dedicated fixture for
		// an invalid customType value isn't intended here.
		{Name: "Invalid `customType` field when `type` field is set to `custom` for `CredentialExchangeStrategies` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].CredentialExchangeStrategies = json.RawMessage(invalidCredentialsExchangeStrategyDueToWrongCustomType) })},
		// NOTE(review): fixture name says MissingCustomType while the case is
		// about customDescription — verify against the fixture definition.
		{Name: "`type` field is not with value `custom` when `customDescription` field is provided for `CredentialExchangeStrategies` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].CredentialExchangeStrategies = json.RawMessage(invalidCredentialsExchangeStrategyDueToMissingCustomType) })},
		{Name: "Invalid `callbackURL` field of `CredentialExchangeStrategies` field for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].CredentialExchangeStrategies = json.RawMessage(invalidCredentialsExchangeStrategyDueToWrongCallbackURL) })},
		{Name: "Invalid `CredentialExchangeStrategies` field when it is invalid JSON for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].CredentialExchangeStrategies = json.RawMessage(invalidJson) })},
		{Name: "Invalid `CredentialExchangeStrategies` field when it isn't a JSON array for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].CredentialExchangeStrategies = json.RawMessage("{}") })},
		{Name: "Invalid `CredentialExchangeStrategies` field when it is an empty JSON array for Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.ConsumptionBundles[0].CredentialExchangeStrategies = json.RawMessage("[]") })},
	}

	for _, test := range tests {
		t.Run(test.Name, func(t *testing.T) {
			docs := open_resource_discovery.Documents{test.DocumentProvider()[0]}
			err := docs.Validate(baseURL)
			if test.ExpectedToBeValid {
				require.NoError(t, err)
			} else {
				require.Error(t, err)
			}
		})
	}
}

// TestDocuments_ValidateAPI verifies that Documents.Validate rejects an ORD
// document for every known way an API resource entry can violate the ORD spec,
// including broken references to packages, bundles and products. Every case is
// expected to be invalid: ExpectedToBeValid stays at its zero value (false).
func TestDocuments_ValidateAPI(t *testing.T) {
	// ordDoc shortens the document type used by every mutation closure below.
	type ordDoc = open_resource_discovery.Document

	// modifiedDoc builds a provider that yields a single fixture document with
	// the given mutation applied — the shared shape of every case in the table.
	modifiedDoc := func(mutate func(doc *ordDoc)) func() []*open_resource_discovery.Document {
		return func() []*open_resource_discovery.Document {
			doc := fixORDDocument()
			mutate(doc)
			return []*open_resource_discovery.Document{doc}
		}
	}

	tests := []struct {
		Name              string
		DocumentProvider  func() []*open_resource_discovery.Document
		ExpectedToBeValid bool
	}{
		{Name: "Missing `ordID` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].OrdID = nil })},
		{Name: "Invalid `ordID` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].OrdID = str.Ptr(invalidOrdID) })},
		{Name: "Missing `title` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Name = "" })},
		{Name: "Missing `shortDescription` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ShortDescription = nil })},
		{Name: "Exceeded length of `shortDescription` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ShortDescription = str.Ptr(strings.Repeat("a", invalidDescriptionLength)) })},
		{Name: "Invalid empty `shortDescription` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ShortDescription = str.Ptr("") })},
		{Name: "New lines in `shortDescription` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ShortDescription = str.Ptr(`newLine\n`) })},
		{Name: "Missing `description` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Description = nil })},
		{Name: "Missing `version` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].VersionInput.Value = "" })},
		{Name: "Invalid `version` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].VersionInput.Value = invalidVersion })},
		{Name: "Missing `partOfPackage` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].OrdPackageID = nil })},
		{Name: "Invalid `partOfPackage` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].OrdPackageID = str.Ptr(invalidOrdID) })},
		{Name: "Invalid `partOfConsumptionBundle` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].OrdBundleID = str.Ptr(invalidOrdID) })},
		{Name: "Missing `apiProtocol` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ApiProtocol = nil })},
		{Name: "Invalid `apiProtocol` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ApiProtocol = str.Ptr("wrongApiProtocol") })},
		{Name: "Missing `visibility` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Visibility = nil })},
		{Name: "Invalid `visibility` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Visibility = str.Ptr("wrongVisibility") })},
		{Name: "Invalid element of `partOfProducts` array field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].PartOfProducts = json.RawMessage(invalidPartOfProductsElement) })},
		{Name: "Invalid `partOfProducts` field when the JSON array is empty for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].PartOfProducts = json.RawMessage("[]") })},
		{Name: "Invalid `partOfProducts` field when it is invalid JSON for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].PartOfProducts = json.RawMessage(invalidJson) })},
		{Name: "Invalid `partOfProducts` field when it isn't a JSON array for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].PartOfProducts = json.RawMessage("{}") })},
		{Name: "Invalid `partOfProducts` field when it contains non string value for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].PartOfProducts = json.RawMessage(invalidPartOfProductsIntegerElement) })},
		{Name: "Invalid value for `tags` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Tags = json.RawMessage(invalidTagsValue) })},
		{Name: "Invalid `tags` field when it is invalid JSON for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Tags = json.RawMessage(invalidJson) })},
		{Name: "Invalid `tags` field when it isn't a JSON array for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Tags = json.RawMessage("{}") })},
		{Name: "Invalid `tags` field when the JSON array is empty for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Tags = json.RawMessage("[]") })},
		{Name: "Invalid `tags` field when it contains non string value for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Tags = json.RawMessage(invalidTagsValueIntegerElement) })},
		{Name: "Invalid value for `countries` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Countries = json.RawMessage(invalidCountriesElement) })},
		{Name: "Invalid `countries` field when it is invalid JSON for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Countries = json.RawMessage(invalidJson) })},
		{Name: "Invalid `countries` field when it isn't a JSON array for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Countries = json.RawMessage("{}") })},
		{Name: "Invalid `countries` field when the JSON array is empty for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Countries = json.RawMessage("[]") })},
		{Name: "Invalid `countries` field when it contains non string value for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Countries = json.RawMessage(invalidCountriesNonStringElement) })},
		{Name: "Invalid value for `lineOfBusiness` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].LineOfBusiness = json.RawMessage(invalidLineOfBusinessElement) })},
		{Name: "Invalid `lineOfBusiness` field when it is invalid JSON for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].LineOfBusiness = json.RawMessage(invalidJson) })},
		{Name: "Invalid `lineOfBusiness` field when it isn't a JSON array for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].LineOfBusiness = json.RawMessage("{}") })},
		{Name: "Invalid `lineOfBusiness` field when the JSON array is empty for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].LineOfBusiness = json.RawMessage("[]") })},
		// NOTE(review): reuses the countries non-string fixture — presumably the
		// two fixtures have the same shape; confirm this is intentional.
		{Name: "Invalid `lineOfBusiness` field when it contains non string value for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].LineOfBusiness = json.RawMessage(invalidCountriesNonStringElement) })},
		{Name: "Invalid value for `industry` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Industry = json.RawMessage(invalidIndustryElement) })},
		{Name: "Invalid `industry` field when it is invalid JSON for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Industry = json.RawMessage(invalidJson) })},
		{Name: "Invalid `industry` field when it isn't a JSON array for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Industry = json.RawMessage("{}") })},
		{Name: "Invalid `industry` field when the JSON array is empty for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Industry = json.RawMessage("[]") })},
		{Name: "Invalid `industry` field when it contains non string value for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Industry = json.RawMessage(invalidIndustryNonStringElement) })},
		{Name: "Missing `resourceDefinitions` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ResourceDefinitions = nil })},
		{Name: "Missing field `type` of `resourceDefinitions` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ResourceDefinitions[0].Type = "" })},
		{Name: "Invalid field `type` of `resourceDefinitions` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ResourceDefinitions[0].Type = invalidType })},
		{Name: "Field `type` value is not `custom` when field `customType` is provided for `resourceDefinitions` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ResourceDefinitions[0].CustomType = "test:test:v1" })},
		{
			Name: "Invalid `customType` value when field `type` has value `custom`for `resourceDefinitions` field for API",
			DocumentProvider: modifiedDoc(func(doc *ordDoc) {
				doc.APIResources[0].ResourceDefinitions[0].Type = "custom"
				doc.APIResources[0].ResourceDefinitions[0].CustomType = invalidCustomType
			}),
		},
		{Name: "Missing field `mediaType` of `resourceDefinitions` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ResourceDefinitions[0].MediaType = "" })},
		{Name: "Invalid field `mediaType` of `resourceDefinitions` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ResourceDefinitions[0].MediaType = invalidMediaType })},
		// The next block of cases pins the allowed mediaType per definition type:
		// each sets a valid type with a mediaType that is invalid for it.
		{
			Name: "Invalid field `mediaType` when field `type` has value `openapi-v2` for API",
			DocumentProvider: modifiedDoc(func(doc *ordDoc) {
				doc.APIResources[0].ResourceDefinitions[0].Type = "openapi-v2"
				doc.APIResources[0].ResourceDefinitions[0].MediaType = "application/xml"
			}),
		},
		{
			Name: "Invalid field `mediaType` when field `type` has value `openapi-v3` for API",
			DocumentProvider: modifiedDoc(func(doc *ordDoc) {
				doc.APIResources[0].ResourceDefinitions[0].Type = "openapi-v3"
				doc.APIResources[0].ResourceDefinitions[0].MediaType = "application/xml"
			}),
		},
		{
			Name: "Invalid field `mediaType` when field `type` has value `raml-v1` for API",
			DocumentProvider: modifiedDoc(func(doc *ordDoc) {
				doc.APIResources[0].ResourceDefinitions[0].Type = "raml-v1"
				doc.APIResources[0].ResourceDefinitions[0].MediaType = "application/xml"
			}),
		},
		{
			Name: "Invalid field `mediaType` when field `type` has value `edmx` for API",
			DocumentProvider: modifiedDoc(func(doc *ordDoc) {
				doc.APIResources[0].ResourceDefinitions[0].Type = "edmx"
				doc.APIResources[0].ResourceDefinitions[0].MediaType = "application/json"
			}),
		},
		{
			Name: "Invalid field `mediaType` when field `type` has value `csdl-json` for API",
			DocumentProvider: modifiedDoc(func(doc *ordDoc) {
				doc.APIResources[0].ResourceDefinitions[0].Type = "csdl-json"
				doc.APIResources[0].ResourceDefinitions[0].MediaType = "application/xml"
			}),
		},
		{
			Name: "Invalid field `mediaType` when field `type` has value `wsdl-v1` for API",
			DocumentProvider: modifiedDoc(func(doc *ordDoc) {
				doc.APIResources[0].ResourceDefinitions[0].Type = "wsdl-v1"
				doc.APIResources[0].ResourceDefinitions[0].MediaType = "application/json"
			}),
		},
		{
			Name: "Invalid field `mediaType` when field `type` has value `wsdl-v2` for API",
			DocumentProvider: modifiedDoc(func(doc *ordDoc) {
				doc.APIResources[0].ResourceDefinitions[0].Type = "wsdl-v2"
				doc.APIResources[0].ResourceDefinitions[0].MediaType = "application/json"
			}),
		},
		{
			Name: "Invalid field `mediaType` when field `type` has value `sap-rfc-metadata-v1` for API",
			DocumentProvider: modifiedDoc(func(doc *ordDoc) {
				doc.APIResources[0].ResourceDefinitions[0].Type = "sap-rfc-metadata-v1"
				doc.APIResources[0].ResourceDefinitions[0].MediaType = "application/json"
			}),
		},
		{Name: "Missing field `url` of `resourceDefinitions` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ResourceDefinitions[0].URL = "" })},
		{Name: "Invalid field `url` of `resourceDefinitions` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ResourceDefinitions[0].URL = invalidUrl })},
		{Name: "Missing field `accessStrategies` of `resourceDefinitions` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ResourceDefinitions[0].AccessStrategy = nil })},
		{Name: "Missing field `type` for `accessStrategies` of `resourceDefinitions` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ResourceDefinitions[0].AccessStrategy[0].Type = "" })},
		{Name: "Invalid field `type` for `accessStrategies` of `resourceDefinitions` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ResourceDefinitions[0].AccessStrategy[0].Type = invalidType })},
		{
			Name: "Invalid field `customType` when field `type` is not `custom` for `accessStrategies` of `resourceDefinitions` field for API",
			DocumentProvider: modifiedDoc(func(doc *ordDoc) {
				doc.APIResources[0].ResourceDefinitions[0].AccessStrategy[0].Type = "open"
				doc.APIResources[0].ResourceDefinitions[0].AccessStrategy[0].CustomType = "foo"
			}),
		},
		{
			Name: "Invalid field `customDescription` when field `type` is not `custom` for `accessStrategies` of `resourceDefinitions` field for API",
			DocumentProvider: modifiedDoc(func(doc *ordDoc) {
				doc.APIResources[0].ResourceDefinitions[0].AccessStrategy[0].Type = "open"
				doc.APIResources[0].ResourceDefinitions[0].AccessStrategy[0].CustomDescription = "foo"
			}),
		},
		{Name: "Missing `type` field for `apiResourceLink` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].APIResourceLinks = json.RawMessage(invalidApiResourceLinksDueToMissingType) })},
		{Name: "Invalid `type` field for `apiResourceLink` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].APIResourceLinks = json.RawMessage(invalidApiResourceLinksDueToWrongType) })},
		{Name: "Invalid field `customType` when field `type` is not `custom` for `apiResourceLink` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].APIResourceLinks = json.RawMessage(invalidApiResourceLinksDueToMissingCustomValueOfType) })},
		{Name: "Missing `url` field for `apiResourceLink` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].APIResourceLinks = json.RawMessage(invalidApiResourceLinksDueToMissingURL) })},
		{Name: "Invalid `url` field for `apiResourceLink` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].APIResourceLinks = json.RawMessage(invalidApiResourceLinksDueToWrongURL) })},
		{Name: "Invalid `apiResourceLink` field when it is invalid JSON for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].APIResourceLinks = json.RawMessage(invalidJson) })},
		{Name: "Invalid `apiResourceLink` field when it isn't a JSON array for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].APIResourceLinks = json.RawMessage("{}") })},
		{Name: "Invalid `apiResourceLink` field when it is an empty JSON array for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].APIResourceLinks = json.RawMessage("[]") })},
		{Name: "Missing `title` field in `Links` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Links = json.RawMessage(invalidLinkDueToMissingTitle) })},
		{Name: "Missing `url` field in `Links` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Links = json.RawMessage(invalidLinkDueToMissingURL) })},
		{Name: "Invalid `url` field in `Links` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Links = json.RawMessage(invalidLinkDueToWrongURL) })},
		{Name: "Invalid `links` field when it is invalid JSON for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Links = json.RawMessage(invalidJson) })},
		{Name: "Invalid `links` field when it isn't a JSON array for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Links = json.RawMessage("{}") })},
		{Name: "Invalid `links` field when it is an empty JSON array for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Links = json.RawMessage("[]") })},
		{Name: "Missing `releaseStatus` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ReleaseStatus = str.Ptr("") })},
		{Name: "Invalid `releaseStatus` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ReleaseStatus = str.Ptr("wrongValue") })},
		{Name: "Missing `sunsetDate` field when `releaseStatus` field has value `deprecated` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ReleaseStatus = str.Ptr("deprecated") })},
		{
			Name: "Invalid `sunsetDate` field for API",
			DocumentProvider: modifiedDoc(func(doc *ordDoc) {
				doc.APIResources[0].ReleaseStatus = str.Ptr("deprecated")
				doc.APIResources[0].SunsetDate = str.Ptr("0000-00-00T09:35:30+0000")
				doc.APIResources[0].Successor = str.Ptr(api2ORDID)
			}),
		},
		{Name: "Missing `successor` field when `releaseStatus` field has value `deprecated` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ReleaseStatus = str.Ptr("deprecated") })},
		{Name: "Invalid `successor` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Successor = str.Ptr("invalidValue") })},
		{Name: "Missing field `version` of field `changeLogEntries` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToMissingVersion) })},
		{Name: "Invalid field `version` of field `changeLogEntries` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToWrongVersion) })},
		{Name: "Missing field `releaseStatus` of field `changeLogEntries` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToMissingReleaseStatus) })},
		{Name: "Invalid field `releaseStatus` of field `changeLogEntries` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToWrongReleaseStatus) })},
		{Name: "Missing field `date` of field `changeLogEntries` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToMissingDate) })},
		{Name: "Invalid field `date` of field `changeLogEntries` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToWrongDate) })},
		{Name: "Invalid field `url` of field `changeLogEntries` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToWrongURL) })},
		{Name: "Invalid `changeLogEntries` field when it is invalid JSON for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ChangeLogEntries = json.RawMessage(invalidJson) })},
		{Name: "Invalid `changeLogEntries` field when it isn't a JSON array for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ChangeLogEntries = json.RawMessage("{}") })},
		{Name: "Invalid `changeLogEntries` field when it is an empty JSON array for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].ChangeLogEntries = json.RawMessage("[]") })},
		{Name: "Missing field `entryPoint` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].TargetURL = "" })},
		{Name: "Invalid field `entryPoint` for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].TargetURL = invalidUrl })},
		{Name: "Invalid JSON `Labels` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Labels = json.RawMessage(invalidJson) })},
		{Name: "Invalid JSON object `Labels` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Labels = json.RawMessage(`[]`) })},
		{Name: "`Labels` values are not array for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Labels = json.RawMessage(invalidLabelsWhenValueIsNotArray) })},
		{Name: "`Labels` values are not array of strings for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Labels = json.RawMessage(invalidLabelsWhenValuesAreNotArrayOfStrings) })},
		{Name: "Invalid key for JSON `Labels` field for API", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].Labels = json.RawMessage(invalidLabelsWhenKeyIsWrong) })},

		// Test invalid entity relations
		{Name: "API has a reference to an unknown Package", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].OrdPackageID = str.Ptr(unknownPackageOrdID) })},
		{Name: "API has a reference to an unknown Bundle", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].OrdBundleID = str.Ptr(unknownBundleOrdID) })},
		{Name: "API has a reference to an unknown Product", DocumentProvider: modifiedDoc(func(doc *ordDoc) { doc.APIResources[0].PartOfProducts = json.RawMessage(fmt.Sprintf(`["%s"]`, unknownProductOrdID)) })},
	}

	for _, test := range tests {
		t.Run(test.Name, func(t *testing.T) {
			docs := open_resource_discovery.Documents{test.DocumentProvider()[0]}
			err := docs.Validate(baseURL)
			if test.ExpectedToBeValid {
				require.NoError(t, err)
			} else {
				require.Error(t, err)
			}
		})
	}
}

func
TestDocuments_ValidateEvent(t *testing.T) {
	// Table-driven validation tests for the Event resources of an ORD document.
	// Each case mutates exactly one field of the valid fixture returned by
	// fixORDDocument(); since ExpectedToBeValid is left false, every case is
	// expected to make Documents.Validate fail.
	var tests = []struct {
		Name              string
		DocumentProvider  func() []*open_resource_discovery.Document
		ExpectedToBeValid bool
	}{
		// --- top-level mandatory/format-checked fields ---
		{
			Name: "Missing `ordID` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].OrdID = nil
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `ordID` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].OrdID = str.Ptr(invalidOrdID)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing `title` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Name = ""
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing `shortDescription` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ShortDescription = nil
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Exceeded length of `shortDescription` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ShortDescription = str.Ptr(strings.Repeat("a", invalidDescriptionLength))
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid empty `shortDescription` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ShortDescription = str.Ptr("")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "New lines in `shortDescription` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ShortDescription = str.Ptr(`newLine\n`)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing `description` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Description = nil
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing `version` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].VersionInput.Value = ""
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `version` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].VersionInput.Value = invalidVersion
				return []*open_resource_discovery.Document{doc}
			},
		},
		// --- `changeLogEntries`: per-entry field checks, then JSON-shape checks ---
		{
			Name: "Missing field `version` of field `changeLogEntries` for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToMissingVersion)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid field `version` of field `changeLogEntries` for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToWrongVersion)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing field `releaseStatus` of field `changeLogEntries` for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToMissingReleaseStatus)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid field `releaseStatus` of field `changeLogEntries` for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToWrongReleaseStatus)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing field `date` of field `changeLogEntries` for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToMissingDate)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid field `date` of field `changeLogEntries` for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToWrongDate)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid field `url` of field `changeLogEntries` for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ChangeLogEntries = json.RawMessage(invalidChangeLogEntriesDueToWrongURL)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `changeLogEntries` field when it is invalid JSON for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ChangeLogEntries = json.RawMessage(invalidJson)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `changeLogEntries` field when it isn't a JSON array for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ChangeLogEntries = json.RawMessage("{}")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `changeLogEntries` field when it is an empty JSON array for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ChangeLogEntries = json.RawMessage("[]")
				return []*open_resource_discovery.Document{doc}
			},
		},
		// --- package / bundle / visibility references ---
		{
			Name: "Missing `partOfPackage` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].OrdPackageID = nil
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `partOfPackage` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].OrdPackageID = str.Ptr(invalidOrdID)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `partOfConsumptionBundle` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].OrdBundleID = str.Ptr(invalidOrdID)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing `visibility` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Visibility = nil
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `visibility` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Visibility = str.Ptr("wrongVisibility")
				return []*open_resource_discovery.Document{doc}
			},
		},
		// --- `links` ---
		{
			Name: "Missing `title` field in `Links` for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Links = json.RawMessage(invalidLinkDueToMissingTitle)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing `url` field in `Links` for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Links = json.RawMessage(invalidLinkDueToMissingURL)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `url` field in `Links` for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Links = json.RawMessage(invalidLinkDueToWrongURL)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `links` field when it is invalid JSON for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Links = json.RawMessage(invalidJson)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `links` field when it isn't a JSON array for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Links = json.RawMessage("{}")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `links` field when it is an empty JSON array for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Links = json.RawMessage("[]")
				return []*open_resource_discovery.Document{doc}
			},
		},
		// --- `partOfProducts` ---
		{
			Name: "Invalid element of `partOfProducts` array field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].PartOfProducts = json.RawMessage(invalidPartOfProductsElement)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `partOfProducts` field when the JSON array is empty for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].PartOfProducts = json.RawMessage("[]")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `partOfProducts` field when it is invalid JSON for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].PartOfProducts = json.RawMessage(invalidJson)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `partOfProducts` field when it isn't a JSON array for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].PartOfProducts = json.RawMessage("{}")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `partOfProducts` field when it contains non string value for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].PartOfProducts = json.RawMessage(invalidPartOfProductsIntegerElement)
				return []*open_resource_discovery.Document{doc}
			},
		},
		// --- `resourceDefinitions` and nested `accessStrategies` ---
		{
			Name: "Missing `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions = nil
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing field `type` of `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].Type = ""
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid field `type` of `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].Type = invalidType
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Field `type` value is not `custom` when field `customType` is provided for `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].CustomType = "test:test:v1"
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			// NOTE(review): name is missing a space ("`custom`for") — cosmetic, left as-is.
			Name: "Invalid `customType` value when field `type` has value `custom`for `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].Type = "custom"
				doc.EventResources[0].ResourceDefinitions[0].CustomType = invalidCustomType
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing field `mediaType` of `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].MediaType = ""
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid field `mediaType` of `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].MediaType = invalidMediaType
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing field `url` of `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].URL = ""
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid field `url` of `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].URL = invalidUrl
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing field `accessStrategies` of `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].AccessStrategy = nil
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing field `type` for `accessStrategies` of `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].AccessStrategy[0].Type = ""
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid field `type` for `accessStrategies` of `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].AccessStrategy[0].Type = invalidType
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid field `customType` when field `type` is not `custom` for `accessStrategies` of `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].AccessStrategy[0].Type = "open"
				doc.EventResources[0].ResourceDefinitions[0].AccessStrategy[0].CustomType = "foo"
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid field `customDescription` when field `type` is not `custom` for `accessStrategies` of `resourceDefinitions` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ResourceDefinitions[0].AccessStrategy[0].Type = "open"
				doc.EventResources[0].ResourceDefinitions[0].AccessStrategy[0].CustomDescription = "foo"
				return []*open_resource_discovery.Document{doc}
			},
		},
		// --- `tags` ---
		{
			Name: "Invalid value for `tags` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Tags = json.RawMessage(invalidTagsValue)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `tags` field when it is invalid JSON for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Tags = json.RawMessage(invalidJson)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `tags` field when it isn't a JSON array for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Tags = json.RawMessage("{}")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `tags` field when the JSON array is empty for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Tags = json.RawMessage("[]")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `tags` field when it contains non string value for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Tags = json.RawMessage(invalidTagsValueIntegerElement)
				return []*open_resource_discovery.Document{doc}
			},
		},
		// --- `labels` ---
		{
			Name: "Invalid JSON `Labels` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Labels = json.RawMessage(invalidJson)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid JSON object `Labels` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Labels = json.RawMessage(`[]`)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "`Labels` values are not array for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Labels = json.RawMessage(invalidLabelsWhenValueIsNotArray)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "`Labels` values are not array of strings for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Labels = json.RawMessage(invalidLabelsWhenValuesAreNotArrayOfStrings)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid key for JSON `Labels` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Labels = json.RawMessage(invalidLabelsWhenKeyIsWrong)
				return []*open_resource_discovery.Document{doc}
			},
		},
		// --- `countries` ---
		{
			Name: "Invalid value for `countries` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Countries = json.RawMessage(invalidCountriesElement)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `countries` field when it is invalid JSON for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Countries = json.RawMessage(invalidJson)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `countries` field when it isn't a JSON array for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Countries = json.RawMessage("{}")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `countries` field when the JSON array is empty for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Countries = json.RawMessage("[]")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `countries` field when it contains non string value for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Countries = json.RawMessage(invalidCountriesNonStringElement)
				return []*open_resource_discovery.Document{doc}
			},
		},
		// --- `lineOfBusiness` ---
		{
			Name: "Invalid value for `lineOfBusiness` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].LineOfBusiness = json.RawMessage(invalidLineOfBusinessElement)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `lineOfBusiness` field when it is invalid JSON for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].LineOfBusiness = json.RawMessage(invalidJson)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `lineOfBusiness` field when it isn't a JSON array for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].LineOfBusiness = json.RawMessage("{}")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `lineOfBusiness` field when the JSON array is empty for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].LineOfBusiness = json.RawMessage("[]")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			// Reuses the countries fixture; any non-string element is equally invalid here.
			Name: "Invalid `lineOfBusiness` field when it contains non string value for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].LineOfBusiness = json.RawMessage(invalidCountriesNonStringElement)
				return []*open_resource_discovery.Document{doc}
			},
		},
		// --- `industry` ---
		{
			Name: "Invalid value for `industry` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Industry = json.RawMessage(invalidIndustryElement)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `industry` field when it is invalid JSON for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Industry = json.RawMessage(invalidJson)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `industry` field when it isn't a JSON array for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Industry = json.RawMessage("{}")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `industry` field when the JSON array is empty for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Industry = json.RawMessage("[]")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `industry` field when it contains non string value for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Industry = json.RawMessage(invalidIndustryNonStringElement)
				return []*open_resource_discovery.Document{doc}
			},
		},
		// --- release lifecycle: releaseStatus / sunsetDate / successor ---
		{
			Name: "Missing `releaseStatus` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ReleaseStatus = str.Ptr("")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `releaseStatus` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ReleaseStatus = str.Ptr("wrongValue")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing `sunsetDate` field when `releaseStatus` field has value `deprecated` for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ReleaseStatus = str.Ptr("deprecated")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `sunsetDate` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ReleaseStatus = str.Ptr("deprecated")
				doc.EventResources[0].SunsetDate = str.Ptr("0000-00-00T09:35:30+0000")
				doc.EventResources[0].Successor = str.Ptr(event2ORDID)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing `successor` field when `releaseStatus` field has value `deprecated` for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].ReleaseStatus = str.Ptr("deprecated")
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `successor` field for Event",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].Successor = str.Ptr("invalidValue")
				return []*open_resource_discovery.Document{doc}
			},
		},
		// Test invalid entity relations
		{
			Name: "Event has a reference to unknown Package",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].OrdPackageID = str.Ptr(unknownPackageOrdID)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Event has a reference to unknown Bundle",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].OrdBundleID = str.Ptr(unknownBundleOrdID)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Event has a reference to unknown Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.EventResources[0].PartOfProducts = json.RawMessage(fmt.Sprintf(`["%s"]`, unknownProductOrdID))
				// (continuation of the last Event test case's DocumentProvider)
				return []*open_resource_discovery.Document{doc}
			},
		},
	}

	// Run every case as a subtest; unless ExpectedToBeValid is set,
	// Documents.Validate is expected to return an error.
	for _, test := range tests {
		t.Run(test.Name, func(t *testing.T) {
			docs := open_resource_discovery.Documents{test.DocumentProvider()[0]}
			err := docs.Validate(baseURL)
			if test.ExpectedToBeValid {
				require.NoError(t, err)
			} else {
				require.Error(t, err)
			}
		})
	}
}

// Table-driven validation tests for the Product entries of an ORD document.
// Each case mutates exactly one field of the valid fixture returned by
// fixORDDocument() and expects validation to fail.
func TestDocuments_ValidateProduct(t *testing.T) {
	var tests = []struct {
		Name              string
		DocumentProvider  func() []*open_resource_discovery.Document
		ExpectedToBeValid bool
	}{
		{
			Name: "Missing `id` field for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].OrdID = ""
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `id` field for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].OrdID = invalidOrdID
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing `title` field for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].Title = ""
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing `shortDescription` field for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].ShortDescription = ""
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Exceeded length of `shortDescription` field for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].ShortDescription = strings.Repeat("a", invalidDescriptionLength)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "New lines in `shortDescription` field for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].ShortDescription = `newLine\n`
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Missing `vendor` field for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].Vendor = ""
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `vendor` field for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].Vendor = invalidOrdID
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `parent` field for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].Parent = str.Ptr(invalidType)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid `ppmsObjectId` field for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].PPMSObjectID = str.Ptr(invalidType)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid JSON `Labels` field for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].Labels = json.RawMessage(invalidJson)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid JSON object `Labels` field for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].Labels = json.RawMessage(`[]`)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "`Labels` values are not array for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].Labels = json.RawMessage(invalidLabelsWhenValueIsNotArray)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "`Labels` values are not array of strings for Product",
			DocumentProvider: func() []*open_resource_discovery.Document {
				doc := fixORDDocument()
				doc.Products[0].Labels = json.RawMessage(invalidLabelsWhenValuesAreNotArrayOfStrings)
				return []*open_resource_discovery.Document{doc}
			},
		},
		{
			Name: "Invalid key for JSON `Labels` field for Product",
			DocumentProvider: func()
[]*open_resource_discovery.Document { doc := fixORDDocument() doc.Products[0].Labels = json.RawMessage(invalidLabelsWhenKeyIsWrong) return []*open_resource_discovery.Document{doc} }, }, // Test invalid entity relations { Name: "Product has a reference to unknown Vendor", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Products[0].Vendor = unknownVendorOrdID return []*open_resource_discovery.Document{doc} }, }, } for _, test := range tests { t.Run(test.Name, func(t *testing.T) { docs := open_resource_discovery.Documents{test.DocumentProvider()[0]} err := docs.Validate(baseURL) if test.ExpectedToBeValid { require.NoError(t, err) } else { require.Error(t, err) } }) } } func TestDocuments_ValidateVendor(t *testing.T) { var tests = []struct { Name string DocumentProvider func() []*open_resource_discovery.Document ExpectedToBeValid bool }{ { Name: "Missing `id` field for Vendor", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Vendors[0].OrdID = "" return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `id` field for Vendor", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Vendors[0].OrdID = invalidOrdID return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `title` field for Vendor", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Vendors[0].Title = "" return []*open_resource_discovery.Document{doc} }, }, { Name: "Missing `type` field for Vendor", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Vendors[0].Type = "" return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `type` field for Vendor", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Vendors[0].Type = invalidType return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid JSON `Labels` field for 
Product", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Products[0].Labels = json.RawMessage(invalidJson) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid JSON object `Labels` field for Vendor", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Vendors[0].Labels = json.RawMessage(`[]`) return []*open_resource_discovery.Document{doc} }, }, { Name: "`Labels` values are not array for Vendor", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Vendors[0].Labels = json.RawMessage(invalidLabelsWhenValueIsNotArray) return []*open_resource_discovery.Document{doc} }, }, { Name: "`Labels` values are not array of strings for Vendor", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Vendors[0].Labels = json.RawMessage(invalidLabelsWhenValuesAreNotArrayOfStrings) return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid key for JSON `Labels` field for Vendor", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Vendors[0].Labels = json.RawMessage(invalidLabelsWhenKeyIsWrong) return []*open_resource_discovery.Document{doc} }, }, } for _, test := range tests { t.Run(test.Name, func(t *testing.T) { docs := open_resource_discovery.Documents{test.DocumentProvider()[0]} err := docs.Validate(baseURL) if test.ExpectedToBeValid { require.NoError(t, err) } else { require.Error(t, err) } }) } } func TestDocuments_ValidateTombstone(t *testing.T) { var tests = []struct { Name string DocumentProvider func() []*open_resource_discovery.Document ExpectedToBeValid bool }{ //TODO: further clarification is needed as what is required by the spec /*{ Name: "Missing `ordId` field for Tombstone", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Tombstones[0].OrdID = "" return []*open_resource_discovery.Document{doc} }, }, 
{ Name: "Invalid `ordId` field for Tombstone", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Tombstones[0].OrdID = invalidOrdID return []*open_resource_discovery.Document{doc} }, },*/{ Name: "Missing `removalDate` field for Tombstone", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Tombstones[0].RemovalDate = "" return []*open_resource_discovery.Document{doc} }, }, { Name: "Invalid `removalDate` field for Tombstone", DocumentProvider: func() []*open_resource_discovery.Document { doc := fixORDDocument() doc.Tombstones[0].RemovalDate = "0000-00-00T15:04:05Z" return []*open_resource_discovery.Document{doc} }, }, } for _, test := range tests { t.Run(test.Name, func(t *testing.T) { docs := open_resource_discovery.Documents{test.DocumentProvider()[0]} err := docs.Validate(baseURL) if test.ExpectedToBeValid { require.NoError(t, err) } else { require.Error(t, err) } }) } }
package main import ( "github.com/suapapa/go_devices/tm1638" "periph.io/x/periph/conn/gpio/gpioreg" "periph.io/x/periph/host" ) func main() { if _, err := host.Init(); err != nil { panic(err) } dev, err := tm1638.Open( gpioreg.ByName("17"), // data gpioreg.ByName("27"), // clk gpioreg.ByName("22"), // stb ) if err != nil { panic(err) } dev.SetString("HelloWrd") for i := 0; i < 8; i++ { if i%2 == 0 { dev.SetLed(i, tm1638.Green) } else { dev.SetLed(i, tm1638.Red) } } }
// Package price exposes read helpers over stored investment prices.
package price

import (
	"github.com/jchavannes/money/app/db"
)

// GetHistory returns every stored price record for the given investment,
// delegating directly to the db layer.
func GetHistory(investment *db.Investment) ([]*db.InvestmentPrice, error) {
	return db.GetAllInvestmentPricesForInvestment(investment)
}

// GetRecentPrice returns the most recently stored price record for the
// given investment, delegating directly to the db layer.
func GetRecentPrice(investment *db.Investment) (*db.InvestmentPrice, error) {
	return db.GetLastInvestmentPrice(investment)
}
package salsa20

import (
	"bytes"
	"crypto/cipher"
	"crypto/rand"
	"math/big"
	"testing"

	"gx/ipfs/QmW7VUmSvhvSGbYbdsh7uRjhGmsYkc9fL8aJ5CorxxrU5N/go-crypto/salsa20"
)

// TestRandom cross-checks this package's Salsa20 implementation against
// the reference go-crypto implementation: for message lengths 0..9999 it
// encrypts random data with random keys/nonces via a direct call, via a
// cipher.StreamWriter, and into a truncated destination buffer, and
// requires all outputs to match the reference byte-for-byte.
func TestRandom(t *testing.T) {
	var key [32]byte
	nonce := make([]byte, 8)
	for i := 0; i < 10000; i++ {
		msg := make([]byte, i)
		rand.Read(key[:])
		rand.Read(nonce)
		rand.Read(msg)
		c0 := make([]byte, len(msg))
		c1 := make([]byte, len(msg))
		c2 := make([]byte, len(msg))
		// c0: reference implementation; c1/c2: implementation under test.
		salsa20.XORKeyStream(c0, msg, nonce, &key)
		XORKeyStream(c1, msg, nonce, &key)
		XORKeyStreamWriter(c2, msg, nonce, &key)
		if !bytes.Equal(c0, c1) {
			t.Fatalf("key=%x nonce=%x msg=%x\n expected=%x\n actually=%x", key, nonce, msg, c0, c1)
		}
		if !bytes.Equal(c1, c2) {
			t.Fatalf("key=%x nonce=%x msg=%x\n expected=%x\n actually=%x", key, nonce, msg, c1, c2)
		}
		// test truncated dst: both implementations must stop at len(dst).
		x := randInt(len(msg))
		c3 := make([]byte, len(msg)-x)
		c4 := make([]byte, len(msg)-x)
		salsa20.XORKeyStream(c3, msg, nonce, &key)
		XORKeyStream(c4, msg, nonce, &key)
		if !bytes.Equal(c3, c4) {
			t.Fatalf("key=%x nonce=%x msg=%x\n expected=%x\n actually=%x", key, nonce, msg, c3, c4)
		}
	}
}

// XORKeyStream encrypts in into out with this package's cipher.Stream
// in a single call.
func XORKeyStream(out, in []byte, nonce []byte, key *[32]byte) {
	c := New(key, nonce)
	c.XORKeyStream(out, in)
}

// XORKeyStreamWriter encrypts in into out by pushing randomly sized
// chunks through a cipher.StreamWriter, exercising the chunked path.
func XORKeyStreamWriter(out, in []byte, nonce []byte, key *[32]byte) {
	b := new(bytes.Buffer)
	w := &cipher.StreamWriter{S: New(key, nonce), W: b}
	for len(in) > 0 {
		// Chunk size may be 0; the loop still terminates with probability 1
		// since randInt eventually returns a positive value.
		i := randInt(len(in))
		if _, err := w.Write(in[:i]); err != nil {
			panic(err)
		}
		in = in[i:]
	}
	if err := w.Close(); err != nil {
		panic(err)
	}
	copy(out, b.Bytes())
}

// randInt returns a cryptographically random int in [0, max].
func randInt(max int) int {
	m := big.NewInt(int64(max) + 1)
	n, err := rand.Int(rand.Reader, m)
	if err != nil {
		panic(err)
	}
	return int(n.Int64())
}
/*
 * Lists all hardware groups associated with a given location (and account alias).
 */
package main

import (
	"flag"
	"fmt"
	"os"
	"path"

	"github.com/grrtrr/clcv2/clcv2cli"
	"github.com/grrtrr/exit"
	"github.com/kr/pretty"
	"github.com/olekukonko/tablewriter"
)

// main parses flags, fetches the hardware-group tree for the given
// location and renders it either as a raw debug dump (-simple) or as a
// formatted table of the root node's immediate child groups.
func main() {
	var simple = flag.Bool("simple", false, "Use simple (debugging) output format")

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "usage: %s [options] <Location>\n", path.Base(os.Args[0]))
		flag.PrintDefaults()
	}
	flag.Parse()

	/* The Location argument is always required */
	if flag.NArg() != 1 {
		flag.Usage()
		os.Exit(1)
	}

	client, err := clcv2cli.NewCLIClient()
	if err != nil {
		exit.Fatal(err.Error())
	}

	rootNode, err := client.GetGroups(flag.Arg(0))
	if err != nil {
		exit.Fatalf("failed to list hardware groups: %s", err)
	}

	if *simple {
		// Raw structural dump, useful for debugging the API response.
		pretty.Println(rootNode)
	} else {
		fmt.Printf("%s in %s (%s, %d servers), ID %s:\n", rootNode.Name, rootNode.LocationId,
			rootNode.Status, rootNode.Serverscount, rootNode.Id)

		table := tablewriter.NewWriter(os.Stdout)
		table.SetAutoFormatHeaders(false)
		table.SetAlignment(tablewriter.ALIGN_LEFT)
		table.SetAutoWrapText(true)

		table.SetHeader([]string{"Name", "UUID", "Description", "#Servers", "Type"})
		for _, g := range rootNode.Groups {
			table.Append([]string{g.Name, g.Id, g.Description, fmt.Sprint(g.Serverscount), g.Type})
		}
		table.Render()
	}
}
package ordersprices

import (
	"fmt"
	"strconv"

	. "go-sugar/db"
	"go-sugar/db/request"

	"github.com/gin-gonic/gin"
)

// Columns of the order/price link table.
const (
	OrderID string = "order_id"
	UserID  string = "user_id"
	PriceID string = "price_id"
)

// Repository OrderPrices provides data access for the order/price link table.
type Repository struct {
	tableName string       // backing table name
	Context   *gin.Context // request context, set by the HTTP layer
}

// GetAll OrderPrices returns every row of the table; on query failure the
// error is printed and an empty slice is returned.
func (r *Repository) GetAll() []OrderPrice {
	Request := request.New(DB)
	rows, err := Request.
		Select([]string{}).
		From(r.tableName).Query()
	if err != nil {
		fmt.Println(err)
		return []OrderPrice{}
	}
	return parseRows(rows)
}

// Create new OrderPrice rows in a single batched INSERT; returns the
// input items unchanged on success.
func (r *Repository) Create(items []OrderPrice) ([]OrderPrice, error) {
	Request := request.New(DB)
	keys := []string{OrderID, UserID, PriceID}
	var values = [][]string{}
	for _, item := range items {
		orderID := strconv.Itoa(item.OrderID)
		values = append(values, []string{orderID, item.UserID.ToString(), item.PriceID.ToString()})
	}
	_, err := Request.Insert().
		Into(r.tableName).
		Values(keys, values).
		Exec()
	if err != nil {
		fmt.Println(err)
		return nil, err
	}
	return items, nil
}

// Validate return bool(valid or not) and ValidateError struct.
// An item is invalid when a row with the same OrderID and PriceID already
// exists, or when the duplicate-lookup query itself fails.
func (r *Repository) Validate(item *OrderPrice) (bool, ValidateError) {
	valid := true
	Request := request.New(DB)
	id := strconv.Itoa(item.OrderID)
	priceID := item.PriceID.ToString()
	validateError := ValidateError{}
	rows, err := Request.
		Select([]string{}).
		From(r.tableName).
		Where(Request.NewCondition(OrderID, "=", id, "AND", false)).
		Where(Request.NewCondition(PriceID, "=", priceID, "AND", false)).
		Query()
	if err == nil {
		selectedOrderPrices := parseRows(rows)
		if len(selectedOrderPrices) > 0 {
			// Duplicate (OrderID, PriceID) pair found.
			validateError.OrderIDPriceID = "OrderPrice with this OrderID and PriceID already exist"
			validateError.AddToErrorMessage(validateError.OrderIDPriceID)
			valid = false
		}
	} else {
		valid = false
		validateError.ErrorMessage = err.Error()
	}
	return valid, validateError
}

// DeleteByOrderID removes all rows with the given order id from the DB;
// returns false if SQL generation or execution fails.
func (r *Repository) DeleteByOrderID(id string) bool {
	Request := request.New(DB)
	str, sqlErr := Request.
		Delete().
		From(r.tableName).
		Where(Request.NewCond(OrderID, "=", id)).
		ToSQL()
	if sqlErr != nil {
		fmt.Println(sqlErr)
		return false
	}
	result, err := DB.Exec(str)
	if err != nil {
		fmt.Println(err)
		return false
	}
	fmt.Println(result.LastInsertId()) // id of the last deleted object
	fmt.Println(result.RowsAffected()) // number of affected rows
	return true
}

// GetByOrderID gets all rows with the given OrderID from the DB; on query
// failure the error is printed and an empty slice is returned.
func (r *Repository) GetByOrderID(id string) []OrderPrice {
	Request := request.New(DB)
	rows, err := Request.
		Select([]string{}).
		From(r.tableName).
		Where(Request.NewCond(OrderID, "=", id)).
		Query()
	if err != nil {
		fmt.Println(err)
		return []OrderPrice{}
	}
	return parseRows(rows)
}
package analyse

import (
	"github.com/valyala/fastjson"
	"io/ioutil"
	"log"
	"os"
	"path/filepath"
	"strings"
)

// Parsed Postman documents, lazily populated by initResource.
var collectionJson *fastjson.Value
var exampleJson *fastjson.Value

// Profile selects how resource files are located: "prod" resolves them
// relative to the executable's directory, anything else uses the CWD.
var Profile = "dev"

// GetCollectionJson returns the parsed collection document, running
// initResource on first use. It panics if initialization fails.
func GetCollectionJson() *fastjson.Value {
	if collectionJson == nil {
		log.Println("初始化Collection")
		initResource()
	} else {
		log.Println("无需初始化Collection")
	}
	if collectionJson == nil {
		// Still nil after an init attempt: treat as fatal.
		panic("无法初始化 Collection")
	}
	return collectionJson
}

// GetExampleJson returns the parsed example document, running
// initResource on first use. It panics if initialization fails.
func GetExampleJson() *fastjson.Value {
	if exampleJson == nil {
		log.Println("初始化Example")
		initResource()
	} else {
		log.Println("无需初始化Example")
	}
	if exampleJson == nil {
		// Still nil after an init attempt: treat as fatal.
		// Bug fix: this message previously said "Collection" (copy-paste).
		panic("无法初始化 Example")
	}
	return exampleJson
}

// initResource loads the Postman environment and collection files,
// substitutes environment variables into the collection source, and
// populates the package-level collectionJson / exampleJson documents.
// Any I/O or parse failure panics.
func initResource() {
	// Read the environment source file.
	var environmentSource *fastjson.Value
	envFileName := "postman_environment.json"
	dataFileName := "postman_collection.json"
	if Profile == "prod" {
		// In prod, resolve resources next to the executable.
		envFileName = GetCurrentDirectory() + "/" + envFileName
		dataFileName = GetCurrentDirectory() + "/" + dataFileName
	}
	if file, err := os.Open(envFileName); err == nil {
		defer file.Close()
		if source, err := ioutil.ReadAll(file); err == nil {
			var parser fastjson.Parser
			if environmentSource, err = parser.ParseBytes(source); err != nil {
				panic(err)
			}
		} else {
			panic(err)
		}
	} else {
		panic(err)
	}
	// Read the collection source file.
	var collectionSource *fastjson.Value
	if file, err := os.Open(dataFileName); err == nil {
		defer file.Close()
		if source, err := ioutil.ReadAll(file); err == nil {
			sourceText := string(source)
			// Substitute environment variables into the raw text before parsing.
			sourceText = processEnv(sourceText, environmentSource)
			var parser fastjson.Parser
			if collectionSource, err = parser.Parse(sourceText); err != nil {
				panic(err)
			}
		} else {
			panic(err)
		}
	} else {
		panic(err)
	}
	// Extract the collection document.
	collection := getCollection(collectionSource, environmentSource)
	// Extract the example document derived from the collection.
	example := getExample(collection)
	// (Disabled) debug dump of the collection to "collection-ruoli.json":
	//collectionFileName:="collection-ruoli.json"
	//var collectionFile *os.File
	//defer collectionFile.Close()
	//if _,err := os.Stat(collectionFileName);err!=nil{
	//	collectionFile,_ = os.Create(collectionFileName)
	//}else{
	//	collectionFile, _ = os.OpenFile(collectionFileName, os.O_WRONLY|os.O_TRUNC, 0600)
	//}
	//collectionFile.Write([]byte(collection.String()))
	collectionJson = collection
	// (Disabled) debug dump of the example to "example-ruoli.json":
	//exampleFileName:="example-ruoli.json"
	//var exampleFile *os.File
	//defer exampleFile.Close()
	//if _,err := os.Stat(exampleFileName);err!=nil{
	//	exampleFile,_ = os.Create(exampleFileName)
	//}else{
	//	exampleFile, _ = os.OpenFile(exampleFileName, os.O_WRONLY|os.O_TRUNC, 0600)
	//}
	//exampleFile.Write([]byte(example.String()))
	exampleJson = example
}

// GetCurrentDirectory returns the absolute directory of the running
// executable with forward slashes; the process exits on failure.
func GetCurrentDirectory() string {
	dir, err := filepath.Abs(filepath.Dir(os.Args[0])) // absolute path of the executable's directory
	if err != nil {
		log.Fatal(err)
	}
	return strings.Replace(dir, "\\", "/", -1) // normalize Windows separators to '/'
}
package models

import (
	"os"
)

// Article is a simple text document: a title and a raw body.
type Article struct {
	Title string // used as the base of the on-disk filename
	Body  []byte
}

// save persists the article body to "<Title>.txt" in the current
// directory with owner-only read/write permissions (0600). It returns
// any error from writing the file.
func (a *Article) save() error {
	filename := a.Title + ".txt"
	// ioutil.WriteFile has been deprecated since Go 1.16; os.WriteFile
	// is the drop-in replacement with identical semantics.
	return os.WriteFile(filename, a.Body, 0600)
}
// Demonstrates bytes.Split on a space-separated byte slice.
package main

import (
	"bytes"
	"fmt"
)

// main splits a country list on single spaces and prints the original
// string, the raw split result ([][]byte prints as numeric bytes), and
// a fixed-size byte array.
func main() {
	raw := []byte("Australia Canada Japan Germany India")
	sep := []byte{' '}

	parts := bytes.Split(raw, sep)
	fmt.Println("Countries", string(raw))
	fmt.Println("Split", parts)

	data := [4]byte{1, 2, 4, 5}
	fmt.Println(data)
}
// Command-line entry point: parses NATS connection flags and starts a
// hello provider.
package main

import (
	"flag"

	"github.com/florian74/nats-provider/provider"
	"github.com/nats-io/nats.go"
)

// main reads connection settings from flags and connects the provider.
func main() {
	var (
		url       = flag.String("url", nats.DefaultURL, "url")
		queue     = flag.String("queue", "default", "queue target name")
		topic     = flag.String("topic", "default", "topic target name")
		name      = flag.String("name", "provider", "provider name")
		reply     = flag.String("reply", "", "provider reply topic")
		useStream = flag.Bool("stream", false, "use JetStream")
	)
	flag.Parse()

	var w provider.Provider = &provider.HelloProvider{}
	w.Connect(*url, *queue, *topic, *name, *reply, *useStream)
}
// Copyright 2020 The Ebiten Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// +build ignore

// NOTE: this is a Kage shader program (Ebiten's Go-like shading
// language). It is compiled by Ebiten at runtime, not by the Go
// toolchain — hence the build-ignore tag above.

package main

// Time is a uniform: elapsed time driving the animation.
var Time float

// Mouse is a uniform: the cursor position in pixels.
var Mouse vec2

// viewportSize is a predefined function.

// Vertex maps pixel-space positions into clip space: scales x/y by
// 2/viewport and translates by (-1,-1) so (0,0)..(w,h) covers [-1,1]².
func Vertex(position vec2, texCoord vec2, color vec4) vec4 {
	return mat4(
		2/viewportSize().x, 0, 0, 0,
		0, 2/viewportSize().y, 0, 0,
		0, 0, 1, 0,
		-1, -1, 0, 1,
	) * vec4(position, 0, 1)
}

// Fragment computes an animated plasma-like color per pixel by summing
// several time-modulated sine/cosine waves of the normalized position,
// offset by a quarter of the normalized mouse position.
func Fragment(position vec4) vec4 {
	pos := position.xy/viewportSize() + Mouse/viewportSize()/4
	color := 0.0
	color += sin(pos.x*cos(Time/15)*80) + cos(pos.y*cos(Time/15)*10)
	color += sin(pos.y*sin(Time/10)*40) + cos(pos.x*sin(Time/25)*40)
	color += sin(pos.x*sin(Time/5)*10) + sin(pos.y*sin(Time/35)*80)
	color *= sin(Time/10) * 0.5
	return vec4(color, color*0.5, sin(color+Time/3)*0.75, 1)
}
// Licensed to SolID under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. SolID licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package client

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"net/url"
	"strings"

	"github.com/square/go-jose/v3"
	"google.golang.org/protobuf/types/known/wrapperspb"

	corev1 "zntr.io/solid/api/gen/go/oidc/core/v1"
	"zntr.io/solid/api/oidc"
	"zntr.io/solid/internal/services"
	"zntr.io/solid/pkg/sdk/rfcerrors"
	"zntr.io/solid/pkg/sdk/types"
	"zntr.io/solid/pkg/server/profile"
	"zntr.io/solid/pkg/server/storage"
)

// service implements the client service: OIDC dynamic client
// registration backed by a client store and per-application-type
// server profile settings.
type service struct {
	clients       storage.ClientWriter // persistence for registered clients
	serverProfile profile.Server       // registration settings per application_type
}

// New builds and returns a client service implementation.
func New(clients storage.ClientWriter, serverProfile profile.Server) services.Client {
	return &service{
		clients:       clients,
		serverProfile: serverProfile,
	}
}

// -----------------------------------------------------------------------------

// Register validates a dynamic client registration request, builds a
// client from the validated metadata, persists it, and returns the
// response carrying either the created client or an RFC-style error.
func (s *service) Register(ctx context.Context, req *corev1.ClientRegistrationRequest) (*corev1.ClientRegistrationResponse, error) {
	res := &corev1.ClientRegistrationResponse{}

	// Check req nullity
	if req == nil {
		res.Error = rfcerrors.InvalidRequest().Build()
		return res, fmt.Errorf("unable to process nil request")
	}
	if req.Metadata == nil {
		res.Error = rfcerrors.InvalidRequest().Build()
		return res, fmt.Errorf("unable to process nil metadata")
	}

	// Check application_type value
	if req.Metadata.ApplicationType == nil {
		// Default to web
		req.Metadata.ApplicationType = &wrapperspb.StringValue{Value: oidc.ApplicationTypeServerSideWeb}
	}

	// Validate authorization request
	publicErr, err := s.validateRegistration(ctx, req)
	if err != nil {
		res.Error = publicErr
		return res, err
	}

	// Create client from the required metadata fields.
	c := &corev1.Client{
		ApplicationType:         req.Metadata.ApplicationType.Value,
		TokenEndpointAuthMethod: req.Metadata.TokenEndpointAuthMethod.Value,
		Contacts:                req.Metadata.Contacts,
		GrantTypes:              req.Metadata.GrantTypes,
		ResponseTypes:           req.Metadata.ResponseTypes,
		RedirectUris:            req.Metadata.RedirectUris,
	}

	// Assign optional attributes (names, URIs, JWKS, subject type).
	if err := s.applyRegistrationRequest(req, c, res); err != nil {
		return res, err
	}

	// Save client in persistence; the store assigns the client id.
	c.ClientId, err = s.clients.Register(ctx, c)
	if err != nil {
		res.Error = rfcerrors.ServerError().Build()
		return res, fmt.Errorf("unable to register client in persistence: %w", err)
	}

	// Assign client
	res.Client = c

	// No error
	return res, nil
}

// -----------------------------------------------------------------------------

// applyRegistrationRequest copies the optional metadata attributes from
// the request onto the client and enforces subject_type constraints
// (pairwise requires sector_identifier). On failure it sets res.Error
// and returns a non-nil error.
func (s *service) applyRegistrationRequest(req *corev1.ClientRegistrationRequest, c *corev1.Client, res *corev1.ClientRegistrationResponse) error {
	// Check arguments
	if req == nil {
		res.Error = rfcerrors.InvalidRequest().Build()
		return fmt.Errorf("unable to process nil request")
	}
	if c == nil {
		res.Error = rfcerrors.InvalidRequest().Build()
		return fmt.Errorf("unable to process nil request")
	}
	if res == nil {
		// NOTE(review): res is nil in this branch, so this assignment would
		// panic; the branch is effectively dead or buggy — confirm intent.
		res.Error = rfcerrors.InvalidRequest().Build()
		return fmt.Errorf("unable to process nil response")
	}

	if req.Metadata.ClientName != nil {
		// Assign to client
		c.ClientName = req.Metadata.ClientName.Value
	}
	if req.Metadata.ClientUri != nil {
		// Assign to client
		c.ClientUri = req.Metadata.ClientUri.Value
	}
	if req.Metadata.JwkUri != nil {
		// Assign to client
		c.JwksUri = req.Metadata.JwkUri.Value
	}
	if req.Metadata.PolicyUri != nil {
		// Assign to client
		c.PolicyUri = req.Metadata.PolicyUri.Value
	}
	if req.Metadata.TosUri != nil {
		// Assign to client
		c.TosUri = req.Metadata.TosUri.Value
	}
	if req.Metadata.LogoUri != nil {
		// Assign to client
		c.LogoUri = req.Metadata.LogoUri.Value
	}

	// JWKS
	if req.Metadata.Jwks != nil {
		// Assign to client
		c.Jwks = req.Metadata.Jwks.Value
	}

	// Subject type
	if req.Metadata.SubjectType != nil {
		subjectType := req.Metadata.SubjectType.Value

		// Check enumeration
		switch subjectType {
		case oidc.SubjectTypePublic, oidc.SubjectTypePairwise:
			c.SubjectType = subjectType
		default:
			res.Error = rfcerrors.InvalidClientMetadata().Build()
			return fmt.Errorf("subject_type contains invalid value")
		}

		// Sector identifier is mandatory with pairwise subject type
		if subjectType == oidc.SubjectTypePairwise {
			if req.Metadata.SectorIdentifier != nil {
				// Assign to client
				c.SectorIdentifier = req.Metadata.SectorIdentifier.Value
			} else {
				res.Error = rfcerrors.InvalidClientMetadata().Build()
				return fmt.Errorf("sector_identifier is mandatory with subject_type")
			}
		}
	} else {
		// Default to public
		c.SubjectType = oidc.SubjectTypePublic
	}

	// No error
	return nil
}

// validateRegistration checks the registration request against the
// server profile for its application_type: auth method, response types,
// grant types (with flow-specific constraints), redirect URIs, JWKS and
// scope defaults. It returns (publicError, internalError); both are nil
// when the request is valid.
func (s *service) validateRegistration(ctx context.Context, req *corev1.ClientRegistrationRequest) (*corev1.Error, error) {
	// Check nil
	if req == nil {
		return rfcerrors.InvalidRequest().Build(), fmt.Errorf("unable to process nil request")
	}
	if req.Metadata == nil {
		return rfcerrors.InvalidRequest().Build(), fmt.Errorf("unable to process nil metadata")
	}
	if req.Metadata.ApplicationType == nil {
		return rfcerrors.InvalidRequest().Build(), fmt.Errorf("unable to process nil application type")
	}

	// Retrieve settings according to application type
	clientSettings, ok := s.serverProfile.ApplicationType(req.Metadata.ApplicationType.Value)
	if !ok {
		return rfcerrors.InvalidRequest().Description("application_type contains an invalid or unsupported value.").Build(), fmt.Errorf("server could not handle given application_type '%s'", req.Metadata.ApplicationType)
	}

	// Token endpoint auth methods: mandatory and must be supported.
	if req.Metadata.TokenEndpointAuthMethod != nil {
		if !clientSettings.TokenEndpointAuthMethodsSupported().Contains(req.Metadata.TokenEndpointAuthMethod.Value) {
			return rfcerrors.InvalidClientMetadata().Description("token_endpoint_auth_method contains an invalid or unsupported value.").Build(), fmt.Errorf("token_endpoint_auth_method is invalid: '%s'", req.Metadata.TokenEndpointAuthMethod.Value)
		}
	} else {
		return rfcerrors.InvalidClientMetadata().Build(), fmt.Errorf("token_endpoint_auth_method should not be empty")
	}

	// Response Types: validate when provided, otherwise fall back to the
	// profile defaults.
	if len(req.Metadata.ResponseTypes) > 0 {
		if !clientSettings.ResponseTypesSupported().HasAll(req.Metadata.ResponseTypes...) {
			return rfcerrors.InvalidClientMetadata().Description("response_types contains an invalid or unsupported value.").Build(), fmt.Errorf("a response_types element is invalid: '%s', supported '%s'", req.Metadata.ResponseTypes, clientSettings.ResponseTypesSupported())
		}
	} else {
		// Assign default response types
		req.Metadata.ResponseTypes = clientSettings.ResponseTypesSupported()
	}

	// Grant types
	if len(req.Metadata.GrantTypes) > 0 {
		grantTypes := types.StringArray(req.Metadata.GrantTypes)

		// Supported grant_types
		if !clientSettings.GrantTypesSupported().HasOneOf(req.Metadata.GrantTypes...) {
			return rfcerrors.InvalidClientMetadata().Description("grant_types contains an invalid or unsupported value.").Build(), fmt.Errorf("a grant_types element is invalid: '%s', supported '%s'", req.Metadata.GrantTypes, clientSettings.GrantTypesSupported())
		}

		// `code` must be specified with `authorization_code` grant type
		if grantTypes.Contains(oidc.GrantTypeAuthorizationCode) {
			// Response_types should contain `code`
			if !types.StringArray(req.Metadata.ResponseTypes).Contains(oidc.ResponseTypeCode) {
				return rfcerrors.InvalidClientMetadata().Description("response_types contains an invalid or unsupported value for authorization code flow.").Build(), fmt.Errorf("response_types should contain `code`, supported '%s'", clientSettings.ResponseTypesSupported())
			}

			// Validate redirect_uris
			if len(req.Metadata.RedirectUris) == 0 {
				return rfcerrors.InvalidClientMetadata().Build(), fmt.Errorf("redirect_uris should not be empty for `authorization_code` grant type")
			}

			// Check redirect uris syntax
			for i := range req.Metadata.RedirectUris {
				// Prepare redirection uri
				_, err := url.ParseRequestURI(req.Metadata.RedirectUris[i])
				if err != nil {
					return rfcerrors.InvalidRedirectURI().Build(), fmt.Errorf("redirect_uri has an invalid syntax: %w", err)
				}
			}
		}

		// `token` must be specified with `client_credentials` or
		// `refresh_token` grant types.
		if grantTypes.Contains(oidc.GrantTypeClientCredentials) || grantTypes.Contains(oidc.GrantTypeRefreshToken) {
			// Response_types should contain `token`
			if !types.StringArray(req.Metadata.ResponseTypes).HasAll(oidc.ResponseTypeToken) {
				return rfcerrors.InvalidClientMetadata().Description("response_types must contain `token` for compatible grant_types.").Build(), fmt.Errorf("response_types should contain `token` only")
			}
		}
	} else {
		// Assign default grant types
		req.Metadata.GrantTypes = clientSettings.GrantTypesSupported()
	}

	// JWKS: must decode and be non-empty when provided; mandatory when
	// the client authenticates with private_key_jwt.
	if req.Metadata.Jwks != nil {
		// Try to decode JWKS
		var jwks jose.JSONWebKeySet
		if err := json.NewDecoder(bytes.NewBuffer(req.Metadata.Jwks.Value)).Decode(&jwks); err != nil {
			return rfcerrors.InvalidClientMetadata().Build(), fmt.Errorf("jwks is invalid: %w", err)
		}

		// JWKS should contain keys
		if len(jwks.Keys) == 0 {
			return rfcerrors.InvalidClientMetadata().Build(), fmt.Errorf("jwks is empty")
		}
	} else {
		// Check auth method
		if req.Metadata.TokenEndpointAuthMethod.Value == oidc.AuthMethodPrivateKeyJWT {
			return rfcerrors.InvalidClientMetadata().Build(), fmt.Errorf("jwks is mandatory for `private_key_jwt` authentication")
		}
	}

	if req.Metadata.Scope == nil {
		// Settings default scopes for client
		req.Metadata.Scope = &wrapperspb.StringValue{Value: strings.Join(clientSettings.DefaultScopes(), " ")}
	}

	// No error
	return nil, nil
}
package requests import "time" type CreateTeam struct { } type UpdateTeam struct { } func (c *CreateTeam) Valid() error { return validate.Struct(c) } func (c *UpdateTeam) Valid() error { return validate.Struct(c) }
package rule

import (
	"net/http"
	"testing"
)

// domainRuleTestConfig describes one DomainRule expectation: the rule's
// configuration plus a request host and the expected match verdict.
type domainRuleTestConfig struct {
	domain           string
	includeSubDomain bool
	testDomain       string
	testResult       bool
}

// TestDomainRule covers exact-domain matching, sub-domain matching,
// host:port handling and case-insensitive comparison.
func TestDomainRule(t *testing.T) {
	cases := []domainRuleTestConfig{
		{domain: "rocinax.com", includeSubDomain: true, testDomain: "rocinax.com", testResult: true},
		{domain: "rocinax.com", includeSubDomain: false, testDomain: "rocinax.com", testResult: true},
		{domain: "rocinax.com", includeSubDomain: true, testDomain: "rigis.rocinax.com", testResult: true},
		{domain: "rocinax.com", includeSubDomain: false, testDomain: "rigis.rocinax.com", testResult: false},
		{domain: "rocinax.com", includeSubDomain: true, testDomain: "rigis.rocinax.com:6443", testResult: false},
		{domain: "rocinax.com", includeSubDomain: false, testDomain: "rigis.rocinax.com:6443", testResult: false},
		{domain: "rocinax.com", includeSubDomain: true, testDomain: "ROCINAX.COM", testResult: true},
		{domain: "rocinax.com", includeSubDomain: false, testDomain: "ROCINAX.COM", testResult: true},
	}

	for _, tc := range cases {
		r := NewDomainRule(
			DomainSetting{
				Domain:           tc.domain,
				IncludeSubDomain: tc.includeSubDomain,
			},
		)
		if r.Execute(&http.Request{Host: tc.testDomain}) != tc.testResult {
			t.Errorf(
				"DomainRule{Domain: %s, IncludeSubdomain: %t -> RequestHost: %s ",
				tc.domain,
				tc.includeSubDomain,
				tc.testDomain,
			)
		}
	}
}
package main

import "fmt"

// Demonstrates basic types, their zero values, and pointer
// dereferencing; output is identical to the original demo.
func main() {
	num := 4          // int with an explicit initial value
	var i32 int32     // zero value 0
	var f32 float32   // zero value 0

	fmt.Printf("第一行 -a 变量类型为 %T\n", num)
	fmt.Printf("第二行 -b 变量类型为 %T\n", i32)
	fmt.Printf("第三行 -c 变量类型为 %T\n", f32)

	p := &num
	fmt.Printf("a的值为 %d \n", num)
	fmt.Printf("ptr为 %d\n", *p)
	fmt.Printf("ptr %d\n", p)
}
package database

import (
	"errors"

	"gopkg.in/mgo.v2"
	"gopkg.in/mgo.v2/bson"

	"themis/models"
	"themis/utils"
)

// AreaStorage is the storage backend for Areas.
type AreaStorage struct {
	database *mgo.Database // MongoDB database handle
}

// NewAreaStorage creates a new storage backend for Areas.
func NewAreaStorage(database *mgo.Database) *AreaStorage {
	return &AreaStorage{database: database}
}

// IsRoot returns true if the entity is the root entity, i.e. its
// ParentAreaID is unset after loading the Area with the given id.
func (AreaStorage *AreaStorage) IsRoot(id bson.ObjectId) (bool, error) {
	area := new(models.Area)
	coll := AreaStorage.database.C(area.GetCollectionName())
	if id == "" {
		utils.ErrorLog.Println("Given Area id is empty.")
		return false, errors.New("Given Area id is empty")
	}
	if err := coll.Find(bson.M{"_id": id}).One(area); err != nil {
		// NOTE(review): area.ID is still empty at this point since the load
		// failed; logging the `id` parameter was probably intended.
		utils.ErrorLog.Printf("Error while retrieving Area with ID %s from database: %s", area.ID, err.Error())
		return false, err
	}
	utils.DebugLog.Printf("Retrieved Area with ID %s from database.", area.ID.Hex())
	return (area.ParentAreaID.Hex() == ""), nil
}

// Insert creates a new record in the database and returns the new ID.
// The given Area must not already carry an ID.
func (AreaStorage *AreaStorage) Insert(area models.Area) (bson.ObjectId, error) {
	coll := AreaStorage.database.C(area.GetCollectionName())
	if area.ID != "" {
		utils.ErrorLog.Printf("Given Area instance already has an ID %s. Can not insert into database.\n", area.ID.Hex())
		return "", errors.New("Given Area instance already has an ID. Can not insert into database")
	}
	// Generate the new document id client-side before inserting.
	area.ID = bson.NewObjectId()
	if err := coll.Insert(area); err != nil {
		utils.ErrorLog.Printf("Error while inserting new Area with ID %s into database: %s", area.ID, err.Error())
		return "", err
	}
	utils.DebugLog.Printf("Inserted new Area with ID %s into database.", area.ID.Hex())
	return area.ID, nil
}

// Update updates an existing record in the database.
func (AreaStorage *AreaStorage) Update(area models.Area) error {
	coll := AreaStorage.database.C(area.GetCollectionName())
	if area.ID == "" {
		utils.ErrorLog.Println("Given Area instance has an empty ID. Can not be updated in the database.")
		return errors.New("Given Area instance has an empty ID. Can not be updated in the database")
	}
	if err := coll.UpdateId(area.ID, area); err != nil {
		utils.ErrorLog.Printf("Error while updating Area with ID %s in database: %s", area.ID, err.Error())
		return err
	}
	utils.DebugLog.Printf("Updated Area with ID %s in database.", area.ID.Hex())
	return nil
}

// Delete removes a record from the database.
func (AreaStorage *AreaStorage) Delete(id bson.ObjectId) error {
	coll := AreaStorage.database.C(models.AreaName) // TODO this should not use memory
	if id == "" {
		utils.ErrorLog.Println("Given Area instance has an empty ID. Can not be deleted from database.")
		// NOTE(review): this error message says "updated" — looks like a
		// copy-paste from Update; should read "deleted".
		return errors.New("Given Area instance has an empty ID. Can not be updated from database")
	}
	info, err := coll.RemoveAll(bson.M{"_id": id})
	if err != nil {
		utils.ErrorLog.Printf("Error while deleting Area with ID %s in database: %s", id, err.Error())
		return err
	}
	utils.DebugLog.Printf("Deleted %d Area with ID %s from database.", info.Removed, id)
	return nil
}

// GetOne returns an entity from the database based on a given ID.
func (AreaStorage *AreaStorage) GetOne(id bson.ObjectId) (models.Area, error) {
	area := new(models.Area)
	coll := AreaStorage.database.C(area.GetCollectionName())
	if id == "" {
		utils.ErrorLog.Println("Given Area id is empty.")
		return *area, errors.New("Given Area id is empty")
	}
	if err := coll.Find(bson.M{"_id": id}).One(area); err != nil {
		utils.ErrorLog.Printf("Error while retrieving Area with ID %s from database: %s", area.ID, err.Error())
		return *area, err
	}
	utils.DebugLog.Printf("Retrieved Area with ID %s from database.", area.ID.Hex())
	return *area, nil
}

// GetAll returns all Areas from the database matching the given query expression.
func (AreaStorage *AreaStorage) GetAll(queryExpression interface{}) ([]models.Area, error) {
	allAreas := new([]models.Area)
	coll := AreaStorage.database.C(models.AreaName)
	if err := coll.Find(queryExpression).All(allAreas); err != nil {
		utils.ErrorLog.Printf("Error while retrieving all Areas from database: %s", err.Error())
		return nil, err
	}
	utils.DebugLog.Printf("Retrieved Areas from database with filter %s.", queryExpression)
	return *allAreas, nil
}

// GetAllPaged returns a subset based on offset and limit, sorted by
// updated_at.
func (AreaStorage *AreaStorage) GetAllPaged(queryExpression interface{}, offset int, limit int) ([]models.Area, error) {
	// TODO there might be performance issues with this approach. See here:
	// https://stackoverflow.com/questions/40634865/efficient-paging-in-mongodb-using-mgo
	allAreas := new([]models.Area)
	coll := AreaStorage.database.C(models.AreaName)
	query := coll.Find(queryExpression).Sort("updated_at").Limit(limit)
	query = query.Skip(offset)
	if err := query.All(allAreas); err != nil {
		utils.ErrorLog.Printf("Error while retrieving paged Areas from database: %s", err.Error())
		return nil, err
	}
	utils.DebugLog.Printf("Retrieved Areas from database with filter %s.", queryExpression)
	return *allAreas, nil
}

// GetAllCount returns the number of elements in the database matching
// the given query expression.
func (AreaStorage *AreaStorage) GetAllCount(queryExpression interface{}) (int, error) {
	coll := AreaStorage.database.C(models.AreaName)
	allCount, err := coll.Find(queryExpression).Count()
	if err != nil {
		utils.ErrorLog.Printf("Error while retrieving number of Areas from database: %s", err.Error())
		return -1, err
	}
	utils.DebugLog.Printf("Retrieved Areas count from database with filter %s.", queryExpression)
	return allCount, nil
}
package po_test import ( "testing" "github.com/yino/AgentSpider/po" ) func TestBatchFindIp(t *testing.T) { po.InitDB() po.BatchFindIp([]string{"127.0.0.1", "128.0.0.1"}) }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package config import ( "net" "reflect" "testing" "time" "github.com/alipay/sofa-mosn/pkg/api/v2" ) func TestParseClusterHealthCheckConf(t *testing.T) { healthCheckConfigStr := `{ "protocol": "SofaRpc", "timeout": "90s", "healthy_threshold": 2, "unhealthy_threshold": 2, "interval": "5s", "interval_jitter": 0, "check_path": "" }` var ccc ClusterHealthCheckConfig json.Unmarshal([]byte(healthCheckConfigStr), &ccc) want := v2.HealthCheck{ Protocol: "SofaRpc", Timeout: 90 * time.Second, HealthyThreshold: 2, UnhealthyThreshold: 2, Interval: 5 * time.Second, IntervalJitter: 0, CheckPath: "", ServiceName: "", } type args struct { c *ClusterHealthCheckConfig } tests := []struct { name string args args want v2.HealthCheck }{ // TODO: Add test cases. 
{ name: "test1", args: args{ c: &ccc, }, want: want, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := parseClusterHealthCheckConf(tt.args.c); !reflect.DeepEqual(got, tt.want) { t.Errorf("parseClusterHealthCheckConf() = %v, want %v", got, tt.want) } }) } } func TestParseFilterChainJSONFile(t *testing.T) { var filterchan FilterChain filterchanStr := `{ "match":"test", "tls_context":{ "status": true, "inspector": true, "server_name": "hello.com", "ca_cert": "-----BEGIN CERTIFICATE-----\nMIIDMjCCAhoCCQDaFC8PcSS5qTANBgkqhkiG9w0BAQsFADBbMQswCQYDVQQGEwJD\nTjEKMAgGA1UECAwBYTEKMAgGA1UEBwwBYTEKMAgGA1UECgwBYTEKMAgGA1UECwwB\nYTEKMAgGA1UEAwwBYTEQMA4GCSqGSIb3DQEJARYBYTAeFw0xODA2MTQwMjQyMjVa\nFw0xOTA2MTQwMjQyMjVaMFsxCzAJBgNVBAYTAkNOMQowCAYDVQQIDAFhMQowCAYD\nVQQHDAFhMQowCAYDVQQKDAFhMQowCAYDVQQLDAFhMQowCAYDVQQDDAFhMRAwDgYJ\nKoZIhvcNAQkBFgFhMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArbNc\nmOXvvOqZgJdxMIiklE8lykVvz5d7QZ0+LDVG8phshq9/woigfB1aFBAI36/S5LZQ\n5Fd0znblSa+LOY06jdHTkbIBYFlxH4tdRaD0B7DbFzR5bpzLv2Q+Zf5u5RI73Nky\nH8CjW9QJjboArHkwm0YNeENaoR/96nYillgYLnunol4h0pxY7ZC6PpaB1EBaTXcz\n0iIUX4ktUJQmYZ/DFzB0oQl9IWOj18ml2wYzu9rYsySzj7EPnDOOebsRfS5hl3fz\nHi4TC4PDh0mQwHqDQ4ncztkybuRSXFQ6RzEPdR5qtp9NN/G/TlfyB0CET3AFmGkp\nE2irGoF/JoZXEDeXmQIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQApzhQLS7fAcExZ\nx1S+hcy7lLF8QcPlsiH32SnLFg5LPy4prz71mebUchmt97t4T3tSWzwXi8job7Q2\nONYc6sr1LvaFtg7qoCfz5fPP5x+kKDkEPwCDJSTVPcXP+UtA407pxX8KPRN8Roay\ne3oGcmNqVu/DkkufkIL3PBg41JEMovWtKD+PXmeBafc4vGCHSJHJBmzMe5QtwHA0\nss/A9LHPaq3aLcIyFr8x7clxc7zZVaim+lVfNV3oPBnB4gU7kLFVT0zOhkM+V1A4\nQ5GVbGAu4R7ItY8kJ2b7slre0ajPUp2FMregt4mnUM3mu1nbltVhtoknXqHHMGgN\n4Lh4JfNx\n-----END CERTIFICATE-----\n", "cert_chain": "-----BEGIN 
CERTIFICATE-----\nMIIDJTCCAg0CAQEwDQYJKoZIhvcNAQELBQAwWzELMAkGA1UEBhMCQ04xCjAIBgNV\nBAgMAWExCjAIBgNVBAcMAWExCjAIBgNVBAoMAWExCjAIBgNVBAsMAWExCjAIBgNV\nBAMMAWExEDAOBgkqhkiG9w0BCQEWAWEwHhcNMTgwNjE0MDMxMzQyWhcNMTkwNjE0\nMDMxMzQyWjBWMQswCQYDVQQGEwJDTjEKMAgGA1UECAwBYTEKMAgGA1UECgwBYTEK\nMAgGA1UECwwBYTERMA8GA1UEAwwIdGVzdC5jb20xEDAOBgkqhkiG9w0BCQEWAWEw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrPq+Mo0nS3dJU1qGFwlIB\ni9HqRm5RGcfps+0W5LjEhqUKxKUweRrwDaIxpiSqjKeehz9DtLUpXBD29pHuxODU\nVsMH2U1AGWn9l4jMnP6G5iTMPJ3ZTXszeqALe8lm/f807ZA0C7moc+t7/d3+b6d2\nlnwR+yWbIZJUu2qw+HrR0qPpNlBP3EMtlQBOqf4kCl6TfpqrGfc9lW0JjuE6Taq3\ngSIIgzCsoUFe30Yemho/Pp4zA9US97DZjScQLQAGiTsCRDBASxXGzODQOfZL3bCs\n2w//1lqGjmhp+tU1nR4MRN+euyNX42ioEz111iB8y0VzuTIsFBWwRTKK1SF7YSEb\nAgMBAAEwDQYJKoZIhvcNAQELBQADggEBABnRM9JJ21ZaujOTunONyVLHtmxUmrdr\n74OJW8xlXYEMFu57Wi40+4UoeEIUXHviBnONEfcITJITYUdqve2JjQsH2Qw3iBUr\nmsFrWS25t/Krk2FS2cKg8B9azW2+p1mBNm/FneMv2DMWHReGW0cBp3YncWD7OwQL\n9NcYfXfgBgHdhykctEQ97SgLHDKUCU8cPJv14eZ+ehIPiv8cDWw0mMdMeVK9q71Y\nWn2EgP7HzVgdbj17nP9JJjNvets39gD8bU0g2Lw3wuyb/j7CHPBBzqxh+a8pihI5\n3dRRchuVeMQkMuukyR+/A8UrBMA/gCTkXIcP6jKO1SkKq5ZwlMmapPc=\n-----END CERTIFICATE-----\n", "private_key": "-----BEGIN RSA PRIVATE 
KEY-----\nMIIEpQIBAAKCAQEAqz6vjKNJ0t3SVNahhcJSAYvR6kZuURnH6bPtFuS4xIalCsSl\nMHka8A2iMaYkqoynnoc/Q7S1KVwQ9vaR7sTg1FbDB9lNQBlp/ZeIzJz+huYkzDyd\n2U17M3qgC3vJZv3/NO2QNAu5qHPre/3d/m+ndpZ8EfslmyGSVLtqsPh60dKj6TZQ\nT9xDLZUATqn+JApek36aqxn3PZVtCY7hOk2qt4EiCIMwrKFBXt9GHpoaPz6eMwPV\nEvew2Y0nEC0ABok7AkQwQEsVxszg0Dn2S92wrNsP/9Zaho5oafrVNZ0eDETfnrsj\nV+NoqBM9ddYgfMtFc7kyLBQVsEUyitUhe2EhGwIDAQABAoIBAG2Bj5ca0Fmk+hzA\nh9fWdMSCWgE7es4n81wyb/nE15btF1t0dsIxn5VE0qR3P1lEyueoSz+LrpG9Syfy\nc03B3phKxzscrbbAybOeFJ/sASPYxk1IshRE5PT9hJzzUs6mvG1nQWDW4qmjP0Iy\nDKTpV6iRANQqy1iRtlay5r42l6vWwHfRfwAv4ExSS+RgkYcavqOp3e9If2JnFJuo\n7Zds2i7Ux8dURX7lHqKxTt6phgoMmMpvO3lFYVGos7F13OR9NKElzjiefAQbweAt\nt8R+6A1rlIwnfywxET9ZXglfOFK6Q0nqCJhcEcKzT/Xfkd+h9XPACjOObJh3a2+o\nwg9GBFECgYEA2a6JYuFanKzvajFPbSeN1csfI9jPpK2+tB5+BB72dE74B4rjygiG\n0Rb26UjovkYfJJqKuKr4zDL5ziSlJk199Ae2f6T7t7zmyhMlWQtVT12iTQvBINTz\nNerKi5HNVBsCSGj0snbwo8u4QRgTjaIoVqTlOlUQuGqYuZ75l8g35IkCgYEAyWOM\nKagzpGmHWq/0ThN4kkwWOdujxuqrPf4un2WXsir+L90UV7X9wY4mO19pe5Ga2Upu\nXFDsxAZsanf8SbzkTGHvzUobFL7eqsiwaUSGB/cGEtkIyVlAdyW9DhiZFt3i9mEF\nbBsHnEDHPHL4tu+BB8G3WahHjWOnbWZ3NTtP94MCgYEAi3XRmSLtjYER5cPvsevs\nZ7M5oRqvdT7G9divPW6k0MEjEJn/9BjgXqbKy4ylZ/m+zBGinEsVGKXz+wjpMY/m\nCOjEGCUYC5AfgAkiHVkwb6d6asgEFEe6BaoF18MyfBbNsJxlYMzowNeslS+an1vr\nYg9EuMl06+GHNSzPlVl1zZkCgYEAxXx8N2F9eu4NUK4ZafMIGpbIeOZdHbSERp+b\nAq5yasJkT3WB/F04QXVvImv3Gbj4W7r0rEyjUbtm16Vf3sOAMTMdIHhaRCbEXj+9\nVw1eTjM8XoE8b465e92jHk6a2WSvq6IK2i9LcDvJ5QptwZ7uLjgV37L4r7sYtVx0\n69uFGJcCgYEAot7im+Yi7nNsh1dJsDI48liKDjC6rbZoCeF7Tslp8Lt+4J9CA9Ux\nSHyKjg9ujbjkzCWrPU9hkugOidDOmu7tJAxB5cS00qJLRqB5lcPxjOWcBXCdpBkO\n0tdT/xRY/MYLf3wbT95enaPlhfeqBBXKNQDya6nISbfwbMLfNxdZPJ8=\n-----END RSA PRIVATE KEY-----\n", "verify_client": true, "cipher_suites": "ECDHE-RSA-AES256-GCM-SHA384", "ecdh_curves": "P256" }, "filters": [ { "type": "proxy", "config": { "downstream_protocol": "SofaRpc", "name": "proxy_config", "support_dynamic_route": true, "upstream_protocol": "SofaRpc", "virtual_hosts": [ { "name": "sofa", "require_tls": 
"no", "domains":[ "*testwilccard" ], "routers": [ { "match": { "headers": [ { "name": "service", "value": "com.alipay.rpc.common.service.facade.pb.SampleServicePb:1.0", "regex":false } ] }, "route": { "cluster_name": "test_cpp", "cluster_header": { "filter_metadata": { "mosn.lb": { "version":"1.1", "stage":"pre-release", "label": "gray" } } } } } ] } ] } } ] }` json.Unmarshal([]byte(filterchanStr), &filterchan) if filterchan.FilterChainMatch != "test" || len(filterchan.Filters) != 1 || filterchan.Filters[0].Type != "proxy" || filterchan.TLS.ServerName != "hello.com" { t.Errorf("TestParseFilterChain Failure") } } func TestParseProxyFilterJSONFile(t *testing.T) { var proxy Proxy filterchanStr := `{ "downstream_protocol": "SofaRpc", "name": "proxy_config", "support_dynamic_route": true, "upstream_protocol": "SofaRpc", "virtual_hosts": [ { "name": "sofa", "require_tls": "no", "domains":[ "*testwilccard" ], "routers": [ { "match": { "headers": [ { "name": "service", "value": "com.alipay.rpc.common.service.facade.pb.SampleServicePb:1.0", "regex":false } ] }, "route": { "cluster_name": "test_cpp", "metadata_match": { "filter_metadata": { "mosn.lb": { "version":"1.1", "stage":"pre-release", "label": "gray" } } } } } ] } ] }` json.Unmarshal([]byte(filterchanStr), &proxy) if proxy.Name != "proxy_config" || len(proxy.VirtualHosts) != 1 || proxy.VirtualHosts[0].Name != "sofa" { t.Errorf("TestParseProxyFilterJSON Failure") } } func TestParseXProxyFilterJSONFile(t *testing.T) { var proxy Proxy filterchanStr := `{ "downstream_protocol": "X", "name": "proxy_config", "support_dynamic_route": true, "upstream_protocol": "Http2", "extend_config": { "sub_protocol": "sofa" }, "virtual_hosts": [ { "name": "sofa", "require_tls": "no", "domains":[ "*testwilccard" ], "routers": [ { "match": { "headers": [ { "name": "service", "value": "com.alipay.rpc.common.service.facade.pb.SampleServicePb:1.0", "regex":false } ] }, "route": { "cluster_name": "test_cpp", "metadata_match": { 
"filter_metadata": { "mosn.lb": { "version":"1.1", "stage":"pre-release", "label": "gray" } } } } } ] } ] }` json.Unmarshal([]byte(filterchanStr), &proxy) if proxy.Name != "proxy_config" || len(proxy.VirtualHosts) != 1 || proxy.VirtualHosts[0].Name != "sofa" || proxy.ExtendConfig["sub_protocol"] != "sofa" { t.Errorf("TestParseProxyFilterJSON Failure") } } func TestParseTlsJsonFile(t *testing.T) { tlscon := TLSConfig{} test := `{ "status": true, "inspector": true, "server_name": "hello.com", "ca_cert": "-----BEGIN CERTIFICATE-----\nMIIDMjCCAhoCCQDaFC8PcSS5qTANBgkqhkiG9w0BAQsFADBbMQswCQYDVQQGEwJD\nTjEKMAgGA1UECAwBYTEKMAgGA1UEBwwBYTEKMAgGA1UECgwBYTEKMAgGA1UECwwB\nYTEKMAgGA1UEAwwBYTEQMA4GCSqGSIb3DQEJARYBYTAeFw0xODA2MTQwMjQyMjVa\nFw0xOTA2MTQwMjQyMjVaMFsxCzAJBgNVBAYTAkNOMQowCAYDVQQIDAFhMQowCAYD\nVQQHDAFhMQowCAYDVQQKDAFhMQowCAYDVQQLDAFhMQowCAYDVQQDDAFhMRAwDgYJ\nKoZIhvcNAQkBFgFhMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArbNc\nmOXvvOqZgJdxMIiklE8lykVvz5d7QZ0+LDVG8phshq9/woigfB1aFBAI36/S5LZQ\n5Fd0znblSa+LOY06jdHTkbIBYFlxH4tdRaD0B7DbFzR5bpzLv2Q+Zf5u5RI73Nky\nH8CjW9QJjboArHkwm0YNeENaoR/96nYillgYLnunol4h0pxY7ZC6PpaB1EBaTXcz\n0iIUX4ktUJQmYZ/DFzB0oQl9IWOj18ml2wYzu9rYsySzj7EPnDOOebsRfS5hl3fz\nHi4TC4PDh0mQwHqDQ4ncztkybuRSXFQ6RzEPdR5qtp9NN/G/TlfyB0CET3AFmGkp\nE2irGoF/JoZXEDeXmQIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQApzhQLS7fAcExZ\nx1S+hcy7lLF8QcPlsiH32SnLFg5LPy4prz71mebUchmt97t4T3tSWzwXi8job7Q2\nONYc6sr1LvaFtg7qoCfz5fPP5x+kKDkEPwCDJSTVPcXP+UtA407pxX8KPRN8Roay\ne3oGcmNqVu/DkkufkIL3PBg41JEMovWtKD+PXmeBafc4vGCHSJHJBmzMe5QtwHA0\nss/A9LHPaq3aLcIyFr8x7clxc7zZVaim+lVfNV3oPBnB4gU7kLFVT0zOhkM+V1A4\nQ5GVbGAu4R7ItY8kJ2b7slre0ajPUp2FMregt4mnUM3mu1nbltVhtoknXqHHMGgN\n4Lh4JfNx\n-----END CERTIFICATE-----\n", "cert_chain": "-----BEGIN 
CERTIFICATE-----\nMIIDJTCCAg0CAQEwDQYJKoZIhvcNAQELBQAwWzELMAkGA1UEBhMCQ04xCjAIBgNV\nBAgMAWExCjAIBgNVBAcMAWExCjAIBgNVBAoMAWExCjAIBgNVBAsMAWExCjAIBgNV\nBAMMAWExEDAOBgkqhkiG9w0BCQEWAWEwHhcNMTgwNjE0MDMxMzQyWhcNMTkwNjE0\nMDMxMzQyWjBWMQswCQYDVQQGEwJDTjEKMAgGA1UECAwBYTEKMAgGA1UECgwBYTEK\nMAgGA1UECwwBYTERMA8GA1UEAwwIdGVzdC5jb20xEDAOBgkqhkiG9w0BCQEWAWEw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrPq+Mo0nS3dJU1qGFwlIB\ni9HqRm5RGcfps+0W5LjEhqUKxKUweRrwDaIxpiSqjKeehz9DtLUpXBD29pHuxODU\nVsMH2U1AGWn9l4jMnP6G5iTMPJ3ZTXszeqALe8lm/f807ZA0C7moc+t7/d3+b6d2\nlnwR+yWbIZJUu2qw+HrR0qPpNlBP3EMtlQBOqf4kCl6TfpqrGfc9lW0JjuE6Taq3\ngSIIgzCsoUFe30Yemho/Pp4zA9US97DZjScQLQAGiTsCRDBASxXGzODQOfZL3bCs\n2w//1lqGjmhp+tU1nR4MRN+euyNX42ioEz111iB8y0VzuTIsFBWwRTKK1SF7YSEb\nAgMBAAEwDQYJKoZIhvcNAQELBQADggEBABnRM9JJ21ZaujOTunONyVLHtmxUmrdr\n74OJW8xlXYEMFu57Wi40+4UoeEIUXHviBnONEfcITJITYUdqve2JjQsH2Qw3iBUr\nmsFrWS25t/Krk2FS2cKg8B9azW2+p1mBNm/FneMv2DMWHReGW0cBp3YncWD7OwQL\n9NcYfXfgBgHdhykctEQ97SgLHDKUCU8cPJv14eZ+ehIPiv8cDWw0mMdMeVK9q71Y\nWn2EgP7HzVgdbj17nP9JJjNvets39gD8bU0g2Lw3wuyb/j7CHPBBzqxh+a8pihI5\n3dRRchuVeMQkMuukyR+/A8UrBMA/gCTkXIcP6jKO1SkKq5ZwlMmapPc=\n-----END CERTIFICATE-----\n", "private_key": "-----BEGIN RSA PRIVATE 
KEY-----\nMIIEpQIBAAKCAQEAqz6vjKNJ0t3SVNahhcJSAYvR6kZuURnH6bPtFuS4xIalCsSl\nMHka8A2iMaYkqoynnoc/Q7S1KVwQ9vaR7sTg1FbDB9lNQBlp/ZeIzJz+huYkzDyd\n2U17M3qgC3vJZv3/NO2QNAu5qHPre/3d/m+ndpZ8EfslmyGSVLtqsPh60dKj6TZQ\nT9xDLZUATqn+JApek36aqxn3PZVtCY7hOk2qt4EiCIMwrKFBXt9GHpoaPz6eMwPV\nEvew2Y0nEC0ABok7AkQwQEsVxszg0Dn2S92wrNsP/9Zaho5oafrVNZ0eDETfnrsj\nV+NoqBM9ddYgfMtFc7kyLBQVsEUyitUhe2EhGwIDAQABAoIBAG2Bj5ca0Fmk+hzA\nh9fWdMSCWgE7es4n81wyb/nE15btF1t0dsIxn5VE0qR3P1lEyueoSz+LrpG9Syfy\nc03B3phKxzscrbbAybOeFJ/sASPYxk1IshRE5PT9hJzzUs6mvG1nQWDW4qmjP0Iy\nDKTpV6iRANQqy1iRtlay5r42l6vWwHfRfwAv4ExSS+RgkYcavqOp3e9If2JnFJuo\n7Zds2i7Ux8dURX7lHqKxTt6phgoMmMpvO3lFYVGos7F13OR9NKElzjiefAQbweAt\nt8R+6A1rlIwnfywxET9ZXglfOFK6Q0nqCJhcEcKzT/Xfkd+h9XPACjOObJh3a2+o\nwg9GBFECgYEA2a6JYuFanKzvajFPbSeN1csfI9jPpK2+tB5+BB72dE74B4rjygiG\n0Rb26UjovkYfJJqKuKr4zDL5ziSlJk199Ae2f6T7t7zmyhMlWQtVT12iTQvBINTz\nNerKi5HNVBsCSGj0snbwo8u4QRgTjaIoVqTlOlUQuGqYuZ75l8g35IkCgYEAyWOM\nKagzpGmHWq/0ThN4kkwWOdujxuqrPf4un2WXsir+L90UV7X9wY4mO19pe5Ga2Upu\nXFDsxAZsanf8SbzkTGHvzUobFL7eqsiwaUSGB/cGEtkIyVlAdyW9DhiZFt3i9mEF\nbBsHnEDHPHL4tu+BB8G3WahHjWOnbWZ3NTtP94MCgYEAi3XRmSLtjYER5cPvsevs\nZ7M5oRqvdT7G9divPW6k0MEjEJn/9BjgXqbKy4ylZ/m+zBGinEsVGKXz+wjpMY/m\nCOjEGCUYC5AfgAkiHVkwb6d6asgEFEe6BaoF18MyfBbNsJxlYMzowNeslS+an1vr\nYg9EuMl06+GHNSzPlVl1zZkCgYEAxXx8N2F9eu4NUK4ZafMIGpbIeOZdHbSERp+b\nAq5yasJkT3WB/F04QXVvImv3Gbj4W7r0rEyjUbtm16Vf3sOAMTMdIHhaRCbEXj+9\nVw1eTjM8XoE8b465e92jHk6a2WSvq6IK2i9LcDvJ5QptwZ7uLjgV37L4r7sYtVx0\n69uFGJcCgYEAot7im+Yi7nNsh1dJsDI48liKDjC6rbZoCeF7Tslp8Lt+4J9CA9Ux\nSHyKjg9ujbjkzCWrPU9hkugOidDOmu7tJAxB5cS00qJLRqB5lcPxjOWcBXCdpBkO\n0tdT/xRY/MYLf3wbT95enaPlhfeqBBXKNQDya6nISbfwbMLfNxdZPJ8=\n-----END RSA PRIVATE KEY-----\n", "verify_client": true, "cipher_suites": "ECDHE-RSA-AES256-GCM-SHA384", "ecdh_curves": "P256" }` json.Unmarshal([]byte(test), &tlscon) if tlscon.ServerName != "hello.com" { t.Errorf("TestTlsParse failure, want hello.com but got %s", tlscon.ServerName) } } func Test_parseRouters(t *testing.T) { Router := []Router{ 
{ Route: RouteAction{ WeightedClusters: []WeightedCluster{ { Cluster: ClusterWeight{ Name: "c1", Weight: 90, }, }, { Cluster: ClusterWeight{ Name: "c2", Weight: 10, }, }, }, }, }, } if got := parseRouters(Router); len(got) != 1 || len(got[0].Route.WeightedClusters) != 2 { t.Errorf("parseRouters() = %v error", got) } } func Test_parseWeightClusters(t *testing.T) { tests := []struct { name string args []string want []v2.WeightedCluster }{ { name: "validCluster", args: []string{ `{ "name":"c1", "weight":90,"metadata_match":{"filter_metadata": {"mosn.lb": {"version": "v1"}}}}`, `{ "name":"c2", "weight":10,"metadata_match":{"filter_metadata": {"mosn.lb": {"version": "v2"}}}}`, }, want: []v2.WeightedCluster{ { Cluster: v2.ClusterWeight{ Name: "c1", Weight: 90, MetadataMatch: v2.Metadata{ "version": "v1", }, }, }, { Cluster: v2.ClusterWeight{ Name: "c2", Weight: 10, MetadataMatch: v2.Metadata{ "version": "v2", }, }, }, }, }, { name: "emptyCluster", args: []string{ `{ "name":"c1", "weight":90,"metadata_match":{"filter_metadata": {"mosn.lb":{} }}}`, `{ "name":"c2", "weight":10,"metadata_match":{"filter_metadata": {"mosn.lb": {}}}}`, }, want: []v2.WeightedCluster{ { Cluster: v2.ClusterWeight{ Name: "c1", Weight: 90, MetadataMatch: v2.Metadata{}, }, }, { Cluster: v2.ClusterWeight{ Name: "c2", Weight: 10, MetadataMatch: v2.Metadata{}, }, }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { var weightClusters []WeightedCluster for _, clusterString := range tt.args { var cluster ClusterWeight json.Unmarshal([]byte(clusterString), &cluster) weightClusters = append(weightClusters, WeightedCluster{Cluster: cluster}) } if got := parseWeightClusters(weightClusters); !reflect.DeepEqual(got, tt.want) { t.Errorf("parseWeightClusters() = %v, want %v", got, tt.want) } }) } } func TestParseRouterMetadata(t *testing.T) { var envoyvalue = map[string]interface{}{"label": "gray", "stage": "pre-release"} var lbvalue = map[string]interface{}{"mosn.lb": envoyvalue} var 
envoyvalue2 = map[string]interface{}{} var lbvalue2 = map[string]interface{}{"mosn.lb": envoyvalue2} testCases := []struct { name string args map[string]interface{} want v2.Metadata }{ { name: "validCase", args: map[string]interface{}{"filter_metadata": lbvalue}, want: v2.Metadata{"label": "gray", "stage": "pre-release"}, }, { name: "emptyCase", args: map[string]interface{}{"filter_metadata": lbvalue2}, want: v2.Metadata{}, }, } for _, tt := range testCases { got := parseRouterMetadata(tt.args) if !reflect.DeepEqual(got, tt.want) { t.Errorf("parse route medata error,want = %+v, but got = %+v, case = %s", tt.want, got, tt.name) } } } func TestParseTCPProxy(t *testing.T) { cfgStr := `{ "routes": [ { "cluster": "www", "source_addrs":[ "127.0.0.1:80", "192.168.1.1:80" ], "destination_addrs":[ "127.0.0.1:80", "192.168.1.1:80" ] }, { "cluster": "www2", "source_addrs":[ "127.0.0.1:80", "192.168.1.1:80" ], "destination_addrs":[ "127.0.0.1:80", "192.168.1.1:80" ] } ] }` cfgMap := make(map[string]interface{}) if err := json.Unmarshal([]byte(cfgStr), &cfgMap); err != nil { t.Error(err) return } var err error cfgMap, err = ConvertTCPProxyToV2(cfgMap) if err != nil { t.Error(err) return } proxy, err := ParseTCPProxy(cfgMap) if err != nil { t.Error(err) return } addr1 := &net.TCPAddr{ IP: net.IPv4(127, 0, 0, 1), Port: 80, } addr2 := &net.TCPAddr{ IP: net.IPv4(192, 168, 1, 1), Port: 80, } route1 := &v2.TCPRoute{ Cluster: "www", SourceAddrs: []net.Addr{addr1, addr2}, DestinationAddrs: []net.Addr{addr1, addr2}, } route2 := &v2.TCPRoute{ Cluster: "www2", SourceAddrs: []net.Addr{addr1, addr2}, DestinationAddrs: []net.Addr{addr1, addr2}, } expected := &v2.TCPProxy{ Routes: []*v2.TCPRoute{route1, route2}, } compare := func(p1, p2 *v2.TCPProxy) bool { if len(p1.Routes) != len(p2.Routes) { return false } for i := range p1.Routes { r1 := p1.Routes[i] r2 := p2.Routes[i] if r1.Cluster != r2.Cluster { return false } if len(r1.SourceAddrs) != len(r2.SourceAddrs) { return false } if 
len(r1.DestinationAddrs) != len(r2.DestinationAddrs) { return false } for j := range r1.SourceAddrs { s1 := r1.SourceAddrs[j] s2 := r2.SourceAddrs[j] if s1.String() != s2.String() { return false } } for j := range r1.DestinationAddrs { d1 := r1.DestinationAddrs[j] d2 := r2.DestinationAddrs[j] if d1.String() != d2.String() { return false } } } return true } if !compare(expected, proxy) { t.Error("generate tcp proxy unexpected") } }
package migrate import ( "testing" "time" mockRepository "github.com/neuronlabs/neuron-mocks" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/neuronlabs/neuron-core/config" "github.com/neuronlabs/neuron-core/controller" ) type someModel struct { ID int `neuron:"type=primary"` Attr string `neuron:"type=attr" db:"name=attribute"` SnakeCased string `neuron:"type=attr"` CreatedAt time.Time `neuron:"type=attr"` UpdatedAt time.Time `neuron:"type=attr"` DeletedAt *time.Time `neuron:"type=attr"` } // TestParseModel tests the extraction of the pq tags func TestParseModel(t *testing.T) { t.Run("WithTimeFields", func(t *testing.T) { // type the some model some := &someModel{} c := tCtrl(t, some) mStruct, err := c.ModelStruct(some) require.NoError(t, err) for _, field := range mStruct.StructFields() { cl, ok := field.StoreGet(ColumnKey) switch field.Name() { case "ID": if assert.True(t, ok) { col, ok := cl.(*Column) if assert.True(t, ok) { assert.Equal(t, "id", col.Name) } } case "Attr": if assert.True(t, ok) { col, ok := cl.(*Column) if assert.True(t, ok) { assert.Equal(t, "attribute", col.Name) } } case "SnakeCased": if assert.True(t, ok) { col, ok := cl.(*Column) if assert.True(t, ok) { assert.Equal(t, "snake_cased", col.Name) } } case "Nested": } } }) } func tCtrl(t *testing.T, models ...interface{}) *controller.Controller { t.Helper() cfg := config.Default() cfg.DefaultRepositoryName = "mockery" c, err := controller.New(cfg) require.NoError(t, err) err = c.RegisterRepository("mockery", &config.Repository{ DriverName: mockRepository.DriverName, }) require.NoError(t, err) require.NoError(t, c.RegisterModels(models...)) require.NoError(t, prepareModels(c.ModelMap.Models()...)) return c }
// Package ormlite is a lightweight reflection-based ORM layer over
// database/sql, using "?" placeholders, "create temp table" and
// "select count()" queries.
package ormlite

import (
	"context"
	"database/sql"
	"fmt"
	"math/rand"
	"os"
	"reflect"
	"strings"
	"time"
	"unsafe"

	"github.com/pkg/errors"
)

// relationType classifies how a struct field relates to another model.
type relationType int

const (
	// queryTimeout bounds the context used by the non-Context entry points.
	queryTimeout = time.Second * 30
	// packageTagName is the struct tag key parsed for column and relation
	// settings.
	packageTagName = "ormlite"
	// defaultRelationDepth is how many relation levels are loaded by default.
	defaultRelationDepth = 1

	// Relation kinds. NOTE: iota counts every const spec in this block, so
	// noRelation is 1<<3, hasMany 1<<4, etc.; the values are only compared
	// for identity, so the exact bits do not matter.
	noRelation relationType = 1 << iota
	hasMany
	hasOne
	manyToMany

	// Alphabet and bit constants for random temp-table names, consumed by
	// getTempTableName.
	letterBytes   = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
	letterIdxBits = 6                    // 6 bits to represent a letter index
	letterIdxMask = 1<<letterIdxBits - 1 // All 1-bits, as many as letterIdxBits
	letterIdxMax  = 63 / letterIdxBits   // # of letter indices fitting in 63 bits

	// tempTableNameLength is the length (8) of generated temp-table names.
	tempTableNameLength = 2 << 2
)

var (
	// ErrNoRowsAffected is an error to return when no rows were affected
	ErrNoRowsAffected = errors.New("no rows affected")

	// src seeds the random temp-table name generator.
	src = rand.NewSource(time.Now().UnixNano())
)

// Error is a custom struct that contains sql error, query and arguments
type Error struct {
	SQLError error
	Query    string
	Args     []interface{}
}

// Error implements error interface
func (e *Error) Error() string {
	return e.SQLError.Error()
}

// OrderBy describes ordering rule
type OrderBy struct {
	Field string `json:"field"`
	Order string `json:"order"`
}

// Where is a map containing fields and their values to meet in the result
type Where map[string]interface{}

// Marker types for Where values: each selects a different comparison
// operator in queryWithOptions instead of the default "=".
type Greater float64          // column > value
type Less float64             // column < value
type GreaterOrEqual float64   // column >= value
type LessOrEqual float64      // column <= value
type NotEqual float64         // column != value
type BitwiseAND float64       // column&value > 0
type BitwiseANDStrict float64 // column&value = value
type StrictString string      // exact match instead of "like %value%"

const (
	// AND is a glue between multiple statements after `where`
	AND = " and "
	// OR is a glue between multiple statements after `where`
	OR = " or "
)

// Options represents query options
type Options struct {
	Where         Where    `json:"where"`
	Divider       string   `json:"divider"`
	Limit         int      `json:"limit"`
	Offset        int      `json:"offset"`
	OrderBy       *OrderBy `json:"order_by"`
	RelationDepth int      `json:"relation_depth"`
	RelatedTo     []IModel `json:"related"`
	// Columns contains map with string keys of columns to include to the query
	// instead of querying all model fields
	Columns map[string]struct{} `json:"columns"`

	// joins holds raw join clauses appended verbatim to the query.
	joins []string
}

// DefaultOptions returns default options for query
func DefaultOptions() *Options {
	return &Options{RelationDepth: defaultRelationDepth, Divider: AND}
}

// WithWhere modifies existing options by adding where clause to them
func WithWhere(options *Options, where Where) *Options {
	options.Where = where
	return options
}

// WithLimit modifies existing options by adding limit parameter to them
func WithLimit(options *Options, limit int) *Options {
	options.Limit = limit
	return options
}

// WithOffset modifies existing options by adding offset parameter to them.
// If options does not have positive limit parameter the offset will remain unchanged
// to avoid sql query correctness.
func WithOffset(options *Options, offset int) *Options {
	if options.Limit != 0 {
		options.Offset = offset
	}
	return options
}

// WithOrder modifies existing options by adding ordering options to them
func WithOrder(options *Options, by OrderBy) *Options {
	options.OrderBy = &by
	return options
}

// Model is an interface that represents model of database
type Model interface {
	Table() string
}

// relationInfo describes one relation extracted from a struct tag.
type relationInfo struct {
	Table       string       // mapping table (many_to_many only)
	Type        relationType // hasOne / hasMany / manyToMany / noRelation
	RelatedType reflect.Type
	FieldName   string
	Condition   string // extra raw condition (many_to_many only)
	RefPkValue  interface{}
}

// columnInfo describes one column mapped from a struct field.
type columnInfo struct {
	RelationInfo relationInfo
	Name         string
	Index        int // field index within the struct
	Primary      bool
}

// isExportedField reports whether the struct field is exported.
func isExportedField(f reflect.StructField) bool {
	return f.IsExported()
}

// lookForSettingWithSep searches the comma-separated tag string s for the
// given setting. It returns the setting name itself for flag-style entries
// ("primary"), the value for key/value entries ("table=t"), or "" when absent.
func lookForSettingWithSep(s, setting, sep string) string {
	pairs := strings.Split(s, ",")
	for _, pair := range pairs {
		kvs := strings.SplitN(pair, sep, 2)
		if len(kvs) == 1 && kvs[0] == setting {
			return setting
		} else if len(kvs) == 2 && kvs[0] == setting {
			return kvs[1]
		}
	}
	return ""
}

// getTempTableName returns a random n-character name built from letterBytes,
// drawing letterIdxBits bits per letter from each 63-bit random word.
func getTempTableName(n int) string {
	b := make([]byte, n)
	// A src.Int63() generates 63 random bits, enough for letterIdxMax characters!
	for i, cache, remain := n-1, src.Int63(), letterIdxMax; i >= 0; {
		if remain == 0 {
			cache, remain = src.Int63(), letterIdxMax
		}
		if idx := int(cache & letterIdxMask); idx < len(letterBytes) {
			b[i] = letterBytes[idx]
			i--
		}
		cache >>= letterIdxBits
		remain--
	}
	// Zero-copy []byte -> string conversion; b is not used afterwards.
	return *(*string)(unsafe.Pointer(&b))
}

// lookForSetting is lookForSettingWithSep with the default "=" separator.
func lookForSetting(s, setting string) string {
	return lookForSettingWithSep(s, setting, "=")
}

// getColumnInfo builds column metadata for every exported, non-ignored
// field of struct type t, including relation info and primary-key flags.
func getColumnInfo(t reflect.Type) ([]columnInfo, error) {
	var (
		columns []columnInfo
		v       = reflect.New(t)
	)
	for i := 0; i < t.NumField(); i++ {
		if !isExportedField(t.Field(i)) {
			continue
		}
		tag := t.Field(i).Tag.Get(packageTagName)
		if tag == "-" {
			continue
		}
		var ci = columnInfo{Index: i}
		// Fields implementing Expression provide their own column name.
		if exp, ok := v.Elem().Field(i).Interface().(Expression); ok {
			ci.Name = exp.Column()
		} else {
			ci.Name = getFieldColumnName(t.Field(i))
		}
		if ri := extractRelationInfo(t.Field(i)); ri != nil {
			ci.RelationInfo = *ri
		} else {
			ci.RelationInfo = relationInfo{Type: noRelation}
		}
		if lookForSetting(tag, "primary") != "" {
			ci.Primary = true
		}
		columns = append(columns, ci)
	}
	return columns, nil
}

// extractRelationInfo parses the field's ormlite tag and returns relation
// metadata, or nil when the field carries no relation (or, for has_one,
// when the referenced type has no primary key).
func extractRelationInfo(field reflect.StructField) *relationInfo {
	var info = relationInfo{Type: noRelation}
	t, ok := field.Tag.Lookup(packageTagName)
	if !ok {
		return nil
	}
	if strings.Contains(t, "has_one") {
		info.Type = hasOne
		info.RelatedType = field.Type
		info.FieldName = getFieldColumnName(field)
		// RefPkValue becomes the zero value of the referenced primary key type.
		for i := 0; i < field.Type.Elem().NumField(); i++ {
			if lookForSetting(field.Type.Elem().Field(i).Tag.Get(packageTagName), "primary") == "primary" {
				info.RefPkValue = reflect.New(field.Type.Elem().Field(i).Type).Elem().Interface()
			}
		}
		if info.RefPkValue == nil {
			return nil // maybe we need to return an error here
		}
	} else if strings.Contains(t, "many_to_many") {
		info.Type = manyToMany
		info.RelatedType = field.Type.Elem()
		tOption := lookForSetting(t, "table")
		// "condition" uses ":" as separator since its value may contain "=".
		info.Condition = lookForSettingWithSep(t, "condition", ":")
		info.Table = tOption
		info.FieldName = lookForSetting(t, "field")
	} else if strings.Contains(t, "has_many") {
		info.RelatedType = field.Type.Elem()
		info.Type = hasMany
	} else {
		return nil
	}
	return &info
}

// queryWithOptions builds and executes a select over table with the given
// columns, applying Where/OrderBy/Limit/Offset from opts. When count is
// non-nil the result set is first materialized into a random temp table so
// the total row count can be scanned into count before the rows are
// re-selected from that temp table.
func queryWithOptions(ctx context.Context, db *sql.DB, table string, columns []string, opts *Options, count *int) (*sql.Rows, error) {
	var (
		values    []interface{}
		q         string
		tableName = getTempTableName(tempTableNameLength)
	)
	q = fmt.Sprintf("select %s from %s", strings.Join(columns, ","), table)
	if count != nil {
		q = fmt.Sprintf("create temp table %s as ", tableName) + q
	}
	if opts != nil {
		// NOTE(review): joins are appended without a leading space — this
		// presumably relies on each join string carrying its own; confirm.
		if len(opts.joins) != 0 {
			q += strings.Join(opts.joins, " ")
		}
		if opts.Where != nil && len(opts.Where) != 0 {
			var keys []string
			for k, v := range opts.Where {
				if v != nil {
					value := reflect.ValueOf(v)
					switch value.Kind() {
					case reflect.Slice:
						if strings.Contains(k, ",") {
							// Multi-column key: emit one row-value comparison
							// per tuple and force OR between them.
							rowValueCount := len(strings.Split(k, ","))
							for i := 0; i < value.Len()/rowValueCount; i++ {
								keys = append(keys, fmt.Sprintf("(%s) = (%s)", k, strings.Trim(strings.Repeat("?,", rowValueCount), ",")))
							}
							opts.Divider = OR
						} else {
							// Single column: "in (...)", capped by Limit.
							count := value.Len()
							if opts.Limit != 0 && opts.Limit < count {
								count = opts.Limit
							}
							keys = append(keys, fmt.Sprintf("%s in (%s)", k, strings.Trim(strings.Repeat("?,", count), ",")))
						}
						for i := 0; i < value.Len(); i++ {
							values = append(values, value.Index(i).Interface())
						}
					case reflect.String:
						switch v.(type) {
						case StrictString:
							keys = append(keys, fmt.Sprintf("%s = ?", k))
							values = append(values, v)
						default:
							// Plain strings match with like %value%.
							keys = append(keys, fmt.Sprintf("%s like ?", k))
							values = append(values, fmt.Sprintf("%%%s%%", v))
						}
					default:
						// Marker types select the comparison operator; the
						// value itself is appended once after the switch
						// (BitwiseANDStrict needs it twice, hence the extra
						// append in its case).
						switch v.(type) {
						case Greater:
							keys = append(keys, fmt.Sprintf("%s > ?", k))
						case GreaterOrEqual:
							keys = append(keys, fmt.Sprintf("%s >= ?", k))
						case Less:
							keys = append(keys, fmt.Sprintf("%s < ?", k))
						case LessOrEqual:
							keys = append(keys, fmt.Sprintf("%s <= ?", k))
						case NotEqual:
							keys = append(keys, fmt.Sprintf("%s != ?", k))
						case BitwiseAND:
							keys = append(keys, fmt.Sprintf("%s&? > 0", k))
						case BitwiseANDStrict:
							keys = append(keys, fmt.Sprintf("%s&? = ?", k))
							values = append(values, v)
						default:
							keys = append(keys, fmt.Sprintf("%s = ?", k))
						}
						values = append(values, v)
					}
				} else {
					// nil value means "column is null".
					keys = append(keys, fmt.Sprintf("%s is null", k))
				}
			}
			if len(keys) > 0 {
				q += fmt.Sprintf(" where %s", strings.Join(keys, opts.Divider))
			}
		}
		if opts.OrderBy != nil {
			q += fmt.Sprintf(" order by %s %s", opts.OrderBy.Field, opts.OrderBy.Order)
		}
		if opts.Limit != 0 {
			q += fmt.Sprintf(" limit %d", opts.Limit)
			if opts.Offset != 0 {
				q += fmt.Sprintf(" offset %d", opts.Offset)
			}
		}
	}
	if os.Getenv("ORMLITE_DEBUG") == "1" {
		fmt.Println(q)
		fmt.Println(values)
	}
	if count != nil {
		// This Exec creates the temp table.
		// NOTE(review): the wrap message describes the later count step, and
		// the Error carries the temp table name rather than the bound values;
		// confirm both are intentional.
		_, err := db.Exec(q, values...)
		if err != nil {
			return nil, &Error{errors.Wrap(err, "failed to get rows count from temp table"), q, []any{tableName}}
		}
		row := db.QueryRow(fmt.Sprintf("select count() from %s", tableName))
		if err := row.Scan(count); err != nil {
			return nil, &Error{errors.Wrap(err, "failed to execute count on a temp table"), "", []any{tableName}}
		}
		// Strip "table." prefixes: the temp table's columns are unqualified.
		for i, colName := range columns {
			if strings.HasPrefix(colName, table) {
				columns[i] = colName[len(table)+1:]
			}
		}
		q = fmt.Sprintf("select %s from %s", strings.Join(columns, ","), tableName)
	}
	rows, err := db.QueryContext(ctx, q, values...)
	if err != nil {
		return nil, &Error{err, q, values}
	}
	return rows, nil
}

// getPrimaryFieldsInfo collects name, value and "ref" setting for every
// field of the struct value tagged as primary.
func getPrimaryFieldsInfo(value reflect.Value) ([]pkFieldInfo, error) {
	var pkFields []pkFieldInfo
	for k := 0; k < value.NumField(); k++ {
		fv := value.Field(k)
		ft := value.Type().Field(k)
		if lookForSetting(ft.Tag.Get(packageTagName), "primary") == "primary" {
			var info pkFieldInfo
			info.name = getFieldColumnName(ft)
			info.field = fv
			info.relationName = lookForSetting(ft.Tag.Get(packageTagName), "ref")
			pkFields = append(pkFields, info)
		}
	}
	return pkFields, nil
}

// loadRelationsForSlice loads hasOne/hasMany/manyToMany relations for every
// entry of the already-populated result slice, honoring opts.RelationDepth.
// NOTE(review): a nil opts skips relation loading here, while
// loadStructRelations treats nil opts as "load"; confirm the asymmetry.
func loadRelationsForSlice(ctx context.Context, db *sql.DB, opts *Options, slicePtr reflect.Value, colInfoPerEntry [][]columnInfo) error {
	if opts != nil && opts.RelationDepth != 0 {
		for i := 0; i < slicePtr.Len(); i++ {
			for _, ci := range colInfoPerEntry[i] {
				if ci.RelationInfo.Type != noRelation {
					var modelValue = slicePtr.Index(i).Elem()
					switch ci.RelationInfo.Type {
					case hasOne:
						if err := loadHasOneRelation(ctx, db, &ci.RelationInfo, modelValue.Field(ci.Index), opts); err != nil {
							return err
						}
					case hasMany:
						pkFields, err := getPrimaryFieldsInfo(modelValue)
						if err != nil {
							return err
						}
						if err := loadHasManyRelation(ctx, db, ci.RelationInfo, modelValue.Field(ci.Index), pkFields, slicePtr.Index(i).Type(), opts); err != nil {
							return err
						}
					case manyToMany:
						pkFields, err := getPrimaryFieldsInfo(modelValue)
						if err != nil {
							return err
						}
						if err := loadManyToManyRelation(ctx, db, &ci.RelationInfo, modelValue.Field(ci.Index), pkFields, opts); err != nil {
							return err
						}
					}
				}
			}
		}
	}
	return nil
}

// loadStructRelations loads the given relation fields of a single model.
func loadStructRelations(ctx context.Context, db *sql.DB, opts *Options, out Model, pkField []pkFieldInfo, relations map[*relationInfo]reflect.Value) error {
	if opts == nil || opts.RelationDepth != 0 {
		for ri, rv := range relations {
			if ri.Type == manyToMany {
				if err := loadManyToManyRelation(ctx, db, ri, rv, pkField, opts); err != nil {
					return err
				}
			} else if ri.Type == hasOne {
				if err := loadHasOneRelation(ctx, db, ri, rv, opts); err != nil {
					return err
				}
			} else if ri.Type == hasMany {
				if err := loadHasManyRelation(ctx, db, *ri, rv, pkField, reflect.TypeOf(out), opts); err != nil {
					return err
				}
			}
		}
	}
	return nil
}

// loadHasManyRelation populates a []*Struct field with all related rows that
// reference the parent's primary key(s), decrementing the relation depth.
func loadHasManyRelation(ctx context.Context, db *sql.DB, ri relationInfo, fieldValue reflect.Value, pkFields []pkFieldInfo, parentType reflect.Type, options *Options) error {
	if fieldValue.Kind() != reflect.Slice {
		return fmt.Errorf("can't load relations: wrong field type: %v", fieldValue.Type())
	}
	rvt := fieldValue.Type().Elem()
	if rvt.Kind() != reflect.Ptr {
		return fmt.Errorf("can't load relations: wrong field type: %v", rvt)
	}
	rve := rvt.Elem()
	if rve.Kind() != reflect.Struct {
		return fmt.Errorf("can't load relations: wrong field type: %v", rve)
	}
	// The back-reference is the related struct's field assignable to the
	// parent type; filter by the parent's primary key value(s).
	where := Where{}
	for i := 0; i < rve.NumField(); i++ {
		f := rve.Field(i)
		if f.Type.AssignableTo(parentType) {
			for _, pkf := range pkFields {
				where[getFieldColumnName(f)] = pkf.field.Interface()
			}
		}
	}
	if len(where) == 0 {
		return errors.New("failed to load has many relation since none fields of related type meet parent type")
	}
	return QuerySliceContext(ctx, db, WithWhere(&Options{RelationDepth: options.RelationDepth - 1, Limit: options.Limit, Divider: OR}, where), fieldValue.Addr().Interface())
}

// loadHasOneRelation replaces a *Struct relation field with a freshly loaded
// instance selected by the stored reference primary key value.
func loadHasOneRelation(ctx context.Context, db *sql.DB, ri *relationInfo, rv reflect.Value, options *Options) error {
	if ri.RefPkValue == nil {
		return nil
	}
	_, ok := rv.Interface().(Model)
	if !ok {
		return fmt.Errorf("incorrect field value of one_to_one relation, expected ormlite.Model")
	}
	refObj := reflect.New(rv.Type().Elem())
	var refPkField string
	for i := 0; i < rv.Type().Elem().NumField(); i++ {
		tag := rv.Type().Elem().Field(i).Tag.Get(packageTagName)
		if lookForSetting(tag, "primary") == "primary" {
			refPkField = getFieldColumnName(rv.Type().Elem().Field(i))
		}
	}
	if refPkField == "" {
		return errors.New("referenced model does not have primary key")
	}
	if err := QueryStructContext(ctx, db, WithWhere(&Options{
		RelationDepth: options.RelationDepth - 1,
	}, Where{refPkField: ri.RefPkValue}), refObj.Interface().(Model)); err != nil {
		return err
	}
	rv.Set(refObj)
	return nil
}

// loadManyToManyRelation resolves a many-to-many field: it first selects the
// related primary keys from the mapping table (ri.Table) filtered by the
// parent's primary key(s) and ri.Condition, then loads the related models
// whose keys were found.
func loadManyToManyRelation(ctx context.Context, db *sql.DB, ri *relationInfo, rv reflect.Value, pkFields []pkFieldInfo, options *Options) error {
	var (
		refPkField, PkField, where []string
		args                       []interface{}
		relatedQueryConditions     = make(Where)
	)
	if rv.Kind() != reflect.Slice {
		return fmt.Errorf("can't load relations: wrong field type: %v", rv.Type())
	}
	rvt := rv.Type().Elem()
	if rvt.Kind() != reflect.Ptr {
		return fmt.Errorf("can't load relations: wrong field type: %v", rvt)
	}
	rve := rvt.Elem()
	if rve.Kind() != reflect.Struct {
		return fmt.Errorf("can't load relations: wrong field type: %v", rve)
	}
	// refPkField: mapping-table columns (from "ref"); PkField: the related
	// struct's own primary key column names.
	for i := 0; i < rve.NumField(); i++ {
		t, ok := rve.Field(i).Tag.Lookup(packageTagName)
		if !ok {
			continue
		}
		if lookForSetting(t, "primary") == "primary" {
			refPkField = append(refPkField, lookForSetting(t, "ref"))
			PkField = append(PkField, getFieldColumnName(rve.Field(i)))
		}
	}
	if len(refPkField) < 1 {
		return errors.New("can't load relations: related struct does not have primary key")
	}
	for i, pkField := range pkFields {
		fNames := strings.Split(ri.FieldName, ",")
		if ri.FieldName != "" {
			if len(fNames) != len(pkFields) {
				return errors.New("field count does not match count of primary fields")
			}
			where = append(where, fmt.Sprintf("%s = ?", fNames[i]))
		} else {
			where = append(where, fmt.Sprintf("%s = ?", pkField.relationName))
		}
		// NOTE(review): always appends the FIRST primary key's value even
		// while iterating all pkFields — looks like it should be
		// pkField.field; confirm intended behavior for composite keys.
		args = append(args, pkFields[0].field.Interface())
	}
	if ri.Condition != "" {
		where = append(where, ri.Condition)
	}
	var whereClause string
	if len(pkFields) != 0 {
		whereClause = " where " + strings.Join(where, AND)
	}
	query := fmt.Sprintf("select %s from %s%s", strings.Join(refPkField, ","), ri.Table, whereClause)
	rows, err := db.QueryContext(ctx, query, args...)
	if err != nil {
		return &Error{err, query, args}
	}
	for rows.Next() {
		var relatedPrimaryKeyValues []interface{}
		for i := 0; i < len(PkField); i++ {
			var relatedPk interface{}
			relatedPrimaryKeyValues = append(relatedPrimaryKeyValues, &relatedPk)
		}
		if err := rows.Scan(relatedPrimaryKeyValues...); err != nil {
			return err
		}
		// Accumulate scanned keys under the joined PkField name so the final
		// query uses a (single- or multi-column) "in"/row-value match.
		if _, ok := relatedQueryConditions[strings.Join(PkField, ",")]; !ok {
			relatedQueryConditions[strings.Join(PkField, ",")] = relatedPrimaryKeyValues
		} else {
			relatedQueryConditions[strings.Join(PkField, ",")] = append(
				relatedQueryConditions[strings.Join(PkField, ",")].([]interface{}),
				relatedPrimaryKeyValues...)
		}
	}
	if len(relatedQueryConditions) == 0 {
		return nil // query has no rows so there is no need to load any model
	}
	return QuerySliceContext(
		ctx, db,
		WithWhere(&Options{
			RelationDepth: options.RelationDepth - 1,
			Divider:       options.Divider,
			Limit:         options.Limit}, relatedQueryConditions),
		rv.Addr().Interface(),
	)
}

// QueryStruct looks up for rows in given table and scans it to provided struct or slice of structs
func QueryStruct(db *sql.DB, opts *Options, out Model) error {
	ctx, cancel := context.WithTimeout(context.Background(), queryTimeout)
	defer cancel()
	return QueryStructContext(ctx, db, opts, out)
}

// QueryStructContext looks up for rows in given table and scans it to provided struct or slice of structs
func QueryStructContext(ctx context.Context, db *sql.DB, opts *Options, out Model) error {
	model := reflect.ValueOf(out).Elem()
	if model.Type().Kind() != reflect.Struct {
		return fmt.Errorf("expected pointer to struct, got %T", model.Type())
	}
	var (
		pkFields  []pkFieldInfo
		columns   []string
		fieldPTRs []interface{}
		relations = make(map[*relationInfo]reflect.Value)
	)
	pkFields, err := getPrimaryFieldsInfo(model)
	if err != nil {
		return errors.Wrap(err, "failed to load struct")
	}
	for i := 0; i < model.NumField(); i++ {
		if !isExportedField(model.Type().Field(i)) {
			continue
		}
		tag := model.Type().Field(i).Tag.Get(packageTagName)
		if tag == "-" {
			continue
} if opts != nil && opts.Columns != nil { var colName string if exp, ok := model.Field(i).Interface().(Expression); ok { colName = exp.Column() } else { colName = getFieldColumnName(model.Type().Field(i)) } if _, ok := opts.Columns[colName]; !ok && !strings.Contains(tag, "primary") { continue } } if ri := extractRelationInfo(model.Type().Field(i)); ri != nil { if ri.Type == hasOne { columns = append(columns, getFieldColumnName(model.Type().Field(i))) fieldPTRs = append(fieldPTRs, &ri.RefPkValue) } relations[ri] = model.Field(i) continue } if exp, ok := model.Field(i).Interface().(Expression); ok { columns = append(columns, exp.Column()) } else { columns = append(columns, getFieldColumnName(model.Type().Field(i))) } fieldPTRs = append(fieldPTRs, model.Field(i).Addr().Interface()) } if len(columns) == 0 && len(relations) != 0 { goto Relations } { if opts != nil && len(opts.RelatedTo) != 0 { searchModels := map[reflect.Type][]Model{} for _, sm := range opts.RelatedTo { mt := reflect.TypeOf(sm) if slice, ok := searchModels[mt]; ok { slice = append(slice, sm) } else { searchModels[mt] = []Model{sm} } } } rows, err := queryWithOptions(ctx, db, out.Table(), columns, opts, nil) if err != nil { return err } for rows.Next() { if err := rows.Scan(fieldPTRs...); err != nil { return err } } } Relations: return loadStructRelations(ctx, db, opts, out, pkFields, relations) } // QuerySlice scans rows into the slice of structs func QuerySlice(db *sql.DB, opts *Options, out interface{}) error { ctx, cancel := context.WithTimeout(context.Background(), queryTimeout) defer cancel() return QuerySliceContext(ctx, db, opts, out) } // QuerySliceCount scans rows into the slice of structs also returning count of matched rows func QuerySliceCount(db *sql.DB, opts *Options, out any, count *int) error { return QuerySliceCountContext(context.Background(), db, opts, out, count) } // QuerySliceContext scans rows into the slice of structs with given context func QuerySliceContext(ctx 
context.Context, db *sql.DB, opts *Options, out any) error { return QuerySliceCountContext(ctx, db, opts, out, nil) } // QuerySliceCountContext scans rows into the slice of structs with given context and also returning count of matched rows func QuerySliceCountContext(ctx context.Context, db *sql.DB, opts *Options, out any, count *int) error { slicePtr := reflect.ValueOf(out).Elem() if !slicePtr.Type().Elem().Implements(reflect.TypeOf((*Model)(nil)).Elem()) { return errors.New("slice contain type that does not implement Model interface") } modelInfo, err := getModelInfo(reflect.New(slicePtr.Type().Elem().Elem()).Interface()) if err != nil { return errors.New("slice contain type that does not implement Model interface") } var ( modelType = slicePtr.Type().Elem().Elem() colNames []string colInfoPerEntry [][]columnInfo ) colInfo, err := getColumnInfo(modelType) if err != nil { return fmt.Errorf("failed to get column info for type: %v", modelType) } if opts != nil && opts.Columns != nil { var selected []columnInfo for _, ci := range colInfo { if _, ok := opts.Columns[ci.Name]; ok || ci.Primary { selected = append(selected, ci) } } colInfo = selected } for _, ci := range colInfo { if ci.RelationInfo.Type == noRelation || ci.RelationInfo.Type == hasOne { if ci.Primary { colNames = append(colNames, fmt.Sprintf("%s.%s", modelInfo.table, ci.Name)) } else { colNames = append(colNames, ci.Name) } } } if opts != nil && len(opts.RelatedTo) != 0 { searchModels := map[reflect.Type][]Model{} for _, sm := range opts.RelatedTo { mt := reflect.TypeOf(sm) if slice, ok := searchModels[mt]; ok { slice = append(slice, sm) } else { searchModels[mt] = []Model{sm} } } for _, ci := range colInfo { if slice, ok := searchModels[ci.RelationInfo.RelatedType]; ok { switch ci.RelationInfo.Type { case hasMany: modelStructType := ci.RelationInfo.RelatedType.Elem() relModelInfo, err := getModelInfo(reflect.New(modelStructType).Interface().(IModel)) if err != nil { return errors.Wrap(err, "can't 
search related to") } var ( joinQuery strings.Builder conditions []string ) for _, field := range modelInfo.fields { if isPkField(field) { joinQuery.WriteString(" left join " + relModelInfo.table + " on ") for _, relField := range relModelInfo.fields { if modelInfo.value.Addr().Type().AssignableTo(relField.value.Type()) { conditions = append(conditions, fmt.Sprintf( "%s.%s = %s.%s", modelInfo.table, field.column, relModelInfo.table, relField.column)) } if isPkField(relField) { for _, sm := range slice { // add where conditions val, err := getModelValue(sm) if err != nil { return errors.Wrap(err, "can't get model value of related one") } pFields, err := getPrimaryFieldsInfo(val) if err != nil { return errors.Wrap(err, "can't get related model primary fields") } for _, pField := range pFields { addWhereClause(opts, fmt.Sprintf("%s.%s", relModelInfo.table, pField.name), pField.field) } } } } } } if len(conditions) != 0 { joinQuery.WriteString(strings.Join(conditions, OR)) opts.joins = append(opts.joins, joinQuery.String()) } case manyToMany: modelStructType := ci.RelationInfo.RelatedType.Elem() relModelInfo, err := getModelInfo(reflect.New(modelStructType).Interface().(IModel)) if err != nil { return errors.Wrap(err, "can't search related to") } var ( joinQuery strings.Builder conditions []string ) for _, field := range modelInfo.fields { if isPkField(field) { joinQuery.WriteString(" left join " + ci.RelationInfo.Table + " on ") for _, relField := range relModelInfo.fields { if isPkField(relField) { conditions = append(conditions, fmt.Sprintf( "%s.%s = %s.%s", modelInfo.table, field.column, ci.RelationInfo.Table, field.reference.column)) for _, sm := range slice { // add where conditions val, err := getModelValue(sm) if err != nil { return errors.Wrap(err, "can't get model value of related one") } pFields, err := getPrimaryFieldsInfo(val) if err != nil { return errors.Wrap(err, "can't get related model primary fields") } for _, pField := range pFields { 
addWhereClause(opts, fmt.Sprintf("%s.%s", ci.RelationInfo.Table, pField.relationName), pField.field) } } } } } } if len(conditions) != 0 { joinQuery.WriteString(strings.Join(conditions, OR)) opts.joins = append(opts.joins, joinQuery.String()) } } } } } rows, err := queryWithOptions( ctx, db, reflect.New(modelType).Interface().(Model).Table(), colNames, opts, count) if err != nil { return err } if opts != nil { opts.joins = nil } for rows.Next() { var ( se = reflect.New(modelType) fPtrs []interface{} entryColInfo = make([]columnInfo, len(colInfo)) ) copy(entryColInfo, colInfo) colInfoPerEntry = append(colInfoPerEntry, entryColInfo) for i := 0; i < se.Elem().NumField(); i++ { for k, ci := range colInfo { if ci.Index == i { if ci.RelationInfo.Type == hasOne { pToPk := &entryColInfo[k].RelationInfo.RefPkValue fPtrs = append(fPtrs, pToPk) } else if ci.RelationInfo.Type == hasMany || ci.RelationInfo.Type == manyToMany { continue } else { fPtrs = append(fPtrs, se.Elem().Field(i).Addr().Interface()) } } } } if err := rows.Scan(fPtrs...); err != nil { return err } slicePtr.Set(reflect.Append(slicePtr, se)) } return loadRelationsForSlice(ctx, db, opts, slicePtr, colInfoPerEntry) } func addWhereClause(options *Options, s string, value reflect.Value) { if options == nil { options = new(Options) } if options.Where == nil { options.Where = make(Where) } switch value.Kind() { case reflect.Int, reflect.Float64, reflect.Int64: if isZeroField(value) { options.Where[s] = nil } else { options.Where[s] = value.Interface() } default: options.Where[s] = value.Interface() } } // Delete removes model object from database by its primary key func Delete(db *sql.DB, m Model) (sql.Result, error) { modelValue := reflect.ValueOf(m).Elem() var ( where []string args []interface{} pkFields []pkFieldInfo ) for i := 0; i < modelValue.NumField(); i++ { fv := modelValue.Field(i) ft := modelValue.Type().Field(i) if lookForSetting(ft.Tag.Get(packageTagName), "primary") == "primary" { var info pkFieldInfo 
info.name = getFieldColumnName(ft) info.field = fv pkFields = append(pkFields, info) } } if len(pkFields) == 0 { return nil, errors.New("delete failed: model does not have primary key") } for _, pkField := range pkFields { if reflect.Zero(pkField.field.Type()).Interface() == pkField.field.Interface() { return nil, errors.New("delete failed: model's primary key has zero value") } where = append(where, fmt.Sprintf("%s = ?", pkField.name)) args = append(args, pkField.field.Interface()) } ctx, cancel := context.WithTimeout(context.Background(), queryTimeout) defer cancel() query := fmt.Sprintf("delete from %s where %s", m.Table(), strings.Join(where, " and ")) res, err := db.ExecContext(ctx, query, args...) if err != nil { return nil, &Error{err, query, args} } return res, err } type pkFieldInfo struct { relationName string name string field reflect.Value } // Count models in database with search options func Count(db *sql.DB, m Model, opts *Options) (count int64, err error) { mInfo, err := getModelInfo(m) if err != nil { return } var ( query strings.Builder args []interface{} divider string ) colInfo, err := getColumnInfo(mInfo.value.Type()) if err != nil { return } if opts != nil && len(opts.RelatedTo) != 0 { searchModels := map[reflect.Type][]Model{} for _, sm := range opts.RelatedTo { mt := reflect.TypeOf(sm) if slice, ok := searchModels[mt]; ok { slice = append(slice, sm) } else { searchModels[mt] = []Model{sm} } } for _, ci := range colInfo { if slice, ok := searchModels[ci.RelationInfo.RelatedType]; ok { switch ci.RelationInfo.Type { case hasMany: modelStructType := ci.RelationInfo.RelatedType.Elem() relModelInfo, err := getModelInfo(reflect.New(modelStructType).Interface().(IModel)) if err != nil { return 0, err } var ( joinQuery strings.Builder conditions []string ) for _, field := range mInfo.fields { if isPkField(field) { joinQuery.WriteString(" left join " + relModelInfo.table + " on ") for _, relField := range relModelInfo.fields { if 
mInfo.value.Addr().Type().AssignableTo(relField.value.Type()) { conditions = append(conditions, fmt.Sprintf( "%s.%s = %s.%s", mInfo.table, field.column, relModelInfo.table, relField.column)) } if isPkField(relField) { for _, sm := range slice { // add where conditions val, err := getModelValue(sm) if err != nil { return 0, err } pFields, err := getPrimaryFieldsInfo(val) if err != nil { return 0, err } for _, pField := range pFields { addWhereClause(opts, fmt.Sprintf("%s.%s", relModelInfo.table, pField.name), pField.field) } } } } } } if len(conditions) != 0 { joinQuery.WriteString(strings.Join(conditions, OR)) opts.joins = append(opts.joins, joinQuery.String()) } case manyToMany: modelStructType := ci.RelationInfo.RelatedType.Elem() relModelInfo, err := getModelInfo(reflect.New(modelStructType).Interface().(IModel)) if err != nil { return 0, err } var ( joinQuery strings.Builder conditions []string ) for _, field := range mInfo.fields { if isPkField(field) { joinQuery.WriteString(" left join " + ci.RelationInfo.Table + " on ") for _, relField := range relModelInfo.fields { if isPkField(relField) { conditions = append(conditions, fmt.Sprintf( "%s.%s = %s.%s", mInfo.table, field.column, ci.RelationInfo.Table, field.reference.column)) for _, sm := range slice { // add where conditions val, err := getModelValue(sm) if err != nil { return 0, err } pFields, err := getPrimaryFieldsInfo(val) if err != nil { return 0, err } for _, pField := range pFields { addWhereClause(opts, fmt.Sprintf("%s.%s", ci.RelationInfo.Table, pField.relationName), pField.field) } } } } } } if len(conditions) != 0 { joinQuery.WriteString(strings.Join(conditions, OR)) opts.joins = append(opts.joins, joinQuery.String()) } } } } } query.WriteString("select count() from ") query.WriteString(m.Table()) if opts != nil { if len(opts.joins) != 0 { query.WriteString(strings.Join(opts.joins, " ")) } if opts.Where != nil && len(opts.Where) > 0 { query.WriteString(" where ") if len(opts.Where) > 1 && 
opts.Divider == "" { return 0, errors.New("empty divider with multiple conditions") } divider = opts.Divider for f, v := range opts.Where { if v != nil { value := reflect.ValueOf(v) switch value.Kind() { case reflect.Slice: if strings.Contains(f, ",") { rowValueCount := len(strings.Split(f, ",")) for i := 0; i < value.Len()/rowValueCount; i++ { query.WriteString("(" + f + ") = (" + strings.Trim(strings.Repeat("?,", rowValueCount), ",") + ")" + divider) } opts.Divider = OR } else { count := value.Len() if opts.Limit != 0 && opts.Limit < count { count = opts.Limit } query.WriteString(f + " in (" + strings.Trim(strings.Repeat("?,", count), ",") + ")" + divider) } for i := 0; i < value.Len(); i++ { args = append(args, value.Index(i).Interface()) } case reflect.String: switch v.(type) { case StrictString: query.WriteString(f + " = ?" + divider) args = append(args, v) default: query.WriteString(f + " like ?" + divider) args = append(args, fmt.Sprintf("%%%s%%", v)) } default: switch v.(type) { case Greater: query.WriteString(f + " > ?" + divider) case GreaterOrEqual: query.WriteString(f + " >= ?" + divider) case Less: query.WriteString(f + " < ?" + divider) case LessOrEqual: query.WriteString(f + " <= ?" + divider) case NotEqual: query.WriteString(f + " != ?" + divider) case BitwiseAND: query.WriteString(f + "&? > 0" + divider) case BitwiseANDStrict: query.WriteString(f + "&? = ?" + divider) args = append(args, v) default: query.WriteString(f + " = ?" + divider) } args = append(args, v) } } else { query.WriteString(f + " is null" + divider) } } } } row := db.QueryRow(strings.TrimSuffix(query.String(), divider), args...) if err := row.Scan(&count); err != nil { return 0, err } return count, nil }
package main

import (
	"bufio"
	"fmt"
	"os"
	"strconv"

	"github.com/fogcreek/mini"
)

// D_PROXY_LIST holds one proxy address per line, loaded from the proxy list file.
var D_PROXY_LIST []string

// D_CAPTCHA_URL is the endpoint serving the CAPTCHA image.
const D_CAPTCHA_URL = "http://topofgames.com/imageverify.php"

// Target is the vote URL built from the configured target id.
var Target string

// ConfigStruct mirrors the keys read from settings.ini.
var ConfigStruct struct {
	ProxyListFilepath string `json:"proxy_list_filepath"`
	TargetID          int    `json:"target_id"`
	WorkerCount       int    `json:"worker_count"`
	DBCUsername       string `json:"dbc_username"`
	DBCPassword       string `json:"dbc_password"`
	ProxyType         string `json:"proxy_type"`
	Timeout           int    `json:"timeout"`
}

// LoadConfig reads settings.ini into ConfigStruct, falling back to the
// documented defaults for missing keys, and derives the Target vote URL
// from the configured target id.
func LoadConfig() error {
	configFile, err := mini.LoadConfiguration("settings.ini")
	if err != nil {
		return err
	}
	ConfigStruct.ProxyListFilepath = configFile.String("proxy_list_path", "proxies.txt")
	ConfigStruct.TargetID = int(configFile.Integer("target_id", 80397))
	ConfigStruct.WorkerCount = int(configFile.Integer("worker_count", 20))
	ConfigStruct.DBCUsername = configFile.String("deathbycaptcha_username", "username")
	ConfigStruct.DBCPassword = configFile.String("deathbycaptcha_password", "password")
	ConfigStruct.ProxyType = configFile.String("proxy_type", "http")
	ConfigStruct.Timeout = int(configFile.Integer("timeout", 20))
	Target = "http://topofgames.com/index.php?do=votes&id=" + strconv.Itoa(ConfigStruct.TargetID)
	return nil
}

// LoadProxies appends every line of the configured proxy list file to
// D_PROXY_LIST. The previous os.Stat pre-check was removed: it raced with
// the open (TOCTOU) and replaced the real cause with a vague message —
// os.Open reports the same failure with full detail, which we now wrap.
func LoadProxies() error {
	proxyFile, err := os.Open(ConfigStruct.ProxyListFilepath)
	if err != nil {
		return fmt.Errorf("opening proxy list %q: %w", ConfigStruct.ProxyListFilepath, err)
	}
	defer proxyFile.Close()
	scanner := bufio.NewScanner(proxyFile)
	for scanner.Scan() {
		D_PROXY_LIST = append(D_PROXY_LIST, scanner.Text())
	}
	// A read error mid-file previously went unnoticed; Scanner only reports
	// it through Err() after the loop ends.
	if err := scanner.Err(); err != nil {
		return fmt.Errorf("reading proxy list %q: %w", ConfigStruct.ProxyListFilepath, err)
	}
	return nil
}
package main

import (
	"bufio"
	// "database/sql"
	"github.com/garyburd/redigo/redis"
	// _ "github.com/go-sql-driver/mysql"
	"github.com/huichen/sego"
	"log"
	"os"
	"strconv"
	"time"

	"github.com/huichen/pinyin"
)

// Redis key prefixes and data file locations used by the indexer.
const (
	indexKeywordPre  = "atat:index:keyword:"  // keyword -> set of book ids
	indexScorePre    = "atat:index:_score_:"  // book id -> rank score
	indexConfigPre   = "atat:index:config:"   // indexer config flags
	indexInstancePre = "atat:index:instance:"

	dictionaryFilePath  = "./data/dictionary.txt"
	stopTokenFilePath   = "./data/stop_tokens.txt"
	pinyinTableFilePath = "./data/pinyin_table.txt"
)

// var (
// 	segmenter = sego.Segmenter{}
// 	numThreads = runtime.NumCPU()
// 	task = make(chan BookIndex, numThreads)
// 	numRuns = 20
// 	st StopTokens
// )

// engine is the package-wide indexing engine instance.
var engine Engine

// StopTokens is a set of tokens excluded from indexing.
type StopTokens struct {
	stopTokens map[string]bool
}

// Engine segments book text, converts it to pinyin, and writes keyword
// sets into Redis via a pool of splitWorker goroutines fed by task.
type Engine struct {
	segmenter     sego.Segmenter // Chinese word segmenter
	st            StopTokens
	py            pinyin.Pinyin
	numThreads    int             // worker count, taken from config.server.cpuCore
	task          chan BookEngine // work queue consumed by splitWorker
	numRuns       int             // how many times the corpus is replayed (throughput measurement)
	pinyinMatch   bool            // also index the pinyin form of each keyword
	indexComplete bool            // mirrors the Redis "indexcomplete" flag
}

// BookEngine is one indexable book row loaded from the database.
type BookEngine struct {
	Id      int
	Isbn    string
	Author  string
	Title   string
	Summary string
	Rank    float64
}

// Init loads the dictionary, stop-token and pinyin tables, sizes the worker
// pool from the server config, and reads the "indexcomplete" flag from Redis
// (creating it as 0 if absent) to decide whether indexing may be skipped.
func (engine *Engine) Init() {
	// Load Dictionary
	engine.segmenter.LoadDictionary(ExpandPath(dictionaryFilePath))
	// Load StopToken
	engine.st.Init(ExpandPath(stopTokenFilePath))
	engine.py.Init(ExpandPath(pinyinTableFilePath))
	engine.numThreads = config.server.cpuCore
	engine.task = make(chan BookEngine, engine.numThreads)
	engine.numRuns = 20
	engine.pinyinMatch = true
	key := indexConfigPre + "indexcomplete"
	rdb := cache.redispool.Get()
	// NOTE(review): this first err is never checked before being overwritten
	// by the GET below — a failed EXISTS is silently treated as "missing".
	existindexStatus, err := redis.Int(rdb.Do("EXISTS", key))
	if existindexStatus == 0 {
		rdb.Do("SET", key, 0)
	}
	indexStatus, err := redis.Int(rdb.Do("GET", key))
	checkErr(err)
	if indexStatus == 1 {
		engine.indexComplete = true
	} else {
		engine.indexComplete = false
	}
	rdb.Close()
}

// IndexAll loads every book row into memory, starts numThreads splitWorker
// goroutines, replays the corpus numRuns times through the task channel while
// timing throughput, and finally sets the Redis "indexcomplete" flag.
// If the flag was already set at Init time it only logs and returns.
func (engine *Engine) IndexAll() {
	logFile := getLogFile(config.global.logFile)
	indexLogger := serverIndexInfoLog(logFile)
	if engine.indexComplete {
		indexLogger.Println("Don't repeat create index")
	} else {
		// Load database document to Memory
		query := "select id, isbn, author, title, summary, (average*100+num_raters*0.1) as newrank from book_items"
		rows, err := model.db.Query(query)
		checkErr(err)
		// NOTE(review): rows is never Closed and rows.Err() is not checked.
		var booklist []BookEngine
		var book BookEngine
		size := 0 // total text bytes, used for the MB/s figure below
		for rows.Next() {
			err := rows.Scan(&book.Id, &book.Isbn, &book.Author, &book.Title, &book.Summary, &book.Rank)
			checkErr(err)
			size += len([]byte(book.Author))
			size += len([]byte(book.Title))
			size += len([]byte(book.Summary))
			booklist = append(booklist, book)
		}
		// Launch the splitWorker goroutines (they run until process exit).
		for i := 0; i < engine.numThreads; i++ {
			go engine.splitWorker()
		}
		indexLogger.Println("分词开始")
		// record time
		t0 := time.Now()
		// Feed every book numRuns times; workers index concurrently.
		for i := 0; i < engine.numRuns; i++ {
			for _, value := range booklist {
				engine.task <- value
			}
		}
		// record time and calculate the speed of split documents
		// NOTE(review): t1 is taken when the queue is drained of sends, not
		// when workers finish — the MB/s figure slightly overstates speed.
		t1 := time.Now()
		indexLogger.Printf("分词花费时间 %v", t1.Sub(t0))
		indexLogger.Printf("分词速度 %f MB/s", float64(size*engine.numRuns)/t1.Sub(t0).Seconds()/(1024*1024))
		// Set the index status
		// rdb := redispool.Get()
		rdb := cache.redispool.Get()
		key := indexConfigPre + "indexcomplete"
		rdb.Do("SET", key, 1)
		rdb.Close()
	}
}

// splitWorker loops forever: it takes one book from the task channel,
// segments author/title/summary text, and pipelines Redis SADD/SET commands
// (score key, ISBN key, each non-stop keyword, and optionally its pinyin
// form) before a single Flush. It has no shutdown path by design.
func (engine *Engine) splitWorker() {
	for {
		rdb := cache.redispool.Get()
		book := <-engine.task
		// NOTE(review): "socreKey" looks like a typo for "scoreKey".
		socreKey := indexScorePre + strconv.Itoa(book.Id)
		rdb.Send("SET", socreKey, book.Rank)
		if isValidIsbn13(book.Isbn) {
			isbnKey := indexKeywordPre + book.Isbn
			rdb.Send("SADD", isbnKey, book.Id)
		}
		text := []byte(book.Author + "," + book.Title + "," + book.Summary)
		for _, word := range sego.SegmentsToSlice(engine.segmenter.Segment(text), true) {
			if !engine.st.IsStopToken(word) {
				key := indexKeywordPre + word
				rdb.Send("SADD", key, book.Id)
				if engine.pinyinMatch {
					pinyinWord := engine.pinyinConverter(word)
					if pinyinWord != "" {
						pinyinKey := indexKeywordPre + pinyinWord
						rdb.Send("SADD", pinyinKey, book.Id)
					}
				}
			}
		}
		rdb.Flush()
		rdb.Close()
	}
}

// pinyinConverter concatenates the pinyin of each rune in zhstr.
// Runes without a pinyin mapping contribute whatever GetPinyin returns
// for them — presumably an empty string; verify against the pinyin package.
func (engine *Engine) pinyinConverter(zhstr string) (string) {
	var pystr string
	for _, word := range zhstr {
		pystr += engine.py.GetPinyin(word, false)
	}
	return pystr
}

// Init loads the stop-token set from stopTokenFile, one token per line.
// An empty path leaves the set empty; an unreadable file is fatal.
func (st *StopTokens) Init(stopTokenFile string) {
	st.stopTokens = make(map[string]bool)
	if stopTokenFile == "" {
		return
	}
	file, err := os.Open(ExpandPath(stopTokenFile))
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		text := scanner.Text()
		if text != "" {
			st.stopTokens[text] = true
		}
	}
}

// IsStopToken reports whether token is in the stop-token set.
func (st *StopTokens) IsStopToken(token string) bool {
	_, found := st.stopTokens[token]
	return found
}
package api

import (
	"bytes"
	"github.com/gorilla/mux"
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"io"
	"net/http"
	"net/http/httptest"
	"testing"
)

// TestAPIRequest bundles a request, the handler that will serve it, and the
// recorder that captures the response, running the handler at most once.
type TestAPIRequest struct {
	Recorder *httptest.ResponseRecorder
	hasRun   bool
	headers  map[string]string
	Request  *http.Request
	handler  http.Handler
}

// handler builds the API router under test: POST /word, POST /sentence,
// GET /history, plus a no-op 404 handler.
func handler() http.Handler {
	router := mux.NewRouter()
	router.NotFoundHandler = http.HandlerFunc(func(http.ResponseWriter, *http.Request) {})
	router.HandleFunc("/word", postWord).Methods("POST").Name("get_history")
	router.HandleFunc("/sentence", postSentence).Methods("POST").Name("get_history")
	router.HandleFunc("/history", getHistory).Methods("GET").Name("get_history")
	return router
}

// run serves the stored request into the recorder, guaranteeing the handler
// executes at most once per TestAPIRequest.
func (r *TestAPIRequest) run() {
	if !r.hasRun {
		r.handler.ServeHTTP(r.Recorder, r.Request)
		r.hasRun = true
	}
}

// ExpectRequest prepares a TestAPIRequest for the given method/path with an
// optional body; an empty bodyString yields a nil request body. It panics on
// a malformed request, which is acceptable inside the test suite.
func ExpectRequest(handler http.Handler, method, path string, bodyString string) *TestAPIRequest {
	var payload io.Reader
	if len(bodyString) > 0 {
		payload = bytes.NewReader([]byte(bodyString))
	}
	req, err := http.NewRequest(method, path, payload)
	if err != nil {
		panic(err)
	}
	return &TestAPIRequest{
		Recorder: httptest.NewRecorder(),
		headers:  map[string]string{},
		Request:  req,
		handler:  handler,
	}
}

// TestAPI hooks Ginkgo into the standard testing runner.
func TestAPI(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "API Suite")
}
package leetcode // Write a program that checks whether an integer is a palindrome. For example, 121 is a palindrome, as well as 888. 678 is not a palindrome. Do not convert the integer into a string. func isPalindrome(x int) bool { if x < 0 { return false } number, reversed := x, 0 for x/10 > 0 { reversed *= 10 reversed += x % 10 x /= 10 } reversed *= 10 reversed += x % 10 return number == reversed }