text stringlengths 11 4.05M |
|---|
package _51_N_Queens
import "testing"
// TestSolveNQueens smoke-tests solveNQueens with the smallest board
// (n=1) and the classic n=4 case, logging the returned solution sets.
func TestSolveNQueens(t *testing.T) {
	for _, n := range []int{1, 4} {
		t.Log(solveNQueens(n))
	}
}
|
package design
import (
. "github.com/goadesign/goa/design"
. "github.com/goadesign/goa/design/apidsl"
)
// UserPayload describes the request body for user creation/update:
// a display name (min 2 chars), an email and a password (min 8 chars),
// all three required.
var UserPayload = Type("UserPayload", func() {
	Attribute("name", func() {
		MinLength(2)
		Example("James Brown")
	})
	Attribute("email", func() {
		Format("email")
	})
	Attribute("password", func() {
		MinLength(8)
	})
	Required("name", "email", "password")
})
// AccountPayload describes the request body for an account. The
// description defaults to empty and hasBalance/balance default to
// false/0, so clients may omit them even though they are "required"
// (goa fills defaults before validation).
var AccountPayload = Type("AccountPayload", func() {
	Attribute("name", func() {
		MinLength(1)
	})
	Attribute("description", func() {
		Default("")
	})
	Attribute("accountType", func() {
		Enum("wallet", "bank", "credit-card")
	})
	Attribute("hasBalance", Boolean, func() {
		Default(false)
	})
	Attribute("balance", Integer, func() {
		Default(0)
	})
	Required("name", "description", "accountType", "hasBalance", "balance")
})
// TransactionPayload describes the request body for a transaction.
// The date must look like YYYY-MM-DD (year may be 1-4 digits per the
// pattern); originalTitle/description default to empty strings.
var TransactionPayload = Type("TransactionPayload", func() {
	Attribute("accountId", UUID)
	Attribute("amount", Integer)
	Attribute("transactionType", func() {
		Enum("expense", "income", "transfer", "balance-adjustment")
	})
	Attribute("title")
	Attribute("originalTitle", func() {
		Default("")
	})
	Attribute("description", func() {
		Default("")
	})
	Attribute("categoryId", UUID)
	Attribute("date", func() {
		Pattern(`^\d{1,4}-\d{2}-\d{2}$`)
	})
	Required("accountId", "amount", "transactionType", "title", "date")
})
// CategoryPayload describes the request body for a category: a
// non-empty required name plus an optional parent category reference.
var CategoryPayload = Type("CategoryPayload", func() {
	Attribute("name", func() {
		MinLength(1)
	})
	Attribute("parentCategoryId", UUID)
	Required("name")
})
|
package main
import (
"coolGame/lib"
"fmt"
"time"
)
// main builds a player and a boss, wires them into a battle, then
// prints a counter once per second. The range over time.Tick never
// terminates, so this blocks forever (the battle channel is stubbed
// out below).
func main() {
	player := lib.NewPlayer("jack")
	fmt.Println(player)
	boss := lib.NewBoss()
	battle := lib.NewBattle(boss, player)
	fmt.Println(battle.Boss)
	// battleDone := make(chan bool)
	t := time.Tick(1 * time.Second)
	i := 0
	for now := range t {
		fmt.Println(i, now)
		i += 1
	}
}
|
package bosh
import (
"reflect"
"testing"
"time"
"github.com/skriptble/nine/element"
"github.com/skriptble/nine/namespace"
)
// TestBodyTransformElement verifies Body.TransformElement: a fully
// populated Body must render every field as the corresponding body
// element attribute, and adding a child in the stream namespace must
// pull in the xmlns:stream declaration.
func TestBodyTransformElement(t *testing.T) {
	t.Parallel()
	// Adds proper attributes
	body1 := Body{
		To:           "foo@bar",
		From:         "baz@quux",
		Lang:         "en-gb",
		Ver:          Version{Major: 1, Minor: 4},
		Wait:         5 * time.Second,
		Hold:         14,
		Ack:          1,
		Content:      "application/xml; charset=utf-8",
		RID:          619727392817,
		XMPPVer:      Version{Major: 2, Minor: 0},
		RestartLogic: true,
		Restart:      true,
		SID:          "bo12345sh",
		Requests:     7,
		Polling:      3 * time.Second,
		Inactivity:   37 * time.Second,
		Accept:       "deflate,gzip",
		MaxPause:     93 * time.Second,
		Children:     []element.Element{element.New("message")},
		HoldSet:      true,
	}
	// NOTE(review): `body` is a package-level base element defined
	// elsewhere in this package — confirm. Attribute order matters:
	// reflect.DeepEqual compares the attribute slices positionally.
	want := body.
		AddAttr("to", "foo@bar").
		AddAttr("from", "baz@quux").
		AddAttr("xml:lang", "en-gb").
		AddAttr("ver", "1.4").
		AddAttr("wait", "5").
		AddAttr("xmpp:version", "2.0").
		AddAttr("xmpp:restartlogic", "true").
		AddAttr("xmpp:restart", "true").
		AddAttr("hold", "14").
		AddAttr("ack", "1").
		AddAttr("content", "application/xml; charset=utf-8").
		AddAttr("rid", "619727392817").
		AddAttr("sid", "bo12345sh").
		AddAttr("requests", "7").
		AddAttr("polling", "3").
		AddAttr("inactivity", "37").
		AddAttr("accept", "deflate,gzip").
		AddAttr("maxpause", "93").
		AddAttr("xmlns:xmpp", namespace.XMPP).
		AddChild(element.New("message"))
	got := body1.TransformElement()
	if !reflect.DeepEqual(want, got) {
		t.Error("Should properly transform body into an element.Element")
		t.Errorf("\nWant:%+v\nGot :%+v", want, got)
	}
	// Adds the stream namespace if a child element has a stream namespace
	body1.Children = append(body1.Children, element.New("stream:error"))
	want = want.AddChild(element.New("stream:error")).AddAttr("xmlns:stream", namespace.Stream)
	got = body1.TransformElement()
	if !reflect.DeepEqual(want, got) {
		t.Error("Should add stream namespace if a child is in the stream namespace")
		t.Errorf("\nWant:%+v\nGot :%+v", want, got)
	}
}
// TestBodyTransformer verifies BodyTransformer.TransformBody: an
// element with a full attribute set must produce a matching Body
// (elem1 -> body1), while an empty body element must fall back to the
// transformer's defaults (elem2 -> body2).
func TestBodyTransformer(t *testing.T) {
	t.Parallel()
	elem1 := element.New("body").
		AddAttr("to", "foo@bar").
		AddAttr("from", "baz@quux").
		AddAttr("xml:lang", "en-gb").
		AddAttr("ver", "1.4").
		AddAttr("wait", "5").
		AddAttr("hold", "14").
		AddAttr("ack", "1").
		AddAttr("content", "application/xml; charset=utf-8").
		AddAttr("sid", "bo12345sh").
		AddAttr("rid", "619727392817").
		AddAttr("xmpp:version", "2.0").
		AddAttr("xmpp:restartlogic", "true").
		AddAttr("xmpp:restart", "true").
		AddAttr("requests", "7").
		AddAttr("polling", "3").
		AddAttr("inactivity", "37").
		AddAttr("accept", "deflate,gzip").
		AddAttr("maxpause", "93").
		AddChild(element.New("message"))
	elem2 := element.New("body")
	body1 := Body{
		To:           "foo@bar",
		From:         "baz@quux",
		Lang:         "en-gb",
		Ver:          Version{Major: 1, Minor: 4},
		Wait:         5 * time.Second,
		Hold:         14,
		Ack:          1,
		Content:      "application/xml; charset=utf-8",
		RID:          619727392817,
		XMPPVer:      Version{Major: 2, Minor: 0},
		RestartLogic: true,
		Restart:      true,
		SID:          "bo12345sh",
		Requests:     7,
		Polling:      3 * time.Second,
		Inactivity:   37 * time.Second,
		Accept:       "deflate,gzip",
		MaxPause:     93 * time.Second,
		Children:     []element.Element{element.New("message")},
	}
	body2 := Body{
		Lang:       "en-us",
		Ver:        Version{Major: 1, Minor: 6},
		Wait:       45 * time.Second,
		Hold:       1,
		Ack:        1,
		Content:    "text/xml; charset=utf-8",
		XMPPVer:    Version{Major: 1, Minor: 0},
		Requests:   2,
		Polling:    5 * time.Second,
		Inactivity: 75 * time.Second,
		MaxPause:   120 * time.Second,
	}
	bt := NewBodyTransformer(body2)
	got1 := bt.TransformBody(elem1)
	got2 := bt.TransformBody(elem2)
	// For an element without hold/ack attributes the transformer is
	// expected to yield Hold=-1 (unset) and Ack=0 rather than the
	// defaults above — adjust the expectation accordingly. TODO confirm
	// against TransformBody's documented defaulting rules.
	body2.Hold = -1
	body2.Ack = 0
	if !reflect.DeepEqual(body1, got1) {
		t.Error("TransformBody should set attributes from the body element")
		t.Errorf("\nWant:%+v\nGot :%+v\n", body1, got1)
	}
	if !reflect.DeepEqual(body2, got2) {
		t.Error("TransformBody should use the defaults")
		t.Errorf("\nWant:%+v\nGot :%+v\n", body2, got2)
	}
}
|
package image
import (
"bufio"
"bytes"
_ "golang.org/x/image/webp"
"image"
_ "image/gif"
_ "image/jpeg"
_ "image/png"
"os"
"github.com/disintegration/imaging"
)
func DecodeFile(file *os.File) (image.Image, error) {
img, _, err := image.Decode(bufio.NewReader(file))
if err != nil {
return nil, err
}
return img, nil
}
// Thumbnail decodes data and scales/crops it to width x height using
// Lanczos resampling.
func Thumbnail(data []byte, width, height int) (image.Image, error) {
	decoded, _, err := image.Decode(bytes.NewReader(data))
	if err != nil {
		return nil, err
	}
	thumb := imaging.Thumbnail(decoded, width, height, imaging.Lanczos)
	return thumb, nil
}
// EncodeJPEG serializes img to JPEG bytes.
func EncodeJPEG(img image.Image) ([]byte, error) {
	var out bytes.Buffer
	if err := imaging.Encode(&out, img, imaging.JPEG); err != nil {
		return nil, err
	}
	return out.Bytes(), nil
}
|
package UI
import (
"fmt"
"io/ioutil"
"strings"
"log"
"encoding/json"
"github.com/veandco/go-sdl2/sdl"
"github.com/veandco/go-sdl2/ttf"
"github.com/cuu/gogame/display"
"github.com/cuu/gogame/surface"
"github.com/cuu/gogame/draw"
"github.com/cuu/gogame/color"
"github.com/cuu/gogame/rect"
"github.com/cuu/gogame/font"
"github.com/cuu/gogame/time"
"github.com/cuu/gogame/event"
"github.com/cuu/LauncherGo/sysgo/DBUS"
)
var (
	// Marker file suffixes that classify a package directory: a file
	// ending in emulator_flag marks an emulator package, plugin_flag a
	// plugin package (see IsEmulatorPackage / IsPluginPackage).
	emulator_flag = "action.config"
	plugin_flag = "plugin.config"
)
// ActionConifg (sic — "Config" is misspelled; the name is kept for
// compatibility with existing callers) mirrors the JSON layout of an
// emulator package's action.config file.
type ActionConifg struct {
	ROM string `json:"ROM"`
	ROM_SO string `json:"ROM_SO"`
	EXT []string `json:"EXT"`
	EXCLUDE []string `json:"EXCLUDE"`
	FILETYPE string `json:"FILETYPE"` // default is "file"
	LAUNCHER string `json:"LAUNCHER"`
	TITLE string `json:"TITLE"` // default is "Game"
	SO_URL string `json:"SO_URL"`
	RETRO_CONFIG string `json:"RETRO_CONFIG"`
}
// PluginConfig mirrors the JSON layout of a plugin package's
// plugin.config file.
type PluginConfig struct {
	NAME string `json:"NAME"` // plugin name; defaults to the plugin folder's name
	SO_FILE string `json:"SO_FILE"` // shared object to load relative to the plugin folder
}
// MessageBox is a centered, word-wrapped text popup rendered onto its
// parent screen's canvas.
type MessageBox struct {
	Label
	Parent *MainScreen // owning screen; provides dimensions and target surface
	HWND *sdl.Surface // surface the box is blitted onto (the parent's canvas)
}
// NewMessageBox allocates a MessageBox with the default grey text color.
func NewMessageBox() *MessageBox {
	box := &MessageBox{}
	box.Color = &color.Color{83, 83, 83, 255}
	return box
}
// Init sets the text, font and (optional) color, allocates a canvas the
// size of the parent screen and targets the parent's canvas for output.
// Must be called after Parent is assigned.
func (self *MessageBox) Init( text string, font_obj *ttf.Font, col *color.Color) {
	if col != nil {
		self.Color = col
	}
	self.Text = text
	self.FontObj = font_obj
	self.Width = 0
	self.Height = 0
	self.CanvasHWND = surface.Surface(self.Parent.Width, self.Parent.Height)
	self.HWND = self.Parent.CanvasHWND
}
// SetText replaces the message text; call Draw afterwards to re-render.
func (self *MessageBox) SetText( text string) {
	self.Text = text
}
// Draw word-wraps the text onto the private canvas (wrapping at parent
// width minus a 40px margin), measures the resulting box, then blits it
// centered onto HWND with a 5px padded white background and a 1px black
// border.
func (self *MessageBox) Draw() {
	self.Width = 0
	self.Height = 0
	surface.Fill(self.CanvasHWND, &color.Color{255,255,255,255} )
	words := strings.Split(self.Text," ")
	space,_ := font.Size(self.FontObj," ") // width of one space in the current font
	max_width := self.Parent.Width - 40
	x := 0
	y := 0
	row_total_width := 0
	lines := 0 // accumulated pixel height of all wrapped lines
	for _,word := range words {
		word_surface := font.Render( self.FontObj, word, true, self.Color,nil)
		word_width := int(word_surface.W)
		word_height := int(word_surface.H)
		row_total_width += word_width
		if lines == 0 {
			lines += word_height
		}
		// Wrap to the next line once the running row width overflows.
		if (row_total_width + space ) >= max_width {
			x = 0
			y += word_height
			row_total_width = word_width
			lines+=word_height
		}
		dest_rect := rect.Rect(x,y, word_width,word_height)
		surface.Blit(self.CanvasHWND, word_surface, &dest_rect,nil)
		if len(words) == 1 {
			x+=word_width
		} else {
			x += word_width+space
		}
		// Track the widest rendered row.
		if x > self.Width {
			self.Width = x
		}
		// Stop rendering once the text would overflow the parent height.
		if lines >= self.Parent.Height - 40 {
			break
		}
	}
	self.Height = lines
	padding := 5
	x = (self.Parent.Width - self.Width) / 2
	y = (self.Parent.Height - self.Height) /2
	rect_ := rect.Rect(x-padding,y-padding, self.Width+padding*2, self.Height+padding*2)
	if self.HWND != nil {
		draw.Rect(self.HWND , &color.Color{255,255,255,255},&rect_,0)
		// Width/Height here are package-level screen dimensions —
		// presumably the display size; confirm against package globals.
		rect__ := draw.MidRect(self.Parent.Width/2, self.Parent.Height/2,self.Width,self.Height,Width,Height)
		dest_rect := rect.Rect(0,0,self.Width,self.Height)
		surface.Blit(self.HWND, self.CanvasHWND, rect__, &dest_rect)
		draw.Rect(self.HWND , &color.Color{0,0,0,255},&rect_,1)
	}
}
// MainScreen is the launcher's central view: it owns the page list,
// navigation stack, chrome (title/foot bars), message box and skin/DBus
// managers, and renders everything into CanvasHWND before blitting to
// HWND.
type MainScreen struct {
	Pages []PageInterface // all top-level pages, built by ReadTheDirIntoPages
	PageMax int // len(Pages), cached by FartherPages
	PageIndex int // index of the initially shown page
	PosX int
	PosY int
	Width int
	Height int
	MyPageStack *PageStack // navigation history for Push/pop of pages
	CurrentPage PageInterface
	CanvasHWND *sdl.Surface // off-screen canvas pages draw into
	HWND *sdl.Surface // target surface (the display)
	TitleBar *TitleBar
	FootBar *FootBar
	MsgBox *MessageBox
	MsgBoxFont *ttf.Font
	IconFont *ttf.Font
	SkinManager *SkinManager
	DBusManager *DBUS.DBus
}
// NewMainScreen sizes a MainScreen to the area between the title bar
// and the foot bar (Width/Height and the bar heights are package-level
// constants) and loads the default fonts. Call Init before use.
func NewMainScreen() *MainScreen {
	m := &MainScreen{}
	m.PosY = TitleBar_BarHeight+1
	m.Width = Width
	m.Height = Height - FootBar_BarHeight - TitleBar_BarHeight - 1
	m.MyPageStack = NewPageStack()
	m.MsgBoxFont = Fonts["veramono20"]
	m.IconFont = Fonts["varela15"]
	return m
}
// Init allocates the drawing canvas and initializes the message box,
// skin manager and DBus connection.
func (self *MainScreen) Init() {
	self.CanvasHWND = surface.Surface(self.Width,self.Height)
	self.MsgBox = NewMessageBox()
	self.MsgBox.Parent = self
	self.MsgBox.Init(" ", self.MsgBoxFont, &color.Color{83,83,83,255})
	self.SkinManager = NewSkinManager()
	self.SkinManager.Init()
	self.DBusManager = DBUS.NewDBus()
	self.DBusManager.Init()
}
// FartherPages finalizes the page list after ReadTheDirIntoPages: it
// wires each page to this screen and the shared canvas, recomputes icon
// counts and layout, and makes the page at PageIndex current.
func (self *MainScreen) FartherPages() { // right after ReadTheDirIntoPages
	self.PageMax = len(self.Pages)
	for i:=0;i< self.PageMax; i++ {
		self.Pages[i].SetIndex(i)
		self.Pages[i].SetCanvasHWND(self.CanvasHWND)
		self.Pages[i].UpdateIconNumbers() // IconNumbers always == len(Pages[i].Icons)
		self.Pages[i].SetScreen(self)
		self.Pages[i].Adjust()
		// Pre-select the second icon on pages that have more than one.
		if self.Pages[i].GetIconNumbers() > 1 {
			self.Pages[i].SetPsIndex(1)
			self.Pages[i].SetIconIndex( 1 )
		}
	}
	self.CurrentPage = self.Pages[ self.PageIndex ]
	self.CurrentPage.SetOnShow(true)
}
// CurPage returns the page currently being displayed.
func (self *MainScreen) CurPage() PageInterface {
	return self.CurrentPage
}
// PushCurPage records the current page on the navigation stack.
func (self *MainScreen) PushCurPage() {
	self.MyPageStack.Push(self.CurrentPage)
}
// SetCurPage makes pg current and fires its on-load callback.
func (self *MainScreen) SetCurPage( pg PageInterface) {
	self.CurrentPage = pg
	pg.OnLoadCb()
}
// PushPage saves the current page on the stack, then navigates to pg.
func (self *MainScreen) PushPage( pg PageInterface) {
	self.PushCurPage()
	self.SetCurPage(pg)
}
// AppendPage adds pg to the end of the page list.
func (self *MainScreen) AppendPage( pg PageInterface ) {
	self.Pages = append(self.Pages,pg)
}
// ClearCanvas fills the drawing canvas with white.
func (self *MainScreen) ClearCanvas() {
	surface.Fill(self.CanvasHWND, &color.Color{255,255,255,255} )
}
// SwapAndShow blits the canvas into the screen's area of HWND (when a
// target surface exists) and flips the display.
func (self *MainScreen) SwapAndShow() {
	if self.HWND != nil {
		rect_ := rect.Rect( self.PosX,self.PosY,self.Width,self.Height)
		surface.Blit(self.HWND,self.CanvasHWND,&rect_, nil)
	}
	display.Flip()
}
// ExtraName extracts the display name from a file or directory name of
// the form "NN_name", returning the segment after the first underscore
// (or the name itself when there is no underscore).
func (self *MainScreen) ExtraName(name string) string {
	segments := strings.Split(name, "_")
	switch {
	case len(segments) > 1:
		return segments[1]
	case len(segments) == 1:
		return segments[0]
	default:
		return name
	}
}
// IsPluginPackage reports whether dirname contains a plugin marker
// file, i.e. a regular file whose name ends in plugin_flag
// ("plugin.config").
func (self *MainScreen) IsPluginPackage(dirname string ) bool {
	files,err := ioutil.ReadDir(dirname)
	if err != nil {
		// Was log.Fatal, which terminates the process and made the
		// `return false` below unreachable; an unreadable directory is
		// simply "not a plugin package".
		log.Println(err)
		return false
	}
	for _,f := range files {
		if f.IsDir() {
			continue
		}
		if strings.HasSuffix(f.Name(),plugin_flag) {
			return true
		}
	}
	return false
}
// IsEmulatorPackage reports whether dirname contains an emulator marker
// file, i.e. a regular file whose name ends in emulator_flag
// ("action.config").
func (self *MainScreen) IsEmulatorPackage(dirname string ) bool {
	files,err := ioutil.ReadDir(dirname)
	if err != nil {
		// Was log.Fatal, which terminates the process and made the
		// `return false` below unreachable; an unreadable directory is
		// simply "not an emulator package".
		log.Println(err)
		return false
	}
	for _,f := range files {
		if f.IsDir() {
			continue
		}
		if strings.HasSuffix(f.Name(),emulator_flag) {
			return true
		}
	}
	return false
}
// ReadTheDirIntoPages recursively walks _dir and builds the launcher's
// page/icon tree. At pglevel 0 each sub-directory becomes a new Page;
// at deeper levels directories become icons on cur_page: plugin
// packages become FUNC icons (invoking the loaded plugin), other
// directories become DIR icons with a linked sub-page, and files with
// the IconExt suffix become executable EXE icons.
func (self *MainScreen) ReadTheDirIntoPages(_dir string, pglevel int, cur_page PageInterface) {
	if FileExists(_dir) == false && IsDirectory(_dir) == false {
		return
	}
	files,err := ioutil.ReadDir(_dir)
	if err != nil {
		log.Fatal(err)
		return
	}
	for _,f := range files { // already sorted
		if IsDirectory( _dir +"/"+f.Name()) {
			if pglevel == 0 {
				// Top level: every directory is its own page.
				page := NewPage()
				page.Name = self.ExtraName(f.Name())
				self.Pages = append(self.Pages, page)
				self.ReadTheDirIntoPages(_dir+"/"+f.Name(),pglevel+1, self.Pages[ len(self.Pages) - 1] )
			}else{ // on cur_page now
				i2:= self.ExtraName(f.Name())
				iconitem := NewIconItem()
				iconitem.AddLabel(i2,self.IconFont)
				// Prefer a themed icon image; otherwise render a
				// two-letter placeholder generated from the name.
				if FileExists( SkinMap(_dir+"/"+i2+".png")) {
					iconitem.ImageName = SkinMap(_dir+"/"+i2+".png")
				}else {
					fmt.Println( SkinMap(_dir+"/"+i2+".png") )
					untitled := NewUntitledIcon()
					untitled.Init()
					if len(i2) > 1 {
						untitled.SetWords(string(i2[0]),string(i2[1]))
					}else if len(i2) == 1 {
						untitled.SetWords(string(i2[0]),string(i2[0]))
					}else {
						untitled.SetWords("G","s")
					}
					iconitem.ImgSurf = untitled.Surface()
					iconitem.ImageName = ""
				}
				if self.IsPluginPackage(_dir+"/"+f.Name()) {
					// Plugin package: read its config, load the shared
					// object and register the icon only when the plugin
					// yields a callable invoke function.
					p_c := PluginConfig{}
					dat, err := ioutil.ReadFile(_dir+"/"+f.Name()+"/" +plugin_flag)
					ShowErr(err)
					err = json.Unmarshal(dat, &p_c)
					if err == nil {
						if p_c.NAME == "" {
							p_c.NAME = f.Name()
						}
						pi,err := LoadPlugin(_dir+"/"+f.Name()+"/"+p_c.SO_FILE)
						Assert(err)
						iconitem.CmdInvoke = InitPlugin(pi,self)
						if iconitem.CmdInvoke != nil {
							iconitem.MyType = ICON_TYPES["FUNC"]
							iconitem.CmdPath = f.Name()
							cur_page.AppendIcon(iconitem)
						}
					}
					//Init it
				}else {
					// Ordinary directory: link a sub-page and recurse.
					iconitem.MyType = ICON_TYPES["DIR"]
					linkpage := NewPage()
					linkpage.Name = i2
					iconitem.LinkPage = linkpage
					cur_page.AppendIcon(iconitem)
					self.ReadTheDirIntoPages(_dir+"/"+f.Name(),pglevel+1, iconitem.LinkPage)
				}
			}
		} else if IsAFile(_dir+"/"+f.Name()) && (pglevel > 0) {
			if strings.HasSuffix(strings.ToLower(f.Name()),IconExt) {
				// Executable entry: make it runnable and give it a
				// themed or generated icon.
				i2 := self.ExtraName(f.Name())
				iconitem := NewIconItem()
				iconitem.CmdPath = _dir+"/"+f.Name()
				MakeExecutable( iconitem.CmdPath )
				iconitem.MyType = ICON_TYPES["EXE"]
				if FileExists( SkinMap( _dir+"/"+ ReplaceSuffix(i2,"png"))) {
					iconitem.ImageName = SkinMap( _dir+"/"+ ReplaceSuffix(i2,"png"))
				}else {
					untitled:= NewUntitledIcon()
					untitled.Init()
					if len(i2) > 1 {
						untitled.SetWords(string(i2[0]),string(i2[1]))
					}else if len(i2) == 1 {
						untitled.SetWords(string(i2[0]),string(i2[0]))
					}else {
						untitled.SetWords("G","s")
					}
					iconitem.ImgSurf = untitled.Surface()
					iconitem.ImageName = ""
				}
				iconitem.AddLabel(strings.Split(i2,".")[0], self.IconFont)
				iconitem.LinkPage = nil
				cur_page.AppendIcon(iconitem)
			}
		}
	}
}
// RunEXE shows the "Launching...." message, waits one second, then
// posts a RUNEVT event carrying the cleaned command path for the event
// loop to execute.
func (self *MainScreen) RunEXE( cmdpath string) {
	self.DrawRun()
	self.SwapAndShow()
	time.BlockDelay(1000)
	cmdpath = strings.Trim(cmdpath," ")
	cmdpath = CmdClean(cmdpath)
	event.Post(RUNEVT,cmdpath)
}
// OnExitCb forwards the exit callback to the current page.
func (self *MainScreen) OnExitCb() {
	self.CurrentPage.OnExitCb()
}
// KeyDown handles global shortcuts ("T" shows the launching screen,
// "Space" redraws) and otherwise forwards the event to the current
// page.
func (self *MainScreen) KeyDown(ev *event.Event) {
	if ev.Data["Key"] == "T" {
		self.DrawRun()
		self.SwapAndShow()
		return
	}
	if ev.Data["Key"] == "Space" {
		self.Draw()
		self.SwapAndShow()
	}
	self.CurrentPage.KeyDown(ev)
}
// DrawRun renders the "Launching...." message box onto the canvas.
func (self *MainScreen) DrawRun() {
	self.MsgBox.SetText("Launching....")
	self.MsgBox.Draw()
}
// Draw renders the current page plus the title and foot bars, each
// guarded against being unset.
func (self *MainScreen) Draw() {
	if self.CurrentPage != nil {
		self.CurrentPage.Draw()
	}
	if self.TitleBar != nil {
		self.TitleBar.Draw( self.CurrentPage.GetName())
	}
	if self.FootBar != nil {
		self.FootBar.SetLabelTexts( self.CurrentPage.GetFootMsg())
		self.FootBar.Draw()
	}
}
|
package main
import (
"math/rand"
"time"
)
// Row is one side of the mancala board: 14 pits holding stone counts.
type Row [14]int

// Board pairs the two players' rows.
type Board struct {
	Black Row
	White Row
}

// Game tracks both players' stone reserves, the board and the pot.
type Game struct {
	WhiteStones int
	BlackStones int
	Board       Board
	Pot         int
}

// New seeds the global math/rand source and returns a fresh game with
// seven stones per player, an empty pot and a zeroed board.
func New() *Game {
	rand.Seed(time.Now().UnixNano())
	game := &Game{
		WhiteStones: 7,
		BlackStones: 7,
	}
	return game
}
|
package linter
import (
"context"
"os"
"github.com/openllb/hlb/checker"
"github.com/openllb/hlb/codegen"
"github.com/openllb/hlb/diagnostic"
"github.com/openllb/hlb/errdefs"
"github.com/openllb/hlb/parser"
)
// Linter accumulates deprecation diagnostics while rewriting deprecated
// syntax in place. When Recursive is set it also lints imported
// modules.
type Linter struct {
	Recursive bool
	errs []error // collected diagnostics, surfaced by the package-level Lint
}

// LintOption configures a Linter.
type LintOption func(*Linter)
// WithRecursive returns a LintOption that makes the linter follow and
// lint imported modules as well.
func WithRecursive() LintOption {
	return func(lin *Linter) {
		lin.Recursive = true
	}
}
// Lint runs a linter configured by opts over mod. It returns nil when
// no issues were found, otherwise a diagnostic.Error wrapping every
// collected diagnostic.
func Lint(ctx context.Context, mod *parser.Module, opts ...LintOption) error {
	var lin Linter
	for _, apply := range opts {
		apply(&lin)
	}
	lin.Lint(ctx, mod)
	if len(lin.errs) == 0 {
		return nil
	}
	return &diagnostic.Error{Diagnostics: lin.errs}
}
// Lint walks mod's AST recording deprecation diagnostics and rewriting
// each deprecated construct to its modern form in place: import paths
// without "from", the `group` type, and the `parallel` builtin.
func (l *Linter) Lint(ctx context.Context, mod *parser.Module) {
	parser.Match(mod, parser.MatchOpts{},
		func(id *parser.ImportDecl) {
			if id.DeprecatedPath != nil {
				l.errs = append(l.errs, errdefs.WithDeprecated(
					mod, id.DeprecatedPath,
					`import path without keyword "from" is deprecated`,
				))
				// Rewrite to the `from "<path>"` form.
				id.From = &parser.From{Text: "from"}
				id.Expr = &parser.Expr{
					BasicLit: &parser.BasicLit{
						Str: id.DeprecatedPath,
					},
				}
			}
			if l.Recursive {
				l.LintRecursive(ctx, mod, id.Expr)
			}
		},
		func(t *parser.Type) {
			if string(t.Kind) == "group" {
				l.errs = append(l.errs, errdefs.WithDeprecated(
					mod, t,
					"type `group` is deprecated, use `pipeline` instead",
				))
				t.Kind = parser.Pipeline
			}
		},
		func(call *parser.CallStmt) {
			if call.Name != nil && call.Name.Ident.Text == "parallel" {
				l.errs = append(l.errs, errdefs.WithDeprecated(
					mod, call.Name,
					"function `parallel` is deprecated, use `stage` instead",
				))
				call.Name.Ident.Text = "stage"
			}
		},
	)
}
// LintRecursive resolves an import expression to a file path, parses
// and semantically checks the imported module, and lints it in turn.
// Every failure along the way is silently ignored: recursive linting is
// best-effort and must not block linting of the current module.
func (l *Linter) LintRecursive(ctx context.Context, mod *parser.Module, expr *parser.Expr) {
	ctx = codegen.WithProgramCounter(ctx, mod)
	cg, err := codegen.New(nil)
	if err != nil {
		return
	}
	ret := codegen.NewRegister()
	// Evaluate the import expression; only string-valued results can
	// name a module path.
	err = cg.EmitExpr(ctx, mod.Scope, expr, nil, nil, nil, ret)
	if err != nil {
		return
	}
	if ret.Kind() != parser.String {
		return
	}
	relPath, err := ret.String()
	if err != nil {
		return
	}
	filename, err := parser.ResolvePath(codegen.ModuleDir(ctx), relPath)
	if err != nil {
		return
	}
	f, err := os.Open(filename)
	if err != nil {
		return
	}
	defer f.Close()
	imod, err := parser.Parse(ctx, f)
	if err != nil {
		return
	}
	err = checker.SemanticPass(imod)
	if err != nil {
		return
	}
	l.Lint(ctx, imod)
}
|
package main
import (
"log"
"net/http"
"github.com/Bobochka/thumbnail_service/lib/service"
)
// main reads configuration, builds the thumbnail service, registers the
// /thumbnail handler and serves HTTP until the listener fails.
func main() {
	cfg, err := ReadConfig()
	if err != nil {
		log.Fatal(err)
	}
	svc := service.New(cfg)
	app := &App{service: svc}
	http.HandleFunc("/thumbnail", app.thumbnail)
	log.Fatal(http.ListenAndServe(bindPort(), nil))
}
|
package lbfactory
import (
"errors"
"testing"
"github.com/bryanl/dolb/entity"
"github.com/bryanl/dolb/kvs"
"github.com/bryanl/dolb/pkg/app"
. "github.com/smartystreets/goconvey/convey"
)
// TestLoadBalancerFactoryBuild covers LoadBalancerFactory.Build: the
// happy path (entity created, cluster bootstrapped, KV entry written),
// validation failure on a missing DigitalOcean token, and rollback to
// the "invalid" state when persisting the new load balancer fails.
func TestLoadBalancerFactoryBuild(t *testing.T) {
	Convey("Given a LoadBalancerFactory", t, func() {
		mockEntityManager := &entity.MockManager{}
		mockKVS := &kvs.MockKVS{}
		mockCluster := &app.MockCluster{}
		// Deterministic ID generator so expectations can pin "12345".
		idGen := func() string { return "12345" }
		clusterFactoryFn := func(*entity.LoadBalancer, *app.BootstrapConfig, entity.Manager) app.Cluster {
			return mockCluster
		}
		lbf := New(mockKVS, mockEntityManager, ClusterFactory(clusterFactoryFn), GenerateRandomID(idGen))
		bootstrapConfig := &app.BootstrapConfig{
			Name:              "mylb",
			Region:            "dev0",
			DigitalOceanToken: "token",
		}
		// The entity Build is expected to construct and persist.
		newLB := entity.LoadBalancer{
			ID:                      "12345",
			Name:                    bootstrapConfig.Name,
			Region:                  bootstrapConfig.Region,
			DigitaloceanAccessToken: bootstrapConfig.DigitalOceanToken,
			State:                   "initialized",
		}
		Convey("When there are no cluster errors", func() {
			mockEntityManager.On("Create", &newLB).Return(nil)
			mockCluster.On("Bootstrap", &newLB, bootstrapConfig).Return(nil, nil)
			var setOpts *kvs.SetOptions
			node := &kvs.Node{}
			mockKVS.On("Set", "/dolb/cluster/12345", "12345", setOpts).Return(node, nil)
			lb, err := lbf.Build(bootstrapConfig)
			Convey("It returns no error", func() {
				So(err, ShouldBeNil)
			})
			Convey("It returns a load balancer", func() {
				So(lb, ShouldNotBeNil)
				So(lb.ID, ShouldEqual, "12345")
				So(lb.Name, ShouldEqual, "mylb")
				So(lb.Region, ShouldEqual, "dev0")
				So(lb.DigitaloceanAccessToken, ShouldEqual, "token")
				So(lb.State, ShouldEqual, "initialized")
			})
		})
		Convey("With a missing DigitalOcean token", func() {
			bootstrapConfig.DigitalOceanToken = ""
			_, err := lbf.Build(bootstrapConfig)
			Convey("It returns an error", func() {
				So(err, ShouldNotBeNil)
			})
		})
		Convey("Unable to save load balancer", func() {
			mockEntityManager.On("Create", &newLB).Return(errors.New("failure")).Once()
			// On create failure Build is expected to mark the entity
			// invalid and save that state.
			invalidLB := newLB
			invalidLB.State = "invalid"
			mockEntityManager.On("Save", &invalidLB).Return(nil).Once()
			_, err := lbf.Build(bootstrapConfig)
			Convey("It returns an error", func() {
				So(err, ShouldNotBeNil)
			})
		})
		// Fresh mocks for each scenario run by goconvey.
		Reset(func() {
			mockEntityManager = &entity.MockManager{}
			mockKVS = &kvs.MockKVS{}
			mockCluster = &app.MockCluster{}
		})
	})
}
|
package migrations
import (
"database/sql"
"os"
"path"
_ "github.com/mutecomm/go-sqlcipher"
)
// Minor002 migrates the datastore from schema version 2 to 3 by adding
// the notifications table and its indexes.
type Minor002 struct{}

// Up opens the (optionally pin-encrypted) sqlite database for the
// selected network, creates the notifications table plus its indexes in
// one transaction, and bumps the repover file to "3".
func (Minor002) Up(repoPath string, pinCode string, testnet bool) error {
	dbName := "mainnet.db"
	if testnet {
		dbName = "testnet.db"
	}
	dbPath := path.Join(repoPath, "datastore", dbName)
	db, err := sql.Open("sqlite3", dbPath)
	if err != nil {
		return err
	}
	// The handle was previously never closed (leaked on every call).
	defer db.Close()
	if pinCode != "" {
		// NOTE(review): pinCode is interpolated into the pragma string;
		// assumed to be trusted local input (matches prior behavior).
		if _, err := db.Exec("pragma key='" + pinCode + "';"); err != nil {
			return err
		}
	}
	// add notifications table and indexes
	tx, err := db.Begin()
	if err != nil {
		return err
	}
	query := `
	create table notifications (id text primary key not null, date integer not null, actorId text not null, targetId text not null, type integer not null, read integer not null, body text not null);
	create index notification_targetId on notifications (targetId);
	create index notification_actorId on notifications (actorId);
	create index notification_read on notifications (read);
	`
	stmt, err := tx.Prepare(query)
	if err != nil {
		return err
	}
	defer stmt.Close()
	if _, err = stmt.Exec(); err != nil {
		_ = tx.Rollback()
		return err
	}
	// A commit failure was previously discarded; report it so a failed
	// migration is not recorded as successful.
	if err := tx.Commit(); err != nil {
		return err
	}
	// update version
	f3, err := os.Create(path.Join(repoPath, "repover"))
	if err != nil {
		return err
	}
	defer f3.Close()
	if _, err = f3.Write([]byte("3")); err != nil {
		return err
	}
	return nil
}

// Down is a no-op: this migration is not reversible.
func (Minor002) Down(repoPath string, pinCode string, testnet bool) error {
	return nil
}

// Major reports that this is a minor (non-breaking) migration.
func (Minor002) Major() bool {
	return false
}
|
// Package main defines a command line interface for the sqlboiler package
package main
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/friendsofgo/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/volatiletech/sqlboiler/v4/boilingcore"
"github.com/volatiletech/sqlboiler/v4/drivers"
"github.com/volatiletech/sqlboiler/v4/importers"
)
// sqlBoilerVersion is reported by --version and embedded into the
// generated code's config.
const sqlBoilerVersion = "4.15.0"

var (
	flagConfigFile string // path passed via --config/-c
	cmdState *boilingcore.State // built in preRun, consumed by run/postRun
	cmdConfig *boilingcore.Config // assembled from viper values in preRun
)
// initConfig loads the sqlboiler config. An explicit --config file must
// parse or the process exits; otherwise "sqlboiler.*" is searched in
// the working directory and then $XDG_CONFIG_HOME/sqlboiler (falling
// back to $HOME/.config/sqlboiler), and a missing file is tolerated.
func initConfig() {
	if flagConfigFile != "" {
		viper.SetConfigFile(flagConfigFile)
		if err := viper.ReadInConfig(); err != nil {
			fmt.Println("Can't read config:", err)
			os.Exit(1)
		}
		return
	}
	viper.SetConfigName("sqlboiler")
	wd, err := os.Getwd()
	if err != nil {
		wd = "."
	}
	searchPaths := []string{wd}
	if configHome := os.Getenv("XDG_CONFIG_HOME"); configHome != "" {
		searchPaths = append(searchPaths, filepath.Join(configHome, "sqlboiler"))
	} else {
		searchPaths = append(searchPaths, filepath.Join(os.Getenv("HOME"), ".config/sqlboiler"))
	}
	for _, p := range searchPaths {
		viper.AddConfigPath(p)
	}
	// Ignore errors here, fallback to other validation methods.
	// Users can use environment variables if a config is not found.
	_ = viper.ReadInConfig()
}
// main handles --version directly, builds the cobra root command with
// all persistent flags, binds them into viper (with env-var fallback)
// and executes. commandFailure errors additionally print usage; --debug
// switches error output to the %+v stack-trace form.
func main() {
	// Too much happens between here and cobra's argument handling, for
	// something so simple just do it immediately.
	for _, arg := range os.Args {
		if arg == "--version" {
			fmt.Println("SQLBoiler v" + sqlBoilerVersion)
			return
		}
	}
	// Set up the cobra root command
	rootCmd := &cobra.Command{
		Use: "sqlboiler [flags] <driver>",
		Short: "SQL Boiler generates an ORM tailored to your database schema.",
		Long: "SQL Boiler generates a Go ORM from template files, tailored to your database schema.\n" +
			`Complete documentation is available at http://github.com/volatiletech/sqlboiler`,
		Example: `sqlboiler psql`,
		PreRunE: preRun,
		RunE: run,
		PostRunE: postRun,
		SilenceErrors: true,
		SilenceUsage: true,
	}
	cobra.OnInitialize(initConfig)
	// Set up the cobra root command flags
	rootCmd.PersistentFlags().StringVarP(&flagConfigFile, "config", "c", "", "Filename of config file to override default lookup")
	rootCmd.PersistentFlags().StringP("output", "o", "models", "The name of the folder to output to")
	rootCmd.PersistentFlags().StringP("pkgname", "p", "models", "The name you wish to assign to your generated package")
	rootCmd.PersistentFlags().StringSliceP("templates", "", nil, "A templates directory, overrides the embedded template folders in sqlboiler")
	rootCmd.PersistentFlags().StringSliceP("tag", "t", nil, "Struct tags to be included on your models in addition to json, yaml, toml")
	rootCmd.PersistentFlags().StringSliceP("replace", "", nil, "Replace templates by directory: relpath/to_file.tpl:relpath/to_replacement.tpl")
	rootCmd.PersistentFlags().BoolP("debug", "d", false, "Debug mode prints stack traces on error")
	rootCmd.PersistentFlags().BoolP("no-context", "", false, "Disable context.Context usage in the generated code")
	rootCmd.PersistentFlags().BoolP("no-tests", "", false, "Disable generated go test files")
	rootCmd.PersistentFlags().BoolP("no-hooks", "", false, "Disable hooks feature for your models")
	rootCmd.PersistentFlags().BoolP("no-rows-affected", "", false, "Disable rows affected in the generated API")
	rootCmd.PersistentFlags().BoolP("no-auto-timestamps", "", false, "Disable automatic timestamps for created_at/updated_at")
	rootCmd.PersistentFlags().BoolP("no-driver-templates", "", false, "Disable parsing of templates defined by the database driver")
	rootCmd.PersistentFlags().BoolP("no-back-referencing", "", false, "Disable back referencing in the loaded relationship structs")
	rootCmd.PersistentFlags().BoolP("always-wrap-errors", "", false, "Wrap all returned errors with stacktraces, also sql.ErrNoRows")
	rootCmd.PersistentFlags().BoolP("add-global-variants", "", false, "Enable generation for global variants")
	rootCmd.PersistentFlags().BoolP("add-panic-variants", "", false, "Enable generation for panic variants")
	rootCmd.PersistentFlags().BoolP("add-soft-deletes", "", false, "Enable soft deletion by updating deleted_at timestamp")
	rootCmd.PersistentFlags().BoolP("add-enum-types", "", false, "Enable generation of types for enums")
	rootCmd.PersistentFlags().StringP("enum-null-prefix", "", "Null", "Name prefix of nullable enum types")
	rootCmd.PersistentFlags().BoolP("version", "", false, "Print the version")
	rootCmd.PersistentFlags().BoolP("wipe", "", false, "Delete the output folder (rm -rf) before generation to ensure sanity")
	rootCmd.PersistentFlags().StringP("struct-tag-casing", "", "snake", "Decides the casing for go structure tag names. camel, title or snake (default snake)")
	rootCmd.PersistentFlags().StringP("relation-tag", "r", "-", "Relationship struct tag name")
	rootCmd.PersistentFlags().StringSliceP("tag-ignore", "", nil, "List of column names that should have tags values set to '-' (ignored during parsing)")
	// hide flags not recommended for use
	rootCmd.PersistentFlags().MarkHidden("replace")
	viper.BindPFlags(rootCmd.PersistentFlags())
	viper.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
	viper.AutomaticEnv()
	if err := rootCmd.Execute(); err != nil {
		if e, ok := err.(commandFailure); ok {
			fmt.Printf("Error: %v\n\n", string(e))
			rootCmd.Help()
		} else if !viper.GetBool("debug") {
			fmt.Printf("Error: %v\n", err)
		} else {
			fmt.Printf("Error: %+v\n", err)
		}
		os.Exit(1)
	}
}
// commandFailure is a usage-level error: when Execute returns one, main
// prints the message followed by the command's help text.
type commandFailure string

// Error implements the error interface.
func (c commandFailure) Error() string {
	return string(c)
}
// preRun registers the driver named by the first argument, assembles
// the full boilingcore.Config from viper (flags, config file, env) plus
// the driver-specific config section, and initializes cmdState for run.
func preRun(cmd *cobra.Command, args []string) error {
	var err error
	if len(args) == 0 {
		return commandFailure("must provide a driver name")
	}
	driverName, driverPath, err := drivers.RegisterBinaryFromCmdArg(args[0])
	if err != nil {
		return errors.Wrap(err, "could not register driver")
	}
	cmdConfig = &boilingcore.Config{
		DriverName: driverName,
		OutFolder: viper.GetString("output"),
		PkgName: viper.GetString("pkgname"),
		Debug: viper.GetBool("debug"),
		AddGlobal: viper.GetBool("add-global-variants"),
		AddPanic: viper.GetBool("add-panic-variants"),
		AddSoftDeletes: viper.GetBool("add-soft-deletes"),
		AddEnumTypes: viper.GetBool("add-enum-types"),
		EnumNullPrefix: viper.GetString("enum-null-prefix"),
		NoContext: viper.GetBool("no-context"),
		NoTests: viper.GetBool("no-tests"),
		NoHooks: viper.GetBool("no-hooks"),
		NoRowsAffected: viper.GetBool("no-rows-affected"),
		NoAutoTimestamps: viper.GetBool("no-auto-timestamps"),
		NoDriverTemplates: viper.GetBool("no-driver-templates"),
		NoBackReferencing: viper.GetBool("no-back-referencing"),
		AlwaysWrapErrors: viper.GetBool("always-wrap-errors"),
		Wipe: viper.GetBool("wipe"),
		StructTagCasing: strings.ToLower(viper.GetString("struct-tag-casing")), // camel | snake | title
		TagIgnore: viper.GetStringSlice("tag-ignore"),
		RelationTag: viper.GetString("relation-tag"),
		TemplateDirs: viper.GetStringSlice("templates"),
		Tags: viper.GetStringSlice("tag"),
		Replacements: viper.GetStringSlice("replace"),
		Aliases: boilingcore.ConvertAliases(viper.Get("aliases")),
		TypeReplaces: boilingcore.ConvertTypeReplace(viper.Get("types")),
		AutoColumns: boilingcore.AutoColumns{
			Created: viper.GetString("auto-columns.created"),
			Updated: viper.GetString("auto-columns.updated"),
			Deleted: viper.GetString("auto-columns.deleted"),
		},
		Inflections: boilingcore.Inflections{
			Plural: viper.GetStringMapString("inflections.plural"),
			PluralExact: viper.GetStringMapString("inflections.plural_exact"),
			Singular: viper.GetStringMapString("inflections.singular"),
			SingularExact: viper.GetStringMapString("inflections.singular_exact"),
			Irregular: viper.GetStringMapString("inflections.irregular"),
		},
		ForeignKeys: boilingcore.ConvertForeignKeys(viper.Get("foreign_keys")),
		Version: sqlBoilerVersion,
	}
	if cmdConfig.Debug {
		fmt.Fprintln(os.Stderr, "using driver:", driverPath)
	}
	// Configure the driver: known keys are set explicitly, then every
	// other "<driver>.*" key is forwarded verbatim.
	cmdConfig.DriverConfig = map[string]interface{}{
		"whitelist": viper.GetStringSlice(driverName + ".whitelist"),
		"blacklist": viper.GetStringSlice(driverName + ".blacklist"),
		"add-enum-types": cmdConfig.AddEnumTypes,
		"enum-null-prefix": cmdConfig.EnumNullPrefix,
		"foreign-keys": cmdConfig.ForeignKeys,
	}
	keys := allKeys(driverName)
	for _, key := range keys {
		if key != "blacklist" && key != "whitelist" {
			prefixedKey := fmt.Sprintf("%s.%s", driverName, key)
			cmdConfig.DriverConfig[key] = viper.Get(prefixedKey)
		}
	}
	cmdConfig.Imports = configureImports()
	cmdState, err = boilingcore.New(cmdConfig)
	return err
}
// configureImports starts from the default import collection and
// overrides each section that the user set under "imports." in the
// config. Malformed import maps abort with a panic since generation
// cannot proceed with them.
func configureImports() importers.Collection {
	imports := importers.NewDefaultImports()
	mustMap := func(m importers.Map, err error) importers.Map {
		if err != nil {
			panic("failed to change viper interface into importers.Map: " + err.Error())
		}
		return m
	}
	if viper.IsSet("imports.all.standard") {
		imports.All.Standard = viper.GetStringSlice("imports.all.standard")
	}
	if viper.IsSet("imports.all.third_party") {
		imports.All.ThirdParty = viper.GetStringSlice("imports.all.third_party")
	}
	if viper.IsSet("imports.test.standard") {
		imports.Test.Standard = viper.GetStringSlice("imports.test.standard")
	}
	if viper.IsSet("imports.test.third_party") {
		imports.Test.ThirdParty = viper.GetStringSlice("imports.test.third_party")
	}
	if viper.IsSet("imports.singleton") {
		imports.Singleton = mustMap(importers.MapFromInterface(viper.Get("imports.singleton")))
	}
	if viper.IsSet("imports.test_singleton") {
		imports.TestSingleton = mustMap(importers.MapFromInterface(viper.Get("imports.test_singleton")))
	}
	if viper.IsSet("imports.based_on_type") {
		imports.BasedOnType = mustMap(importers.MapFromInterface(viper.Get("imports.based_on_type")))
	}
	return imports
}
// run executes the code generation using the state prepared in preRun.
func run(cmd *cobra.Command, args []string) error {
	return cmdState.Run()
}
// postRun releases any resources held by the generator state after run.
func postRun(cmd *cobra.Command, args []string) error {
	return cmdState.Cleanup()
}
// allKeys returns the configuration keys found under the given prefix,
// merged from environment variables and viper's key list, with the prefix
// stripped. Result order is unspecified (map iteration).
func allKeys(prefix string) []string {
	keys := make(map[string]bool)

	prefix += "."

	// Environment variables: FOO_BAR is normalized to foo.bar before matching.
	for _, e := range os.Environ() {
		splits := strings.SplitN(e, "=", 2)
		key := strings.ReplaceAll(strings.ToLower(splits[0]), "_", ".")
		if strings.HasPrefix(key, prefix) {
			// Bug fix: TrimPrefix instead of ReplaceAll — ReplaceAll also
			// removed later occurrences of the prefix string inside the key
			// (e.g. "psql.psql.foo" became "foo" instead of "psql.foo").
			keys[strings.TrimPrefix(key, prefix)] = true
		}
	}

	for _, key := range viper.AllKeys() {
		if strings.HasPrefix(key, prefix) {
			keys[strings.TrimPrefix(key, prefix)] = true
		}
	}

	keySlice := make([]string, 0, len(keys))
	for k := range keys {
		keySlice = append(keySlice, k)
	}
	return keySlice
}
|
// DRUNKWATER TEMPLATE(add description and prototypes)
// Question Title and Description on leetcode.com
// Function Declaration and Function Prototypes on leetcode.com
//327. Count of Range Sum
//Given an integer array nums, return the number of range sums that lie in [lower, upper] inclusive.
//Range sum S(i, j) is defined as the sum of the elements in nums between indices i and j (i ≤ j), inclusive.
//Note:
//A naive algorithm of O(n^2) is trivial. You MUST do better than that.
//Example:
//Given nums = [-2, 5, -1], lower = -2, upper = 2,
//Return 3.
//The three ranges are : [0, 0], [2, 2], [0, 2] and their respective sums are: -2, -1, 2.
//Credits:
//Special thanks to @dietpepsi for adding this problem and creating all test cases.
//func countRangeSum(nums []int, lower int, upper int) int {
//}
// Time Is Money |
package main
import (
log "github.com/mailgun/vulcand/Godeps/_workspace/src/github.com/mailgun/gotools-log"
"os"
)
// vulcanUrl holds the vulcand API endpoint; not assigned in this file —
// presumably populated by command/flag parsing elsewhere in the package.
var vulcanUrl string

// main configures console logging, builds the CLI command, and runs it with
// the process arguments. Errors are logged but do not change the exit code.
func main() {
	log.Init([]*log.LogConfig{&log.LogConfig{Name: "console"}})
	cmd := NewCommand()
	err := cmd.Run(os.Args)
	if err != nil {
		log.Errorf("Error: %s\n", err)
	}
}
|
package gogo
import (
"io/ioutil"
"net/http"
"net/http/httptest"
"net/http/httputil"
"strconv"
"strings"
"testing"
"github.com/golib/assert"
)
// Test_NewAppRoute verifies a freshly constructed route has no middleware
// handlers and stores the prefix and server it was built with.
func Test_NewAppRoute(t *testing.T) {
	prefix := "/prefix"
	server := newMockServer()
	assertion := assert.New(t)

	route := NewAppRoute(prefix, server)
	assertion.Empty(route.handlers)
	assertion.Equal(prefix, route.prefix)
	assertion.Equal(server, route.server)
}
// Test_RouteHandle registers one handler per HTTP verb, then issues a real
// request for each and checks the response body echoes the verb (HEAD
// responses carry no body).
func Test_RouteHandle(t *testing.T) {
	server := newMockServer()
	route := NewAppRoute("/", server)
	assertion := assert.New(t)
	testCases := map[string]struct {
		path    string
		handler Middleware
	}{
		"PUT": {
			"/put",
			func(ctx *Context) {
				ctx.Text("PUT")
			},
		},
		"POST": {
			"/post",
			func(ctx *Context) {
				ctx.Text("POST")
			},
		},
		"GET": {
			"/get",
			func(ctx *Context) {
				ctx.Text("GET")
			},
		},
		"PATCH": {
			"/patch",
			func(ctx *Context) {
				ctx.Text("PATCH")
			},
		},
		"DELETE": {
			"/delete",
			func(ctx *Context) {
				ctx.Text("DELETE")
			},
		},
		"HEAD": {
			"/head",
			func(ctx *Context) {
				ctx.Text("HEAD")
			},
		},
		"OPTIONS": {
			"/options",
			func(ctx *Context) {
				ctx.Text("OPTIONS")
			},
		},
	}

	// register handlers
	for method, testCase := range testCases {
		route.Handle(method, testCase.path, testCase.handler)
	}

	// start server
	ts := httptest.NewServer(server)
	defer ts.Close()

	// testing by http request
	for method, testCase := range testCases {
		request, _ := http.NewRequest(method, ts.URL+testCase.path, nil)
		res, err := http.DefaultClient.Do(request)
		assertion.Nil(err)

		body, err := ioutil.ReadAll(res.Body)
		res.Body.Close()
		// Bug fix: the ReadAll error was previously reassigned but never
		// checked; a read failure surfaced as a confusing body mismatch.
		assertion.Nil(err)

		switch method {
		case "HEAD":
			assertion.Empty(body)
		default:
			assertion.Equal(method, string(body))
		}
	}
}
// Test_RouteHandleWithTailSlash checks that a path with and without a
// trailing slash resolves to the same route, both for a plain param route
// and for a catch-all (*extraargs) route.
// NOTE(review): the ReadAll errors below are assigned but never asserted;
// consider checking them like the Do errors.
func Test_RouteHandleWithTailSlash(t *testing.T) {
	server := newMockServer()
	route := NewAppRoute("/", server)
	assertion := assert.New(t)

	route.Handle("GET", "/:tailslash", func(ctx *Context) {
		ctx.Text("GET /:tailslash")
	})
	route.Handle("GET", "/:tailslash/*extraargs", func(ctx *Context) {
		ctx.Text("GET /:tailslash/*extraargs")
	})

	// start server
	ts := httptest.NewServer(server)
	defer ts.Close()

	// without tail slash
	request, _ := http.NewRequest("GET", ts.URL+"/tailslash", nil)
	response, err := http.DefaultClient.Do(request)
	assertion.Nil(err)
	body, err := ioutil.ReadAll(response.Body)
	response.Body.Close()
	assertion.Equal("GET /:tailslash", string(body))

	// with tail slash
	request, _ = http.NewRequest("GET", ts.URL+"/tailslash/?query", nil)
	response, err = http.DefaultClient.Do(request)
	assertion.Nil(err)
	body, err = ioutil.ReadAll(response.Body)
	response.Body.Close()
	assertion.Equal("GET /:tailslash", string(body))

	// with extra args without tail slash
	request, _ = http.NewRequest("GET", ts.URL+"/tailslash/extraargs", nil)
	response, err = http.DefaultClient.Do(request)
	assertion.Nil(err)
	body, err = ioutil.ReadAll(response.Body)
	response.Body.Close()
	assertion.Equal("GET /:tailslash/*extraargs", string(body))

	// with extra args with tail slash
	request, _ = http.NewRequest("GET", ts.URL+"/tailslash/extraargs/", nil)
	response, err = http.DefaultClient.Do(request)
	assertion.Nil(err)
	body, err = ioutil.ReadAll(response.Body)
	response.Body.Close()
	assertion.Equal("GET /:tailslash/*extraargs", string(body))
}
// Test_RouteProxyHandle registers a reverse proxy on /mock that rewrites the
// request to the local /proxy handler, and a response filter that upper-cases
// and doubles the proxied body; the client must observe the filtered output.
func Test_RouteProxyHandle(t *testing.T) {
	server := newMockServer()
	route := NewAppRoute("/", server)
	assertion := assert.New(t)

	// proxied handler
	route.Use(func(ctx *Context) {
		ctx.Logger.Warn("v ...interface{}")
	})
	route.Handle("GET", "/proxy", func(ctx *Context) {
		ctx.Text("Proxied!")
	})
	proxy := &httputil.ReverseProxy{
		Director: func(r *http.Request) {
			// re-target the request at this same test server's /proxy path
			r.URL.Scheme = "http"
			r.URL.Host = r.Host
			r.URL.Path = "/proxy"
			r.URL.RawPath = "/proxy"
		},
	}
	route.ProxyHandle("GET", "/mock", proxy, func(w Responser, b []byte) []byte {
		// filter: upper-case the body and emit it twice, fixing Content-Length
		s := strings.ToUpper(string(b))
		w.Header().Set("Content-Length", strconv.Itoa(len(s)*2))
		return []byte(s + s)
	})

	// start server
	ts := httptest.NewServer(server)
	defer ts.Close()

	// testing by http request
	request, _ := http.NewRequest("GET", ts.URL+"/mock", nil)
	res, err := http.DefaultClient.Do(request)
	assertion.Nil(err)
	body, err := ioutil.ReadAll(res.Body)
	res.Body.Close()
	assertion.Nil(err)
	assertion.Equal("PROXIED!PROXIED!", string(body))
}
// Test_RouteMockHandle verifies MockHandle diverts the handler's output into
// the supplied recorder: the real HTTP response is empty while the recorder
// captures the status and body.
func Test_RouteMockHandle(t *testing.T) {
	server := newMockServer()
	route := NewAppRoute("/", server)
	response := httptest.NewRecorder()
	assertion := assert.New(t)

	// mock handler
	route.MockHandle("GET", "/mock", response, func(ctx *Context) {
		ctx.Text("MOCK")
	})

	// start server
	ts := httptest.NewServer(server)
	defer ts.Close()

	// testing by http request
	request, _ := http.NewRequest("GET", ts.URL+"/mock", nil)
	res, err := http.DefaultClient.Do(request)
	assertion.Nil(err)
	body, err := ioutil.ReadAll(res.Body)
	res.Body.Close()
	assertion.Nil(err)
	assertion.Empty(body)
	assertion.Equal(http.StatusOK, response.Code)
	assertion.Equal("MOCK", response.Body.String())
}
// Test_RouteGroup checks that a handler registered on a sub-group is served
// under the group's prefix, with path params resolved.
// NOTE(review): the ReadAll error is assigned but never asserted.
func Test_RouteGroup(t *testing.T) {
	server := newMockServer()
	route := NewAppRoute("/", server).Group("/group")
	assertion := assert.New(t)

	// register handler
	// GET /group/:method
	route.Handle("GET", "/:method", func(ctx *Context) {
		ctx.Text(ctx.Params.Get("method"))
	})

	// start server
	ts := httptest.NewServer(server)
	defer ts.Close()

	res, err := http.Get(ts.URL + "/group/testing")
	assertion.Nil(err)
	body, err := ioutil.ReadAll(res.Body)
	res.Body.Close()
	assertion.Equal("testing", string(body))
}
// TestGroupController is a fixture resource controller for the "group"
// resource, used by Test_ResourceController.
type TestGroupController struct{}

// Index handles GET on the collection route.
func (t *TestGroupController) Index(ctx *Context) {
	ctx.Text("GET /group")
}

// Show handles GET on the member route, echoing the :group param.
func (t *TestGroupController) Show(ctx *Context) {
	ctx.Text("GET /group/" + ctx.Params.Get("group"))
}
// TestUserController is a fixture nested resource controller whose ID()
// overrides the default member-param name to "id".
type TestUserController struct{}

// ID returns the custom route-parameter name for this resource.
func (t *TestUserController) ID() string {
	return "id"
}

// Show echoes both the parent :group param and this resource's :id param.
func (t *TestUserController) Show(ctx *Context) {
	ctx.Text("GET /group/" + ctx.Params.Get("group") + "/user/" + ctx.Params.Get("id"))
}
// Test_ResourceController registers a resource and a nested resource, then
// verifies the generated member routes and that an unmatched nested
// collection path 404s.
// NOTE(review): ReadAll errors are assigned but never asserted.
func Test_ResourceController(t *testing.T) {
	server := newMockServer()
	route := NewAppRoute("/", server)
	assertion := assert.New(t)

	// start server
	ts := httptest.NewServer(server)
	defer ts.Close()

	// group resource
	group := route.Resource("group", &TestGroupController{})

	// should work for GET /group/:group
	res, err := http.Get(ts.URL + "/group/my-group")
	assertion.Nil(err)
	body, err := ioutil.ReadAll(res.Body)
	res.Body.Close()
	assertion.Equal("GET /group/my-group", string(body))

	// user resource
	group.Resource("user", &TestUserController{})

	// should work for GET /group/:group/user/:id
	res, err = http.Get(ts.URL + "/group/my-group/user/my-user")
	assertion.Nil(err)
	body, err = ioutil.ReadAll(res.Body)
	res.Body.Close()
	assertion.Equal("GET /group/my-group/user/my-user", string(body))

	// error for not found
	res, err = http.Get(ts.URL + "/group/my-group/user/")
	assertion.Nil(err)
	body, err = ioutil.ReadAll(res.Body)
	res.Body.Close()
	assertion.Equal(http.StatusNotFound, res.StatusCode)
	assertion.Contains(string(body), "404 page not found")
}
// TestGroupMemberController is a fixture controller mounted at the multi-
// segment path "group/member", used by Test_ResourceControllerWithSubPath.
type TestGroupMemberController struct{}

// Index handles GET on the collection route.
func (t *TestGroupMemberController) Index(ctx *Context) {
	ctx.Text("GET /group/member")
}

// Show handles GET on the member route, echoing the :member param.
func (t *TestGroupMemberController) Show(ctx *Context) {
	ctx.Text("GET /group/member/" + ctx.Params.Get("member"))
}
// Test_ResourceControllerWithSubPath registers a resource whose mount point
// contains a slash and verifies its member route works while deeper
// unregistered paths 404.
// NOTE(review): ReadAll errors are assigned but never asserted.
func Test_ResourceControllerWithSubPath(t *testing.T) {
	server := newMockServer()
	route := NewAppRoute("/", server)
	assertion := assert.New(t)

	// start server
	ts := httptest.NewServer(server)
	defer ts.Close()

	// member resource
	route.Resource("group/member", &TestGroupMemberController{})

	// should work for GET /group/member/:group
	res, err := http.Get(ts.URL + "/group/member/my-group")
	assertion.Nil(err)
	body, err := ioutil.ReadAll(res.Body)
	res.Body.Close()
	assertion.Equal("GET /group/member/my-group", string(body))

	// error for not found
	res, err = http.Get(ts.URL + "/group/member/my-group/user")
	assertion.Nil(err)
	body, err = ioutil.ReadAll(res.Body)
	res.Body.Close()
	assertion.Equal(http.StatusNotFound, res.StatusCode)
	assertion.Contains(string(body), "404 page not found")
}
|
/*
Copyright 2021 Digitalis.IO.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package controllers
import (
"context"
"fmt"
"time"
"github.com/go-logr/logr"
"k8s.io/apimachinery/pkg/runtime"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/event"
"sigs.k8s.io/controller-runtime/pkg/predicate"
ldapv1 "ldap-accounts-controller/api/v1"
ld "ldap-accounts-controller/ldap"
)
var (
	// ldapUserOwnerKey is the field-index key used to look up LdapUser
	// objects by name (registered in SetupWithManager).
	ldapUserOwnerKey = ".metadata.controller"
)

// LdapUserReconciler reconciles a LdapUser object
type LdapUserReconciler struct {
	client.Client
	Log    logr.Logger
	Scheme *runtime.Scheme
}

// +kubebuilder:rbac:groups=ldap.digitalis.io,resources=ldapusers,verbs=get;list;watch;create;update;patch;delete
// +kubebuilder:rbac:groups=ldap.digitalis.io,resources=ldapusers/status,verbs=get;update;patch
// Reconcile drives an LdapUser toward its desired state: it installs a
// finalizer on live objects, deletes the LDAP entry (then the finalizer) on
// object deletion, and otherwise adds/updates the user in LDAP.
func (r *LdapUserReconciler) Reconcile(req ctrl.Request) (ctrl.Result, error) {
	ctx := context.Background()
	log := r.Log.WithValues("ldapuser", req.NamespacedName)

	var ldapuser ldapv1.LdapUser
	if err := r.Get(ctx, req.NamespacedName, &ldapuser); err != nil {
		//log.Error(err, "unable to fetch ldap user")
		// Not-found is expected after deletion; drop it instead of requeueing.
		return ctrl.Result{}, client.IgnoreNotFound(err)
	}

	//! [finalizer]
	ldapuserFinalizerName := "ldap.digitalis.io/finalizer"
	if ldapuser.ObjectMeta.DeletionTimestamp.IsZero() {
		// Object is live: make sure our finalizer is registered.
		if !containsString(ldapuser.GetFinalizers(), ldapuserFinalizerName) {
			ldapuser.SetFinalizers(append(ldapuser.GetFinalizers(), ldapuserFinalizerName))
			if err := r.Update(context.Background(), &ldapuser); err != nil {
				return ctrl.Result{}, err
			}
		}
	} else {
		// The object is being deleted
		if containsString(ldapuser.GetFinalizers(), ldapuserFinalizerName) {
			// our finalizer is present, so lets handle any external dependency
			if err := ld.DeleteUser(ldapuser.Spec); err != nil {
				log.Error(err, "Error deleting from LDAP")
				return ctrl.Result{}, err
			}
			// remove our finalizer from the list and update it.
			ldapuser.SetFinalizers(removeString(ldapuser.GetFinalizers(), ldapuserFinalizerName))
			if err := r.Update(context.Background(), &ldapuser); err != nil {
				return ctrl.Result{}, err
			}
		}
		// Stop reconciliation as the item is being deleted
		return ctrl.Result{}, nil
	}
	//! [finalizer]

	log.Info("Adding or updating LDAP user")
	err := ld.AddUser(ldapuser.Spec)
	if err != nil {
		// NOTE(review): the add error is logged but not returned, so a failed
		// LDAP write is not retried — confirm this is intentional.
		log.Error(err, "cannot add user to ldap")
	}
	ldapuser.Status.CreatedOn = time.Now().Format("2006-01-02 15:04:05")

	var ldapUsers ldapv1.LdapUserList
	if err := r.List(ctx, &ldapUsers, client.InNamespace(req.Namespace), client.MatchingFields{ldapUserOwnerKey: req.Name}); err != nil {
		log.Error(err, "unable to list ldap accounts")
		return ctrl.Result{}, err
	}
	// NOTE(review): this loop mutates range-value copies and the status is
	// never persisted with a Status().Update call, so it has no lasting
	// effect — looks incomplete; confirm intent.
	for _, acc := range ldapUsers.Items {
		msg := fmt.Sprintf("Checking user %s", acc.Spec.Username)
		log.Info(msg)
		acc.Status.CreatedOn = time.Now().Format("2006-01-02 15:04:05")
	}
	return ctrl.Result{}, nil
}
// containsString reports whether s occurs in slice.
func containsString(slice []string, s string) bool {
	for i := range slice {
		if slice[i] == s {
			return true
		}
	}
	return false
}
// removeString returns a copy of slice with every occurrence of s removed.
// When all elements are removed (or slice is empty) the result is nil.
func removeString(slice []string, s string) []string {
	var kept []string
	for _, item := range slice {
		if item != s {
			kept = append(kept, item)
		}
	}
	return kept
}
// SetupWithManager registers the field index used for name lookups and wires
// the reconciler to LdapUser events, filtering out status-only updates.
func (r *LdapUserReconciler) SetupWithManager(mgr ctrl.Manager) error {
	// Index LdapUser objects by name under ldapUserOwnerKey so Reconcile can
	// list them with a MatchingFields selector.
	if err := mgr.GetFieldIndexer().IndexField(&ldapv1.LdapUser{}, ldapUserOwnerKey, func(rawObj runtime.Object) []string {
		acc := rawObj.(*ldapv1.LdapUser)
		return []string{acc.Name}
	}); err != nil {
		return err
	}

	//! [pred]
	pred := predicate.Funcs{
		CreateFunc:  func(event.CreateEvent) bool { return true },
		DeleteFunc:  func(event.DeleteEvent) bool { return false },
		GenericFunc: func(event.GenericEvent) bool { return true },
		UpdateFunc: func(e event.UpdateEvent) bool {
			oldGeneration := e.MetaOld.GetGeneration()
			newGeneration := e.MetaNew.GetGeneration()
			// Generation is only updated on spec changes (also on deletion),
			// not metadata or status
			// Filter out events where the generation hasn't changed to
			// avoid being triggered by status updates
			return oldGeneration != newGeneration
		},
	}
	//! [pred]

	return ctrl.NewControllerManagedBy(mgr).
		For(&ldapv1.LdapUser{}).
		WithEventFilter(pred).
		Complete(r)
}
|
// Copyright 2015 Walter Schulze
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package json_test
import (
"encoding/json"
"github.com/katydid/katydid/parser/debug"
sjson "github.com/katydid/katydid/parser/json"
"testing"
)
// TestDebug walks the canonical debug input through the JSON parser and
// checks the walk reproduces the expected debug output tree.
func TestDebug(t *testing.T) {
	p := sjson.NewJsonParser()
	data, err := json.Marshal(debug.Input)
	if err != nil {
		t.Fatal(err)
	}
	if err := p.Init(data); err != nil {
		t.Fatal(err)
	}
	m := debug.Walk(p)
	if !m.Equal(debug.Output) {
		t.Fatalf("expected %s but got %s", debug.Output, m)
	}
}
// TestRandomDebug repeatedly random-walks the parser over the debug input;
// it is a smoke test (no assertion) checking the parser does not panic or
// error when traversed in arbitrary order.
func TestRandomDebug(t *testing.T) {
	p := sjson.NewJsonParser()
	data, err := json.Marshal(debug.Input)
	if err != nil {
		t.Fatal(err)
	}
	for i := 0; i < 10; i++ {
		if err := p.Init(data); err != nil {
			t.Fatal(err)
		}
		//l := debug.NewLogger(p, debug.NewLineLogger())
		debug.RandomWalk(p, debug.NewRand(), 10, 3)
		//t.Logf("original %v vs random %v", debug.Output, m)
	}
}
// TestEscapedChar verifies that a map key containing a backslash-escaped
// quote survives the marshal/parse round trip unchanged.
func TestEscapedChar(t *testing.T) {
	j := map[string][]interface{}{
		`a\"`: {1},
	}
	data, err := json.Marshal(j)
	if err != nil {
		t.Fatal(err)
	}
	t.Logf("%s", string(data))
	parser := sjson.NewJsonParser()
	if err := parser.Init(data); err != nil {
		t.Fatal(err)
	}
	m := debug.Walk(parser)
	name := m[0].Label
	if name != `a\"` {
		t.Fatalf("wrong escaped name %s", name)
	}
}
// TestMultiLineArray is a smoke test: a JSON document spread over several
// lines must parse and walk without error.
func TestMultiLineArray(t *testing.T) {
	s := `{
	"A":[1]
}`
	parser := sjson.NewJsonParser()
	if err := parser.Init([]byte(s)); err != nil {
		t.Fatal(err)
	}
	jout := debug.Walk(parser)
	t.Logf("%v", jout)
}
// TestIntWithExponent checks that a number written in exponent form (1e+08)
// is still retrievable as an integer via Int().
func TestIntWithExponent(t *testing.T) {
	s := `{"A":1e+08}`
	parser := sjson.NewJsonParser()
	if err := parser.Init([]byte(s)); err != nil {
		t.Fatal(err)
	}
	if err := parser.Next(); err != nil {
		t.Fatal(err)
	}
	// descend into the value of field "A"
	parser.Down()
	if err := parser.Next(); err != nil {
		t.Fatal(err)
	}
	if !parser.IsLeaf() {
		t.Fatal("incorrect walk, please adjust the path above")
	}
	// 1e+08 is exactly representable, so the int/float comparison is exact
	if i, err := parser.Int(); err != nil {
		t.Fatalf("did not expect error %v", err)
	} else if i != 1e+08 {
		t.Fatalf("got %d", i)
	}
}
// testValue parses a scalar JSON document and asserts the walk yields exactly
// one childless node whose label equals output.
func testValue(t *testing.T, input, output string) {
	parser := sjson.NewJsonParser()
	if err := parser.Init([]byte(input)); err != nil {
		t.Fatalf("init error: %v", err)
	}
	jout := debug.Walk(parser)
	if len(jout) != 1 {
		t.Fatalf("expected one node")
	}
	if len(jout[0].Children) != 0 {
		t.Fatalf("did not expected any children")
	}
	if jout[0].Label != output {
		t.Fatalf("expected %s got %s", output, jout[0].Label)
	}
}
// TestValues checks that scalar JSON values walk to a single node whose
// label is the expected string rendering of the value.
func TestValues(t *testing.T) {
	cases := []struct{ input, output string }{
		{"0", "0"},
		{"1", "1"},
		{"-1", "-1"},
		{"123", "123"},
		{"1.1", "1.1"},
		{"1.123", "1.123"},
		{"1.1e1", "11"},
		{"1.1e-1", "0.11"},
		{"1.1e10", "11000000000"},
		{"1.1e+10", "11000000000"},
		{`"a"`, "a"},
		{`"abc"`, "abc"},
		{`""`, ""},
		{`"\b"`, "\b"},
		{`true`, "true"},
		{`false`, "false"},
		{`null`, "<nil>"},
	}
	for _, c := range cases {
		testValue(t, c.input, c.output)
	}
}
// testArray is a smoke helper: the JSON array document must parse and walk
// without error (the resulting tree is only logged).
func testArray(t *testing.T, s string) {
	parser := sjson.NewJsonParser()
	if err := parser.Init([]byte(s)); err != nil {
		t.Fatal(err)
	}
	jout := debug.Walk(parser)
	t.Logf("%v", jout)
}
// TestArray smoke-tests array parsing across scalar, mixed, and nested
// element types.
func TestArray(t *testing.T) {
	for _, doc := range []string{
		`[1]`,
		`[1,2.3e5]`,
		`[1,"a"]`,
		`[true, false, null]`,
		`[{"a": true, "b": [1,2]}]`,
	} {
		testArray(t, doc)
	}
}
|
package regexpx_test
import (
"regexp"
"testing"
rx "github.com/yargevad/regexpx"
)
// testMatch holds two anchored patterns; Match reports the index of the
// first pattern in the set that matches.
var testMatch = rx.RegexpSet{
	regexp.MustCompile(`^abc+$`),
	regexp.MustCompile(`^abc+d$`),
}

// MatchTest pairs an input with the expected match result and the expected
// index of the matching pattern (-1 when nothing matches).
type MatchTest struct {
	Input string
	Match bool
	Index int
}

// TestMatch exercises RegexpSet.Match over matching and non-matching inputs.
func TestMatch(t *testing.T) {
	for _, test := range []MatchTest{
		{"", false, -1},
		{"a", false, -1},
		{"abc", true, 0},
		{"abcc", true, 0},
		{"abcd", true, 1},
		{"abcde", false, -1},
	} {
		actual, idx := testMatch.Match(test.Input)
		if actual != test.Match {
			t.Fatalf("string %q expected %t actual %t", test.Input, test.Match, actual)
		} else if idx != test.Index {
			t.Fatalf("string %q expected index %d, actual %d", test.Input, test.Index, idx)
		}
	}
}
// SplitTest pairs an input with the expected matching-pattern index and the
// expected split fragments (nil when no pattern matches).
type SplitTest struct {
	Input  string
	Index  int
	Output []string
}

// testSplit splits on any single vowel.
var testSplit = rx.RegexpSet{
	regexp.MustCompile(`[aeiou]`),
}

// TestSplit exercises RegexpSet.Split for both the matching and the
// no-match (-1 index, nil output) cases.
func TestSplit(t *testing.T) {
	for _, test := range []SplitTest{
		{"paqerisotu", 0, []string{"p", "q", "r", "s", "t", ""}},
		{"fffffff", -1, nil},
	} {
		out, idx := testSplit.Split(test.Input, -1)
		if idx != test.Index {
			t.Fatalf("string %q expected index %d, actual %d", test.Input, test.Index, idx)
		} else if len(out) != len(test.Output) {
			t.Fatalf("string %q expected length %d, actual %d", test.Input, len(test.Output), len(out))
		} else {
			for i, elt := range out {
				if test.Output[i] != elt {
					t.Fatalf("string %q expected %q, actual %q", test.Input, test.Output, out)
				}
			}
		}
	}
}
// ReplaceTest pairs an input and replacement with the expected matching-
// pattern index and output (index -1 and unchanged output when no match).
type ReplaceTest struct {
	Input   string
	Replace string
	Index   int
	Output  string
}

var testReplace = rx.RegexpSet{
	regexp.MustCompile(`foo`),
	regexp.MustCompile(`y`),
	regexp.MustCompile(`(baz)`),
}

// TestReplace exercises RegexpSet.Replace, including capture-group
// references in the replacement and the no-match passthrough case.
func TestReplace(t *testing.T) {
	for _, test := range []ReplaceTest{
		{"afoobfooc", "", 0, "abc"},
		{"xbarybarz", "", 1, "xbarbarz"},
		{"abazbbazc", "($1)", 2, "a(baz)b(baz)c"},
		{"abarbbarc", "", -1, "abarbbarc"},
	} {
		out, idx := testReplace.Replace(test.Input, test.Replace)
		if idx != test.Index {
			t.Fatalf("string %q expected index %d, actual %d", test.Input, test.Index, idx)
		} else if test.Output != out {
			t.Fatalf("string %q expected %q, actual %q", test.Input, test.Output, out)
		}
	}
}
// ReplaceSubmatchTest extends the replace expectations with the submatch
// strings the operation is expected to report.
type ReplaceSubmatchTest struct {
	Input   string
	Replace string
	Index   int
	Output  string
	Matches []string
}

var testReplaceSubmatch = rx.RegexpSet{
	regexp.MustCompile(`(cd)`),
}
// TestReplaceSubmatch exercises RegexpSet.ReplaceSubmatch: the output, the
// matching-pattern index, and the reported submatches are all checked.
func TestReplaceSubmatch(t *testing.T) {
	for _, test := range []ReplaceSubmatchTest{
		{"abcdef", "", 0, "abef", []string{"cd", "cd"}},
	} {
		out, idx, matches := testReplaceSubmatch.ReplaceSubmatch(test.Input, test.Replace)
		if idx != test.Index {
			t.Fatalf("string %q expected index %d, actual %d", test.Input, test.Index, idx)
		} else if test.Output != out {
			t.Fatalf("string %q expected %q, actual %q", test.Input, test.Output, out)
		} else if len(test.Matches) != len(matches) {
			// Bug fix: the %d verbs were previously given the slices
			// themselves instead of their lengths, producing garbage output
			// like "expected length [cd cd]".
			t.Fatalf("string %q expected length %d, actual %d", test.Input, len(test.Matches), len(matches))
		} else {
			for i, m := range matches {
				if test.Matches[i] != m {
					t.Fatalf("string %q expected matches %q, actual %q", test.Input, test.Matches, matches)
				}
			}
		}
	}
}
// testSuffixes strips generational name suffixes (Jr, Sr, III, IV), keeping
// a trailing comma via the $2 capture when the suffix is mid-string.
var testSuffixes = rx.RegexpSet{
	regexp.MustCompile(`(?i)\s*\b(jr\.?)(?:(,)|$)`),
	regexp.MustCompile(`(?i)\s*\b(sr\.?)(?:(,)|$)`),
	regexp.MustCompile(`(?i)\s*\b(iii?)(?:(,)|$)`),
	regexp.MustCompile(`(?i)\s*\b(iv)(?:(,)|$)`),
}

// TestSuffixes verifies suffix removal both mid-name (comma preserved) and
// at end of string, along with the reported submatches.
func TestSuffixes(t *testing.T) {
	for _, test := range []ReplaceSubmatchTest{
		{"DOE JR, JOHN J", "$2", 0, "DOE, JOHN J", []string{" JR,", "JR", ","}},
		{"Doe, Jane G Jr", "$2", 0, "Doe, Jane G", []string{" Jr", "Jr", ""}},
	} {
		out, idx, matches := testSuffixes.ReplaceSubmatch(test.Input, test.Replace)
		if idx != test.Index {
			t.Fatalf("string %q expected index %d, actual %d", test.Input, test.Index, idx)
		} else if test.Output != out {
			t.Fatalf("string %q expected %q, actual %q", test.Input, test.Output, out)
		} else if len(test.Matches) != len(matches) {
			t.Fatalf("string %q expected length %d, actual %d", test.Input, len(test.Matches), len(matches))
		} else {
			for i, m := range matches {
				if test.Matches[i] != m {
					t.Fatalf("string %q expected matches %q, actual %q", test.Input, test.Matches, matches)
				}
			}
		}
	}
}
|
package releasetarsrepo
import (
"fmt"
"strings"
bhs3 "github.com/bosh-io/web/s3"
)
// ReleaseTarballRec describes a single release tarball blob: where it came
// from (source/version), its storage blob ID, and its SHA1 checksum.
type ReleaseTarballRec struct {
	urlFactory bhs3.URLFactory
	source     string
	versionRaw string

	BlobID string
	SHA1   string
}
// ActualDownloadURL builds the S3 download URL for this tarball, using the
// blob ID as the path and "<repo>-<version>.tgz" as the download file name.
func (r ReleaseTarballRec) ActualDownloadURL() (string, error) {
	path := "/" + r.BlobID
	fileName := fmt.Sprintf("%s-%s.tgz", r.sourceShortName(), r.versionRaw)
	return r.urlFactory.New(path, fileName).String()
}
// sourceShortName returns the final slash-separated segment of the source
// (e.g. "github.com/org/repo" -> "repo"); when the source contains no "/"
// the whole string is returned.
func (r ReleaseTarballRec) sourceShortName() string {
	return r.source[strings.LastIndex(r.source, "/")+1:]
}
|
package leetcode
import (
"reflect"
"testing"
)
// TestFourSum drives fourSum over known inputs and compares against the
// expected unique quadruplets.
// NOTE(review): reflect.DeepEqual makes this test depend on fourSum
// returning the quadruplets (and their elements) in exactly this order —
// confirm that ordering is part of the function's contract.
func TestFourSum(t *testing.T) {
	tests := []struct {
		nums      []int
		target    int
		solutions [][]int
	}{
		{
			nums:   []int{1, 0, -1, 0, -2, 2},
			target: 0,
			solutions: [][]int{
				{-2, -1, 1, 2},
				{-2, 0, 0, 2},
				{-1, 0, 0, 1},
			},
		},
		{
			nums:   []int{0, 0, 0, 0},
			target: 0,
			solutions: [][]int{
				{0, 0, 0, 0},
			},
		},
		{
			nums:   []int{1, 0, 0, 0},
			target: 1,
			solutions: [][]int{
				{0, 0, 0, 1},
			},
		},
		{
			nums:   []int{1, -2, -5, -4, -3, 3, 3, 5},
			target: -11,
			solutions: [][]int{
				{-5, -4, -3, 1},
			},
		},
	}
	for i, tt := range tests {
		if got, want := fourSum(tt.nums, tt.target), tt.solutions; !reflect.DeepEqual(got, want) {
			t.Errorf("%d: fourSum: got %v, want %v", i, got, want)
		}
	}
}
|
package oku
import (
"fmt"
"io"
"github.com/qiniu/iconv"
"github.com/saintfish/chardet"
)
// validEncodings is the intersection of types supported by chardet and iconv (ISO-8859-8-I is the only format not recognised by iconv)
// Names must match chardet's charset spellings exactly, since detection
// results are compared against this list verbatim.
var validEncodings = []string{
	"Big5",
	"EUC-JP", "EUC-KR",
	"ISO-2022-JP", "ISO-2022-KR", "ISO-2022-CN",
	"ISO-8859-1", "ISO-8859-2", "ISO-8859-5", "ISO-8859-6", "ISO-8859-7", "ISO-8859-8", "ISO-8859-9",
	"GB18030",
	"windows-1250", "windows-1251", "windows-1252", "windows-1253", "windows-1254", "windows-1255", "windows-1256",
	"KOI8-R",
	"Shift_JIS",
	"UTF-8", "UTF-16BE", "UTF-16LE", "UTF-32BE", "UTF-32LE",
}
// UTF8ReadCloser couples an iconv conversion reader with its conversion
// handle so that closing the reader also releases the handle.
type UTF8ReadCloser struct {
	r *iconv.Reader
	c iconv.Iconv
}

// Close releases the underlying iconv conversion descriptor.
func (u *UTF8ReadCloser) Close() error {
	return u.c.Close()
}

// Read reads UTF-8 converted bytes from the wrapped reader.
func (u *UTF8ReadCloser) Read(b []byte) (int, error) {
	return u.r.Read(b)
}
// NewUTF8ReadCloser wraps r in a reader converting from the named encoding
// to UTF-8. The encoding must appear in validEncodings; otherwise an error
// is returned. The caller must Close the result to free the iconv handle.
func NewUTF8ReadCloser(r io.ReadSeeker, encoding string) (io.ReadCloser, error) {
	// rewind Seeker after encoding detection
	// (runs on return, so reads through the iconv wrapper start at offset 0;
	// the Seek error is deliberately ignored here)
	defer r.Seek(0, 0)

	// validate encoding for reading
	var found bool
	for _, v := range validEncodings {
		if v == encoding {
			found = true
			break
		}
	}
	if !found {
		return nil, fmt.Errorf(`detected file encoding:"%s", but there is no valid reader`, encoding)
	}

	cd, err := iconv.Open("utf-8", encoding)
	if err != nil {
		return nil, err
	}
	return &UTF8ReadCloser{r: iconv.NewReader(cd, r, 0), c: cd}, nil
}
// Detected reports the best-guess character set of a byte sample and
// chardet's confidence in that guess.
type Detected struct {
	Charset    string
	Confidence int
}

// DetectEncoding runs chardet over b and returns its best match, mapping
// chardet's "GB-18030" spelling to the canonical "GB18030".
func DetectEncoding(b []byte) (Detected, error) {
	d := chardet.NewTextDetector()
	res, err := d.DetectBest(b)
	if err != nil {
		return Detected{}, err
	}
	if res.Charset == "GB-18030" {
		// set canonical name for this encoding type (this is a chardet bug)
		res.Charset = "GB18030"
	}
	return Detected{Charset: res.Charset, Confidence: res.Confidence}, nil
}
|
/*
This package contains the cache implementation for the Routing server and
the global variables required to access that cache.
*/
package servercac
import "github.com/sirupsen/logrus"
// InitServerCache initializes the cache-related variables at the start of
// the server goroutine. Currently it only logs start and completion — the
// actual variable setup is presumably added elsewhere or still to come.
func InitServerCache() {
	logrus.Debug("Initializing various variable related cache.")
	logrus.Debug("Completed Initialization of cache.")
}
package main
import (
"fmt"
"github.com/pkg/profile"
)
// main runs one new/add/remove cycle over the package-level map while
// recording a memory profile into the current directory.
func main() {
	defer profile.Start(profile.MemProfile, profile.ProfilePath(".")).Stop()
	fmt.Println("start")
	new()
	add()
	remove()
	fmt.Println("end")
}
// exec bundles one new/add/remove cycle.
// NOTE(review): not called anywhere in this file — main inlines the same
// three calls; possibly leftover from an earlier refactor.
func exec() {
	new()
	add()
	remove()
}
// m is the package-level map exercised by the profiling demo.
var m map[int64]struct{}

// new resets m to a fresh empty map. (The name shadows the builtin `new`
// within this package; kept for compatibility with existing callers.)
func new() {
	m = map[int64]struct{}{}
}

// add inserts the keys 0..999 into m.
func add() {
	for k := int64(0); k < 1000; k++ {
		m[k] = struct{}{}
	}
}

// remove deletes the keys 0..999 from m.
func remove() {
	for k := int64(0); k < 1000; k++ {
		delete(m, k)
	}
}
|
package model
// Payload is the JSON envelope carrying a single opaque payload string.
type Payload struct {
	Payload string `json:"payload"`
}
|
package equinix
import (
"context"
"fmt"
"github.com/equinix/ne-go"
"github.com/hashicorp/go-cty/cty"
"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)
// networkSSHUserSchemaNames maps ne.SSHUser field names to their Terraform
// schema attribute names.
var networkSSHUserSchemaNames = map[string]string{
	"UUID":        "uuid",
	"Username":    "username",
	"Password":    "password",
	"DeviceUUIDs": "device_ids",
}

// networkSSHUserDescriptions provides the schema description for each field.
var networkSSHUserDescriptions = map[string]string{
	"UUID":        "SSH user unique identifier",
	"Username":    "SSH user login name",
	"Password":    "SSH user password",
	"DeviceUUIDs": "list of device identifiers to which user will have access",
}
// resourceNetworkSSHUser returns the Terraform resource definition for an
// Equinix Network Edge SSH user (CRUD contexts plus schema).
func resourceNetworkSSHUser() *schema.Resource {
	return &schema.Resource{
		CreateContext: resourceNetworkSSHUserCreate,
		ReadContext:   resourceNetworkSSHUserRead,
		UpdateContext: resourceNetworkSSHUserUpdate,
		DeleteContext: resourceNetworkSSHUserDelete,
		Schema:        createNetworkSSHUserResourceSchema(),
		Description:   "Resource allows creation and management of Equinix Network Edge SSH users",
	}
}
// createNetworkSSHUserResourceSchema builds the attribute schema: uuid is
// computed, username forces re-creation on change, password is sensitive,
// and at least one device ID is required.
func createNetworkSSHUserResourceSchema() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		networkSSHUserSchemaNames["UUID"]: {
			Type:        schema.TypeString,
			Computed:    true,
			Description: networkSSHUserDescriptions["UUID"],
		},
		networkSSHUserSchemaNames["Username"]: {
			Type:         schema.TypeString,
			Required:     true,
			ForceNew:     true,
			ValidateFunc: validation.StringLenBetween(3, 32),
			Description:  networkSSHUserDescriptions["Username"],
		},
		networkSSHUserSchemaNames["Password"]: {
			Type:         schema.TypeString,
			Sensitive:    true,
			Required:     true,
			ValidateFunc: validation.StringLenBetween(8, 20),
			Description:  networkSSHUserDescriptions["Password"],
		},
		networkSSHUserSchemaNames["DeviceUUIDs"]: {
			Type:     schema.TypeSet,
			Required: true,
			MinItems: 1,
			Elem: &schema.Schema{
				Type:         schema.TypeString,
				ValidateFunc: validation.StringIsNotEmpty,
			},
			Description: networkSSHUserDescriptions["DeviceUUIDs"],
		},
	}
}
// resourceNetworkSSHUserCreate creates an SSH user with the first planned
// device, then assigns the remaining devices via an update request. Failure
// to assign the extra devices is surfaced as a warning rather than an error,
// since the user itself already exists at that point.
func resourceNetworkSSHUserCreate(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
	conf := m.(*Config)
	var diags diag.Diagnostics
	user := createNetworkSSHUser(d)
	// Bug fix: the guard was previously `len(...) < 0`, which can never be
	// true, so an empty device list slipped through to the API call below.
	if len(user.DeviceUUIDs) == 0 {
		return diag.Errorf("create ssh-user failed: user needs to have at least one device defined")
	}
	uuid, err := conf.ne.CreateSSHUser(ne.StringValue(user.Username), ne.StringValue(user.Password), user.DeviceUUIDs[0])
	if err != nil {
		return diag.FromErr(err)
	}
	d.SetId(ne.StringValue(uuid))
	// Attach any remaining devices in a follow-up update request.
	userUpdateReq := conf.ne.NewSSHUserUpdateRequest(ne.StringValue(uuid))
	userUpdateReq.WithDeviceChange([]string{}, user.DeviceUUIDs[1:])
	if err := userUpdateReq.Execute(); err != nil {
		diags = append(diags, diag.Diagnostic{
			Severity:      diag.Warning,
			Summary:       "Failed to assign all devices to newly created user",
			Detail:        err.Error(),
			AttributePath: cty.GetAttrPath(networkSSHUserSchemaNames["DeviceUUIDs"]),
		})
	}
	diags = append(diags, resourceNetworkSSHUserRead(ctx, d, m)...)
	return diags
}
// resourceNetworkSSHUserRead fetches the SSH user by resource ID and mirrors
// it into Terraform state.
func resourceNetworkSSHUserRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
	conf := m.(*Config)
	var diags diag.Diagnostics
	user, err := conf.ne.GetSSHUser(d.Id())
	if err != nil {
		return diag.FromErr(err)
	}
	if err := updateNetworkSSHUserResource(user, d); err != nil {
		return diag.FromErr(err)
	}
	return diags
}
// resourceNetworkSSHUserUpdate pushes password and device-assignment changes
// to the API in a single update request, then re-reads the resource.
func resourceNetworkSSHUserUpdate(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
	conf := m.(*Config)
	var diags diag.Diagnostics
	updateReq := conf.ne.NewSSHUserUpdateRequest(d.Id())
	if v, ok := d.GetOk(networkSSHUserSchemaNames["Password"]); ok && d.HasChange(networkSSHUserSchemaNames["Password"]) {
		updateReq.WithNewPassword(v.(string))
	}
	if d.HasChange(networkSSHUserSchemaNames["DeviceUUIDs"]) {
		// GetChange returns (old, new); WithDeviceChange takes them in that order.
		a, b := d.GetChange(networkSSHUserSchemaNames["DeviceUUIDs"])
		aList := expandSetToStringList(a.(*schema.Set))
		bList := expandSetToStringList(b.(*schema.Set))
		updateReq.WithDeviceChange(aList, bList)
	}
	if err := updateReq.Execute(); err != nil {
		return diag.FromErr(err)
	}
	diags = append(diags, resourceNetworkSSHUserRead(ctx, d, m)...)
	return diags
}
// resourceNetworkSSHUserDelete removes the SSH user from the API; Terraform
// clears the state entry itself on success.
func resourceNetworkSSHUserDelete(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
	conf := m.(*Config)
	var diags diag.Diagnostics
	if err := conf.ne.DeleteSSHUser(d.Id()); err != nil {
		return diag.FromErr(err)
	}
	return diags
}
// createNetworkSSHUser builds an ne.SSHUser from the resource data; fields
// absent from the configuration are left at their zero values.
func createNetworkSSHUser(d *schema.ResourceData) ne.SSHUser {
	user := ne.SSHUser{}
	if v, ok := d.GetOk(networkSSHUserSchemaNames["UUID"]); ok {
		user.UUID = ne.String(v.(string))
	}
	if v, ok := d.GetOk(networkSSHUserSchemaNames["Username"]); ok {
		user.Username = ne.String(v.(string))
	}
	if v, ok := d.GetOk(networkSSHUserSchemaNames["Password"]); ok {
		user.Password = ne.String(v.(string))
	}
	if v, ok := d.GetOk(networkSSHUserSchemaNames["DeviceUUIDs"]); ok {
		user.DeviceUUIDs = expandSetToStringList(v.(*schema.Set))
	}
	return user
}
// updateNetworkSSHUserResource copies an API-side SSH user into Terraform
// state. The password is only written when the API returned one, so a
// locally-known password is not clobbered with an empty value.
func updateNetworkSSHUserResource(user *ne.SSHUser, d *schema.ResourceData) error {
	if err := d.Set(networkSSHUserSchemaNames["UUID"], user.UUID); err != nil {
		return fmt.Errorf("error reading UUID: %s", err)
	}
	if err := d.Set(networkSSHUserSchemaNames["Username"], user.Username); err != nil {
		return fmt.Errorf("error reading Username: %s", err)
	}
	if ne.StringValue(user.Password) != "" {
		if err := d.Set(networkSSHUserSchemaNames["Password"], user.Password); err != nil {
			return fmt.Errorf("error reading Password: %s", err)
		}
	}
	if err := d.Set(networkSSHUserSchemaNames["DeviceUUIDs"], user.DeviceUUIDs); err != nil {
		return fmt.Errorf("error reading DeviceUUIDs: %s", err)
	}
	return nil
}
|
package types
import (
"fmt"
"strings"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
configv1 "github.com/openshift/api/config/v1"
"github.com/openshift/installer/pkg/ipnet"
"github.com/openshift/installer/pkg/types/alibabacloud"
"github.com/openshift/installer/pkg/types/aws"
"github.com/openshift/installer/pkg/types/azure"
"github.com/openshift/installer/pkg/types/baremetal"
"github.com/openshift/installer/pkg/types/external"
"github.com/openshift/installer/pkg/types/gcp"
"github.com/openshift/installer/pkg/types/ibmcloud"
"github.com/openshift/installer/pkg/types/libvirt"
"github.com/openshift/installer/pkg/types/none"
"github.com/openshift/installer/pkg/types/nutanix"
"github.com/openshift/installer/pkg/types/openstack"
"github.com/openshift/installer/pkg/types/ovirt"
"github.com/openshift/installer/pkg/types/powervs"
"github.com/openshift/installer/pkg/types/vsphere"
)
const (
	// InstallConfigVersion is the version supported by this package.
	// If you bump this, you must also update the list of convertible values in
	// pkg/types/conversion/installconfig.go
	InstallConfigVersion = "v1"
)

var (
	// PlatformNames is a slice with all the visibly-supported
	// platform names in alphabetical order. This is the list of
	// platforms presented to the user in the interactive wizard.
	PlatformNames = []string{
		alibabacloud.Name,
		aws.Name,
		azure.Name,
		gcp.Name,
		ibmcloud.Name,
		nutanix.Name,
		openstack.Name,
		powervs.Name,
		vsphere.Name,
	}
	// HiddenPlatformNames is a slice with all the
	// hidden-but-supported platform names. This list isn't presented
	// to the user in the interactive wizard.
	HiddenPlatformNames = []string{
		baremetal.Name,
		external.Name,
		none.Name,
	}

	// FCOS is a setting to enable Fedora CoreOS-only modifications
	FCOS = false
	// SCOS is a setting to enable CentOS Stream CoreOS-only modifications
	SCOS = false
)

// PublishingStrategy is a strategy for how various endpoints for the cluster are exposed.
// +kubebuilder:validation:Enum="";External;Internal
type PublishingStrategy string

const (
	// ExternalPublishingStrategy exposes endpoints for the cluster to the Internet.
	ExternalPublishingStrategy PublishingStrategy = "External"
	// InternalPublishingStrategy exposes the endpoints for the cluster to the private network only.
	InternalPublishingStrategy PublishingStrategy = "Internal"
)

// PolicyType is for usage policies that are applied to additionalTrustBundle.
// +kubebuilder:validation:Enum="";Proxyonly;Always
type PolicyType string

const (
	// PolicyProxyOnly enables use of AdditionalTrustBundle when http/https proxy is configured.
	PolicyProxyOnly PolicyType = "Proxyonly"
	// PolicyAlways ignores all conditions and uses AdditionalTrustBundle.
	PolicyAlways PolicyType = "Always"
)
//go:generate go run ../../vendor/sigs.k8s.io/controller-tools/cmd/controller-gen crd:crdVersions=v1 paths=. output:dir=../../data/data/

// InstallConfig is the configuration for an OpenShift install.
type InstallConfig struct {
	// +optional
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata"`

	// AdditionalTrustBundle is a PEM-encoded X.509 certificate bundle
	// that will be added to the nodes' trusted certificate store.
	//
	// +optional
	AdditionalTrustBundle string `json:"additionalTrustBundle,omitempty"`

	// AdditionalTrustBundlePolicy determines when to add the AdditionalTrustBundle
	// to the nodes' trusted certificate store. "Proxyonly" is the default.
	// The field can be set to following specified values.
	// "Proxyonly" : adds the AdditionalTrustBundle to nodes when http/https proxy is configured.
	// "Always" : always adds AdditionalTrustBundle.
	// +optional
	AdditionalTrustBundlePolicy PolicyType `json:"additionalTrustBundlePolicy,omitempty"`

	// SSHKey is the public Secure Shell (SSH) key to provide access to instances.
	// +optional
	SSHKey string `json:"sshKey,omitempty"`

	// BaseDomain is the base domain to which the cluster should belong.
	BaseDomain string `json:"baseDomain"`

	// Networking is the configuration for the pod network provider in
	// the cluster.
	*Networking `json:"networking,omitempty"`

	// ControlPlane is the configuration for the machines that comprise the
	// control plane.
	// +optional
	ControlPlane *MachinePool `json:"controlPlane,omitempty"`

	// Compute is the configuration for the machines that comprise the
	// compute nodes.
	// +optional
	Compute []MachinePool `json:"compute,omitempty"`

	// Platform is the configuration for the specific platform upon which to
	// perform the installation.
	Platform `json:"platform"`

	// PullSecret is the secret to use when pulling images.
	PullSecret string `json:"pullSecret"`

	// Proxy defines the proxy settings for the cluster.
	// If unset, the cluster will not be configured to use a proxy.
	// +optional
	Proxy *Proxy `json:"proxy,omitempty"`

	// ImageContentSources lists sources/repositories for the release-image content.
	// The field is deprecated. Please use imageDigestSources.
	// +optional
	DeprecatedImageContentSources []ImageContentSource `json:"imageContentSources,omitempty"`

	// ImageDigestSources lists sources/repositories for the release-image content.
	// +optional
	ImageDigestSources []ImageDigestSource `json:"imageDigestSources,omitempty"`

	// Publish controls how the user facing endpoints of the cluster like the Kubernetes API, OpenShift routes etc. are exposed.
	// When no strategy is specified, the strategy is "External".
	//
	// +kubebuilder:default=External
	// +optional
	Publish PublishingStrategy `json:"publish,omitempty"`

	// FIPS configures https://www.nist.gov/itl/fips-general-information
	//
	// +kubebuilder:default=false
	// +optional
	FIPS bool `json:"fips,omitempty"`

	// CPUPartitioning determines if a cluster should be setup for CPU workload partitioning at install time.
	// When this field is set the cluster will be flagged for CPU Partitioning allowing users to segregate workloads to
	// specific CPU Sets. This does not make any decisions on workloads it only configures the nodes to allow CPU Partitioning.
	// The "AllNodes" value will setup all nodes for CPU Partitioning, the default is "None".
	//
	// +kubebuilder:default="None"
	// +optional
	CPUPartitioning CPUPartitioningMode `json:"cpuPartitioningMode,omitempty"`

	// CredentialsMode is used to explicitly set the mode with which CredentialRequests are satisfied.
	//
	// If this field is set, then the installer will not attempt to query the cloud permissions before attempting
	// installation. If the field is not set or empty, then the installer will perform its normal verification that the
	// credentials provided are sufficient to perform an installation.
	//
	// There are three possible values for this field, but the valid values are dependent upon the platform being used.
	// "Mint": create new credentials with a subset of the overall permissions for each CredentialsRequest
	// "Passthrough": copy the credentials with all of the overall permissions for each CredentialsRequest
	// "Manual": CredentialsRequests must be handled manually by the user
	//
	// For each of the following platforms, the field can set to the specified values. For all other platforms, the
	// field must not be set.
	// AWS: "Mint", "Passthrough", "Manual"
	// Azure: "Passthrough", "Manual"
	// AzureStack: "Manual"
	// GCP: "Mint", "Passthrough", "Manual"
	// IBMCloud: "Manual"
	// AlibabaCloud: "Manual"
	// PowerVS: "Manual"
	// Nutanix: "Manual"
	// +optional
	CredentialsMode CredentialsMode `json:"credentialsMode,omitempty"`

	// BootstrapInPlace is the configuration for installing a single node
	// with bootstrap in place installation.
	// +optional
	BootstrapInPlace *BootstrapInPlace `json:"bootstrapInPlace,omitempty"`

	// Capabilities configures the installation of optional core cluster components.
	// +optional
	Capabilities *Capabilities `json:"capabilities,omitempty"`

	// FeatureSet enables features that are not part of the default feature set.
	// Valid values are "Default", "TechPreviewNoUpgrade" and "CustomNoUpgrade".
	// When omitted, the "Default" feature set is used.
	// +optional
	FeatureSet configv1.FeatureSet `json:"featureSet,omitempty"`

	// FeatureGates enables a set of custom feature gates.
	// May only be used in conjunction with FeatureSet "CustomNoUpgrade".
	// Features may be enabled or disabled by providing a true or false value for the feature gate.
	// E.g. "featureGates": ["FeatureGate1=true", "FeatureGate2=false"].
	// +optional
	FeatureGates []string `json:"featureGates,omitempty"`
}
// ClusterDomain returns the DNS domain that all records for a cluster must belong to.
// A trailing dot on BaseDomain (a fully-qualified form) is stripped before joining.
func (c *InstallConfig) ClusterDomain() string {
	base := strings.TrimSuffix(c.BaseDomain, ".")
	return c.ObjectMeta.Name + "." + base
}
// IsFCOS returns true if Fedora CoreOS-only modifications are enabled
func (c *InstallConfig) IsFCOS() bool {
	return FCOS
}

// IsSCOS returns true if CentOS Stream CoreOS-only modifications are enabled
func (c *InstallConfig) IsSCOS() bool {
	return SCOS
}

// IsOKD returns true if community-only modifications are enabled
// (i.e. either the FCOS or the SCOS build flavor is active).
func (c *InstallConfig) IsOKD() bool {
	return c.IsFCOS() || c.IsSCOS()
}

// IsSingleNodeOpenShift returns true if the install-config has been configured for
// bootstrapInPlace
func (c *InstallConfig) IsSingleNodeOpenShift() bool {
	return c.BootstrapInPlace != nil
}
// CPUPartitioningMode defines how the nodes should be setup for partitioning the CPU Sets.
// +kubebuilder:validation:Enum=None;AllNodes
type CPUPartitioningMode string

const (
	// CPUPartitioningNone means that no CPU Partitioning is on in this cluster infrastructure.
	CPUPartitioningNone CPUPartitioningMode = "None"
	// CPUPartitioningAllNodes means that all nodes are configured with CPU Partitioning in this cluster.
	CPUPartitioningAllNodes CPUPartitioningMode = "AllNodes"
)
// Platform is the configuration for the specific platform upon which to perform
// the installation. Only one of the platform configuration should be set.
type Platform struct {
	// AlibabaCloud is the configuration used when installing on Alibaba Cloud.
	// +optional
	AlibabaCloud *alibabacloud.Platform `json:"alibabacloud,omitempty"`

	// AWS is the configuration used when installing on AWS.
	// +optional
	AWS *aws.Platform `json:"aws,omitempty"`

	// Azure is the configuration used when installing on Azure.
	// +optional
	Azure *azure.Platform `json:"azure,omitempty"`

	// BareMetal is the configuration used when installing on bare metal.
	// +optional
	BareMetal *baremetal.Platform `json:"baremetal,omitempty"`

	// GCP is the configuration used when installing on Google Cloud Platform.
	// +optional
	GCP *gcp.Platform `json:"gcp,omitempty"`

	// IBMCloud is the configuration used when installing on IBM Cloud.
	// +optional
	IBMCloud *ibmcloud.Platform `json:"ibmcloud,omitempty"`

	// Libvirt is the configuration used when installing on libvirt.
	// +optional
	Libvirt *libvirt.Platform `json:"libvirt,omitempty"`

	// None is the empty configuration used when installing on an unsupported
	// platform.
	// +optional
	None *none.Platform `json:"none,omitempty"`

	// External is the configuration used when installing on
	// an external cloud provider.
	// +optional
	External *external.Platform `json:"external,omitempty"`

	// OpenStack is the configuration used when installing on OpenStack.
	// +optional
	OpenStack *openstack.Platform `json:"openstack,omitempty"`

	// PowerVS is the configuration used when installing on Power VS.
	// +optional
	PowerVS *powervs.Platform `json:"powervs,omitempty"`

	// VSphere is the configuration used when installing on vSphere.
	// +optional
	VSphere *vsphere.Platform `json:"vsphere,omitempty"`

	// Ovirt is the configuration used when installing on oVirt.
	// +optional
	Ovirt *ovirt.Platform `json:"ovirt,omitempty"`

	// Nutanix is the configuration used when installing on Nutanix.
	// +optional
	Nutanix *nutanix.Platform `json:"nutanix,omitempty"`
}
// Name returns a string representation of the platform (e.g. "aws" if
// AWS is non-nil). It returns an empty string if no platform is
// configured.
func (p *Platform) Name() string {
	if p == nil {
		return ""
	}
	// Exactly one pointer is expected to be set. The fields are probed in the
	// same order as before so the result is unchanged even if several are
	// (incorrectly) populated.
	if p.AlibabaCloud != nil {
		return alibabacloud.Name
	}
	if p.AWS != nil {
		return aws.Name
	}
	if p.Azure != nil {
		return azure.Name
	}
	if p.BareMetal != nil {
		return baremetal.Name
	}
	if p.GCP != nil {
		return gcp.Name
	}
	if p.IBMCloud != nil {
		return ibmcloud.Name
	}
	if p.Libvirt != nil {
		return libvirt.Name
	}
	if p.None != nil {
		return none.Name
	}
	if p.External != nil {
		return external.Name
	}
	if p.OpenStack != nil {
		return openstack.Name
	}
	if p.VSphere != nil {
		return vsphere.Name
	}
	if p.Ovirt != nil {
		return ovirt.Name
	}
	if p.PowerVS != nil {
		return powervs.Name
	}
	if p.Nutanix != nil {
		return nutanix.Name
	}
	return ""
}
// Networking defines the pod network provider in the cluster.
type Networking struct {
	// NetworkType is the type of network to install.
	// The default value is OVNKubernetes.
	//
	// +kubebuilder:default=OVNKubernetes
	// +optional
	NetworkType string `json:"networkType,omitempty"`

	// MachineNetwork is the list of IP address pools for machines.
	// This field replaces MachineCIDR, and if set MachineCIDR must
	// be empty or match the first entry in the list.
	// Default is 10.0.0.0/16 for all platforms other than libvirt and Power VS.
	// For libvirt, the default is 192.168.126.0/24.
	// For Power VS, the default is 192.168.0.0/24.
	//
	// +optional
	MachineNetwork []MachineNetworkEntry `json:"machineNetwork,omitempty"`

	// ClusterNetwork is the list of IP address pools for pods.
	// Default is 10.128.0.0/14 and a host prefix of /23.
	//
	// +optional
	ClusterNetwork []ClusterNetworkEntry `json:"clusterNetwork,omitempty"`

	// ServiceNetwork is the list of IP address pools for services.
	// Default is 172.30.0.0/16.
	// NOTE: currently only one entry is supported.
	//
	// +kubebuilder:validation:MaxItems=1
	// +optional
	ServiceNetwork []ipnet.IPNet `json:"serviceNetwork,omitempty"`

	// Deprecated types, scheduled to be removed

	// Deprecated way to configure an IP address pool for machines.
	// Replaced by MachineNetwork which allows for multiple pools.
	// +optional
	DeprecatedMachineCIDR *ipnet.IPNet `json:"machineCIDR,omitempty"`

	// Deprecated name for NetworkType
	// +optional
	DeprecatedType string `json:"type,omitempty"`

	// Deprecated way to configure an IP address pool for services.
	// Replaced by ServiceNetwork which allows for multiple pools.
	// +optional
	DeprecatedServiceCIDR *ipnet.IPNet `json:"serviceCIDR,omitempty"`

	// Deprecated name for ClusterNetwork
	// +optional
	DeprecatedClusterNetworks []ClusterNetworkEntry `json:"clusterNetworks,omitempty"`
}

// MachineNetworkEntry is a single IP address block for node IP blocks.
type MachineNetworkEntry struct {
	// CIDR is the IP block address pool for machines within the cluster.
	CIDR ipnet.IPNet `json:"cidr"`
}

// ClusterNetworkEntry is a single IP address block for pod IP blocks. IP blocks
// are allocated with size 2^HostSubnetLength.
type ClusterNetworkEntry struct {
	// CIDR is the IP block address pool.
	CIDR ipnet.IPNet `json:"cidr"`

	// HostPrefix is the prefix size to allocate to each node from the CIDR.
	// For example, 24 would allocate 2^8=256 addresses to each node. If this
	// field is not used by the plugin, it can be left unset.
	// +optional
	HostPrefix int32 `json:"hostPrefix,omitempty"`

	// The size of blocks to allocate from the larger pool.
	// This is the length in bits - so a 9 here will allocate a /23.
	// +optional
	DeprecatedHostSubnetLength int32 `json:"hostSubnetLength,omitempty"`
}
// Proxy defines the proxy settings for the cluster.
// At least one of HTTPProxy or HTTPSProxy is required.
type Proxy struct {
	// HTTPProxy is the URL of the proxy for HTTP requests.
	// +optional
	HTTPProxy string `json:"httpProxy,omitempty"`

	// HTTPSProxy is the URL of the proxy for HTTPS requests.
	// +optional
	HTTPSProxy string `json:"httpsProxy,omitempty"`

	// NoProxy is a comma-separated list of domains and CIDRs for which the proxy should not be used.
	// +optional
	NoProxy string `json:"noProxy,omitempty"`
}

// ImageContentSource defines a list of sources/repositories that can be used to pull content.
// The field is deprecated. Please use imageDigestSources.
type ImageContentSource struct {
	// Source is the repository that users refer to, e.g. in image pull specifications.
	Source string `json:"source"`

	// Mirrors is one or more repositories that may also contain the same images.
	// +optional
	Mirrors []string `json:"mirrors,omitempty"`
}

// ImageDigestSource defines a list of sources/repositories that can be used to pull content.
type ImageDigestSource struct {
	// Source is the repository that users refer to, e.g. in image pull specifications.
	Source string `json:"source"`

	// Mirrors is one or more repositories that may also contain the same images.
	// +optional
	Mirrors []string `json:"mirrors,omitempty"`
}

// CredentialsMode is the mode by which CredentialsRequests will be satisfied.
// +kubebuilder:validation:Enum="";Mint;Passthrough;Manual
type CredentialsMode string

const (
	// ManualCredentialsMode indicates that cloud-credential-operator should not process any CredentialsRequests.
	ManualCredentialsMode CredentialsMode = "Manual"

	// MintCredentialsMode indicates that cloud-credential-operator should be creating users for each
	// CredentialsRequest.
	MintCredentialsMode CredentialsMode = "Mint"

	// PassthroughCredentialsMode indicates that cloud-credential-operator should just copy over the cluster's
	// cloud credentials for each CredentialsRequest.
	PassthroughCredentialsMode CredentialsMode = "Passthrough"
)

// BootstrapInPlace defines the configuration for bootstrap-in-place installation
type BootstrapInPlace struct {
	// InstallationDisk is the target disk drive for coreos-installer
	InstallationDisk string `json:"installationDisk"`
}

// Capabilities selects the managed set of optional, core cluster components.
type Capabilities struct {
	// baselineCapabilitySet selects an initial set of
	// optional capabilities to enable, which can be extended via
	// additionalEnabledCapabilities. The default is vCurrent.
	// +optional
	BaselineCapabilitySet configv1.ClusterVersionCapabilitySet `json:"baselineCapabilitySet,omitempty"`

	// additionalEnabledCapabilities extends the set of managed
	// capabilities beyond the baseline defined in
	// baselineCapabilitySet. The default is an empty set.
	// +optional
	AdditionalEnabledCapabilities []configv1.ClusterVersionCapability `json:"additionalEnabledCapabilities,omitempty"`
}
// WorkerMachinePool retrieves the worker MachinePool from InstallConfig.Compute.
// It returns a pointer to the element inside the Compute slice, so mutations
// made through the returned pointer are reflected in the InstallConfig.
// It returns nil when no pool carries the compute or edge role.
func (c *InstallConfig) WorkerMachinePool() *MachinePool {
	for i := range c.Compute {
		switch c.Compute[i].Name {
		case MachinePoolComputeRoleName, MachinePoolEdgeRoleName:
			// Return the slice element itself. The previous implementation
			// returned the address of the range loop variable — a per-iteration
			// copy — so caller mutations never reached c.Compute.
			return &c.Compute[i]
		}
	}
	return nil
}
|
package DeviceAPI
import b64 "encoding/base64"
// JPush bundles the JPush REST API credentials with the Device and Push
// sub-clients. Device and Push are embedded so their methods are promoted
// onto JPush itself.
type JPush struct {
	// Authorization is the HTTP Basic auth header value ("Basic <base64>").
	Authorization string
	Device
	Push // used for sending push notifications
}
// NewJPush builds a JPush client from the application key and master secret.
// The pair is encoded as an HTTP Basic authorization header and shared with
// the Device and Push sub-clients.
func NewJPush(appKey, masterSecret string) *JPush {
	credentials := []byte(appKey + ":" + masterSecret)
	auth := "Basic " + b64.StdEncoding.EncodeToString(credentials)
	client := &JPush{
		Authorization: auth,
		Device:        NewDevice(auth),
		Push:          NewPush(auth),
	}
	return client
}
|
package log
import "fmt"
// Log writes s to standard output followed by a newline.
func Log(s string) {
	fmt.Printf("%s\n", s)
}
|
package main
import (
"database/sql"
"fmt"
_ "github.com/mattn/go-sqlite3"
"log"
"os"
"strconv"
)
// DbFunc is a single schema-migration step applied to an open database.
type DbFunc func(Db *sql.DB)

// TODO add indexes

// applySchemaChange runs one migration step inside an exclusive transaction.
// A panic raised by the step is recovered *here* — the original registered the
// recovery with `defer` inside main's loop, so it only fired when main
// returned, long after the handle was closed and far too late to roll back.
// Returns true when the step committed.
func applySchemaChange(db *sql.DB, name string, version int, change DbFunc) (ok bool) {
	defer func() {
		if r := recover(); r != nil {
			fmt.Printf("Schema changes failed for version %d on database %s\n", version, name)
			db.Exec("ROLLBACK TRANSACTION")
			ok = false
		}
	}()
	db.Exec("BEGIN EXCLUSIVE TRANSACTION")
	fmt.Printf("\n%s - executing schema version %d\n", name, version)
	change(db)
	db.Exec("COMMIT TRANSACTION")
	return true
}

func main() {
	// Each slice is an ordered list of migrations; the database's
	// PRAGMA user_version records how many have already been applied.
	BackendDbSchema := []DbFunc{
		func(Db *sql.DB) {
			Db.Exec(`CREATE TABLE GPSRecords (
				ID INTEGER PRIMARY KEY AUTOINCREMENT,
				Message TEXT,
				Latitude TEXT NOT NULL,
				Longitude TEXT NOT NULL,
				Speed INTEGER NOT NULL,
				Heading float NOT NULL,
				Fix INTEGER NOT NULL,
				DateTime date NOT NULL DEFAULT current_timestamp,
				BusID TEXT NOT NULL);`)
			Db.Exec(`CREATE TABLE DiagnosticRecords (
				ID INTEGER PRIMARY KEY AUTOINCREMENT,
				CPUTemperature REAL,
				CPUVoltage REAL,
				CPUFrequency REAL,
				MemoryFree INTEGER,
				Date DateTime DEFAULT CURRENT_TIMESTAMP);`)
			Db.Exec(`CREATE TABLE Support (
				SupportID INTEGER PRIMARY KEY AUTOINCREMENT,
				UserID INTEGER NOT NULL,
				Subject TEXT NOT NULL,
				Body TEXT NOT NULL,
				DateTime date NOT NULL DEFAULT current_timestamp,
				FOREIGN KEY (UserID) REFERENCES User(ID));`)
			Db.Exec(`CREATE TABLE Errors (
				ID INTEGER PRIMARY KEY AUTOINCREMENT,
				GPSRecordID INTEGER NOT NULL,
				Error TEXT,
				DateTime date NOT NULL DEFAULT current_timestamp,
				FOREIGN KEY (GPSRecordID) REFERENCES GPSrecords(id));`)
			Db.Exec(`CREATE TABLE Network (
				ID INTEGER PRIMARY KEY AUTOINCREMENT,
				GPSRecordID INTEGER NOT NULL,
				Acknowledge INTEGER NOT NULL DEFAULT 0,
				FOREIGN KEY (GPSRecordID) REFERENCES GPSRecords(id));`)
			Db.Exec("PRAGMA foreign_keys=ON;")
			Db.Exec("PRAGMA journal_mode=WAL;")
		},
	}

	LicenseDbSchema := []DbFunc{
		func(Db *sql.DB) {
			Db.Exec(`CREATE TABLE User (
				ID INTEGER PRIMARY KEY AUTOINCREMENT,
				FirstName TEXT NOT NULL,
				LastName TEXT NOT NULL,
				CompanyID INTEGER NOT NULL,
				Password TEXT NOT NULL,
				AccessLevel INTEGER NOT NULL DEFAULT 0,
				Email TEXT NOT NULL,
				FOREIGN KEY (CompanyID) REFERENCES Company(ID));`)
			Db.Exec(`CREATE TABLE Settings (
				ID INTEGER PRIMARY KEY AUTOINCREMENT,
				UserID INTEGER NOT NULL,
				MapAPI TEXT NOT NULL DEFAULT 'GoogleMaps',
				Interpolate INTEGER NOT NULL DEFAULT 1,
				SnaptoRoad INTEGER NOT NULL DEFAULT 1,
				CameraPanTrigger INTEGER NOT NULL DEFAULT 10,
				FOREIGN KEY (UserID) REFERENCES User(ID));`)
			// Fixed: the original DDL had a stray ")" after the closing ";",
			// which made the statement invalid SQL (the Exec error was
			// silently discarded, so the table was never created).
			Db.Exec(`CREATE TABLE ApplicationLogin (
				UserID INTEGER,
				LoggedIn date NOT NULL DEFAULT current_timestamp,
				LoggedOut date,
				PRIMARY KEY(UserID, LoggedIn));`)
			Db.Exec(`CREATE TABLE Company (
				ID INTEGER PRIMARY KEY AUTOINCREMENT,
				Name TEXT NOT NULL,
				Expiry date NOT NULL DEFAULT current_timestamp,
				MaxUsers INTEGER NOT NULL DEFAULT 0,
				LogoPath TEXT NOT NULL DEFAULT '');`)
			Db.Exec(`CREATE TABLE CompanySettings (
				ID integer primary key autoincrement,
				CompanyID integer not null,
				RadioCommunication integer not null default 1,
				DataCommunication integer not null default 1,
				SecurityRemoteAdmin integer not null default 0,
				SecurityConsoleAccess integer not null default 0,
				SecurityAdminPasswordReset integer not null default 0,
				MobileSmartPhoneAccess integer not null default 0,
				MobileShowBusLocation integer not null default 0,
				MinZoom integer not null default 10,
				Maxzoom integer not null default 2,
				HistoricalmapsKmMin integer not null default 10,
				ClubBoundaryKM integer not null default 100,
				FOREIGN KEY (CompanyID) REFERENCES Company(ID));`)
			// there needs to be a command that grabs current git master sha and updates this table with Version and VersionDate
			Db.Exec(`CREATE TABLE Version (
				ID TEXT PRIMARY KEY,
				SHA1 TEXT NOT NULL UNIQUE,
				ReleaseDate date NOT NULL DEFAULT current_timestamp);`)
			Db.Exec("PRAGMA foreign_keys=ON;")
		},
		func(Db *sql.DB) {
			// Create a few users. NOTE: passwords are stored in plain text;
			// once salted hashing lands, a further migration must rewrite them.
			Db.Exec(`INSERT INTO User (FirstName, LastName, CompanyID, Password, AccessLevel, Email)
				VALUES ('guest','user', 1, 'guest', 0, 'guest@myclublink.com.au');`)
			// Fixed: the original statement carried a stray '"' after the
			// closing ";", corrupting the SQL text.
			Db.Exec(`INSERT INTO User (FirstName, LastName, CompanyID, Password, AccessLevel, Email)
				VALUES ('Joe', 'Blow', 2, 'joe', 10, 'joe@blow.com');`)
			Db.Exec(`INSERT INTO User (FirstName, LastName, CompanyID, Password, AccessLevel, Email)
				VALUES ('Brad' , 'McCormack', 2, 'brad', 9, 'bradmccormack100@gmail.com');`)

			// Set up some default settings
			Db.Exec("INSERT INTO Settings (UserID, MapAPI, Interpolate, SnaptoRoad, CameraPanTrigger) VALUES (1, 'Google Maps', 0, 0, 1);")
			Db.Exec("INSERT INTO Settings (UserID, MapAPI, Interpolate, SnaptoRoad, CameraPanTrigger) VALUES (2, 'Google Maps', 0, 0, 1);")
			Db.Exec("INSERT INTO Settings (UserID, MapAPI, Interpolate, SnaptoRoad, CameraPanTrigger) VALUES (3, 'Google Maps', 0, 0, 1);")
			Db.Exec("INSERT INTO Settings (UserID, MapAPI, Interpolate, SnaptoRoad, CameraPanTrigger) VALUES (4, 'Google Maps', 0, 0, 1);")

			// Set up some default companies and path to logo
			Db.Exec("INSERT INTO Company (Name, MaxUsers, Expiry, LogoPath) VALUES ('myClubLink' , 1, '2100-01-20 12:00:00', 'img/mcl_logo.png');")
			Db.Exec("INSERT INTO Company (Name, MaxUsers, Expiry, LogoPath) VALUES ('Sussex Inlet RSL Group', 5, '2015-06-6 12:00:00', 'img/sussex_logo.PNG');")

			// Note a company must have a company settings record
			Db.Exec(`INSERT INTO CompanySettings (CompanyID, RadioCommunication, DataCommunication, SecurityRemoteAdmin,
				SecurityConsoleAccess, SecurityAdminPasswordReset, MobileSmartPhoneAccess, MinZoom, MaxZoom, HistoricalmapsKmMin, ClubBoundaryKM)
				VALUES(1, 1, 1, 0, 0, 0, 0, 1, 10, 10, 100);`)
			Db.Exec(`INSERT INTO CompanySettings (CompanyID, RadioCommunication, DataCommunication, SecurityRemoteAdmin,
				SecurityConsoleAccess, SecurityAdminPasswordReset, MobileSmartPhoneAccess, MinZoom, MaxZoom, HistoricalmapsKmMin, ClubBoundaryKM)
				VALUES(2, 1, 1, 0, 0, 0, 0, 1, 10, 10, 100);`)
		},
	}

	GeoDbSchema := []DbFunc{
		func(Db *sql.DB) {
			// point of interest such as street
			Db.Exec(`CREATE TABLE POI (
				ID INTEGER NOT NULL PRIMARY KEY,
				Name TEXT NOT NULL);`)
			Db.Exec(`CREATE TABLE LatLong (
				ID INTEGER NOT NULL PRIMARY KEY,
				POIID INTEGER NOT NULL,
				LAT TEXT NOT NULL,
				LONG TEXT NOT NULL,
				Name TEXT);`)
			Db.Exec("PRAGMA foreign_keys=ON;")
		},
	}

	DatabasesChanges := map[string][]DbFunc{
		"backend.db":  BackendDbSchema,
		"license.key": LicenseDbSchema,
		"geodata.db":  GeoDbSchema,
	}

	for k, DataBaseChanges := range DatabasesChanges {
		DbHandle, err := sql.Open("sqlite3", k)
		if err != nil {
			fmt.Printf("%s didn't exist. will be created", k)
			// Fixed: the original always created "./backend.db" here, even
			// when the missing database was license.key or geodata.db.
			if _, err := os.Create(k); err != nil {
				// Fixed: log.Fatal does not interpret format verbs; Fatalf does.
				log.Fatalf("Cannot create %s!\n", k)
			}
		}

		var userVersion int
		if err := DbHandle.QueryRow("PRAGMA USER_VERSION").Scan(&userVersion); err != nil {
			log.Fatal("Cannot get user version\n")
		}

		// Apply only the migrations that are newer than the recorded version.
		for i := userVersion; i < len(DataBaseChanges); i++ {
			if !applySchemaChange(DbHandle, k, i, DataBaseChanges[i]) {
				// Stop at the first failed step; later steps depend on it.
				break
			}
			userVersion++
		}
		DbHandle.Exec("PRAGMA USER_VERSION=" + strconv.Itoa(userVersion))
		DbHandle.Close()
	}
}
|
package foo
import (
"context"
"net/http"
"strings"
kithttp "github.com/go-kit/kit/transport/http"
"github.com/pkg/errors"
)
// ResStatus is a machine-readable response status keyword. It is used as the
// prefix of error messages; JsonError maps the text before the first ":" back
// to an HTTP status code via ResponseMessage.
type ResStatus string

// ResponseMessage maps each known ResStatus to the HTTP status code emitted
// with it in a JSON error response.
var ResponseMessage = map[ResStatus]int{
	Invalid: 400,
}

const (
	// Invalid marks a request that failed validation (maps to HTTP 400).
	Invalid ResStatus = "invalid"
)
// String returns the status as its plain string value.
func (c ResStatus) String() string {
	return string(c)
}

// Error builds a fresh error whose message is the status text.
// NOTE(review): despite the name this is not the error-interface method
// (that would be `Error() string`), so ResStatus itself does not satisfy error.
func (c ResStatus) Error() error {
	return errors.New(string(c))
}

// Wrap annotates err with the status text as a "<status>: <cause>" prefix,
// which JsonError later splits on ":" to recover the status code.
func (c ResStatus) Wrap(err error) error {
	return errors.Wrap(err, string(c))
}
// Response is the uniform JSON envelope returned by this transport layer.
type Response struct {
	Success bool        `json:"success"`
	Code    int         `json:"code"`
	Data    interface{} `json:"data,omitempty"`
	// NOTE(review): plain error values typically marshal to "{}" with
	// encoding/json — confirm callers populate this with a JSON-marshalable
	// error type if the "message" field must carry useful content.
	Error error `json:"message,omitempty"`
}

// Failure is implemented by responses that may carry a business-level error.
type Failure interface {
	Failed() error
}

// Errorer is implemented by responses exposing a transport-level error.
type Errorer interface {
	Error() error
}
func Error(_ context.Context, err error, w http.ResponseWriter) {
w.WriteHeader(http.StatusInternalServerError)
_, _ = w.Write([]byte(err.Error()))
}
func JsonError(ctx context.Context, err error, w http.ResponseWriter) {
headers, ok := ctx.Value("response-headers").(map[string]string)
if ok {
for k, v := range headers {
w.Header().Set(k, v)
}
}
_ = kithttp.EncodeJSONResponse(ctx, w, map[string]interface{}{
"message": err.Error(),
"code": ResponseMessage[ResStatus(strings.Split(err.Error(), ":")[0])],
"success": false,
})
}
// JsonResponse encodes response as JSON. A response implementing Failure with
// a non-nil error is routed through JsonError; otherwise the value must be a
// Response, whose Code/Success are defaulted to 200/true when no error is set.
// Headers stashed in the context under "response-headers" are copied onto the
// response before encoding.
func JsonResponse(ctx context.Context, w http.ResponseWriter, response interface{}) (err error) {
	if f, ok := response.(Failure); ok && f.Failed() != nil {
		JsonError(ctx, f.Failed(), w)
		return nil
	}
	// Guard the assertion: the original bare `response.(Response)` panicked on
	// any other payload type, killing the handler goroutine instead of
	// surfacing an encodable error.
	resp, ok := response.(Response)
	if !ok {
		return errors.New("JsonResponse: response payload is not a Response")
	}
	if resp.Error == nil {
		resp.Code = 200
		resp.Success = true
	}
	headers, ok := ctx.Value("response-headers").(map[string]string)
	if ok {
		for k, v := range headers {
			w.Header().Set(k, v)
		}
	}
	return kithttp.EncodeJSONResponse(ctx, w, resp)
}
|
package ws
import (
"fmt"
"net/http"
"github.com/gorilla/websocket"
)
// Read/write buffer sizes (in bytes) handed to the websocket upgrader.
const wsReadBufferSize = 1024
const wsWriteBufferSize = 1024

// Client wraps a single established websocket connection.
type Client struct {
	conn *websocket.Conn
}
// UpgradeConnection upgrades the incoming HTTP request to a websocket
// connection, applying responseHeader to the handshake reply, and wraps the
// result in a Client.
func UpgradeConnection(w http.ResponseWriter, req *http.Request, responseHeader http.Header) (*Client, error) {
	conn, err := websocket.Upgrade(w, req, responseHeader, wsReadBufferSize, wsWriteBufferSize)
	if err != nil {
		return nil, fmt.Errorf("an error occurred during connection upgrade: %v", err)
	}
	return &Client{conn: conn}, nil
}
// WriteMsg sends msg to the peer as a single text-type websocket message.
func (c *Client) WriteMsg(msg []byte) error {
	return c.conn.WriteMessage(websocket.TextMessage, msg)
}

// Close tears down the underlying websocket connection.
func (c *Client) Close() error {
	return c.conn.Close()
}
|
package typeInfo
import (
"github.com/graphql-go/graphql/language/ast"
)
// TypeInfoI defines the interface for TypeInfo implementations: a visitor
// that is notified when AST traversal enters and leaves each node.
type TypeInfoI interface {
	// Enter is called before a node's children are visited.
	Enter(node ast.Node)
	// Leave is called after a node's children have been visited.
	Leave(node ast.Node)
}
|
// All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v20180930
import (
"github.com/tencentyun/tcecloud-sdk-go/tcecloud/common"
tchttp "github.com/tencentyun/tcecloud-sdk-go/tcecloud/common/http"
"github.com/tencentyun/tcecloud-sdk-go/tcecloud/common/profile"
)
// APIVersion is the service API version this client speaks.
const APIVersion = "2018-09-30"

// Client is the "opagentcontrol" API client. It embeds the common SDK client,
// which carries the credentials, region and HTTP transport.
type Client struct {
	common.Client
}

// NewClientWithSecretId builds a Client directly from a secret id/key pair
// using a default client profile.
//
// Deprecated: use NewClient with a common.Credential instead.
func NewClientWithSecretId(secretId, secretKey, region string) (client *Client, err error) {
	cpf := profile.NewClientProfile()
	client = &Client{}
	client.Init(region).WithSecretId(secretId, secretKey).WithProfile(cpf)
	return
}

// NewClient builds a Client from an explicit credential, region and profile.
func NewClient(credential *common.Credential, region string, clientProfile *profile.ClientProfile) (client *Client, err error) {
	client = &Client{}
	client.Init(region).
		WithCredential(credential).
		WithProfile(clientProfile)
	return
}
// NewCreateModuleRequest builds an empty CreateModule request with the
// service name ("opagentcontrol"), API version and action pre-populated.
func NewCreateModuleRequest() (request *CreateModuleRequest) {
	request = &CreateModuleRequest{
		BaseRequest: &tchttp.BaseRequest{},
	}
	request.Init().WithApiInfo("opagentcontrol", APIVersion, "CreateModule")
	return
}

// NewCreateModuleResponse builds an empty response object for CreateModule.
func NewCreateModuleResponse() (response *CreateModuleResponse) {
	response = &CreateModuleResponse{
		BaseResponse: &tchttp.BaseResponse{},
	}
	return
}

// CreateModule creates a new module.
func (c *Client) CreateModule(request *CreateModuleRequest) (response *CreateModuleResponse, err error) {
	if request == nil {
		request = NewCreateModuleRequest()
	}
	response = NewCreateModuleResponse()
	err = c.Send(request, response)
	return
}
// NewCreateModuleNewVersionCreatePkgRequest builds an empty
// CreateModuleNewVersionCreatePkg request with the API metadata pre-populated.
func NewCreateModuleNewVersionCreatePkgRequest() (request *CreateModuleNewVersionCreatePkgRequest) {
	request = &CreateModuleNewVersionCreatePkgRequest{
		BaseRequest: &tchttp.BaseRequest{},
	}
	request.Init().WithApiInfo("opagentcontrol", APIVersion, "CreateModuleNewVersionCreatePkg")
	return
}

// NewCreateModuleNewVersionCreatePkgResponse builds an empty response object
// for CreateModuleNewVersionCreatePkg.
func NewCreateModuleNewVersionCreatePkgResponse() (response *CreateModuleNewVersionCreatePkgResponse) {
	response = &CreateModuleNewVersionCreatePkgResponse{
		BaseResponse: &tchttp.BaseResponse{},
	}
	return
}

// CreateModuleNewVersionCreatePkg creates the package for a new module version.
func (c *Client) CreateModuleNewVersionCreatePkg(request *CreateModuleNewVersionCreatePkgRequest) (response *CreateModuleNewVersionCreatePkgResponse, err error) {
	if request == nil {
		request = NewCreateModuleNewVersionCreatePkgRequest()
	}
	response = NewCreateModuleNewVersionCreatePkgResponse()
	err = c.Send(request, response)
	return
}
// NewCreateModuleNewVersionGetTmpdirRequest builds an empty
// CreateModuleNewVersionGetTmpdir request with the API metadata pre-populated.
func NewCreateModuleNewVersionGetTmpdirRequest() (request *CreateModuleNewVersionGetTmpdirRequest) {
	request = &CreateModuleNewVersionGetTmpdirRequest{
		BaseRequest: &tchttp.BaseRequest{},
	}
	request.Init().WithApiInfo("opagentcontrol", APIVersion, "CreateModuleNewVersionGetTmpdir")
	return
}

// NewCreateModuleNewVersionGetTmpdirResponse builds an empty response object
// for CreateModuleNewVersionGetTmpdir.
func NewCreateModuleNewVersionGetTmpdirResponse() (response *CreateModuleNewVersionGetTmpdirResponse) {
	response = &CreateModuleNewVersionGetTmpdirResponse{
		BaseResponse: &tchttp.BaseResponse{},
	}
	return
}

// CreateModuleNewVersionGetTmpdir obtains the temporary directory ID used
// while creating a new module version.
func (c *Client) CreateModuleNewVersionGetTmpdir(request *CreateModuleNewVersionGetTmpdirRequest) (response *CreateModuleNewVersionGetTmpdirResponse, err error) {
	if request == nil {
		request = NewCreateModuleNewVersionGetTmpdirRequest()
	}
	response = NewCreateModuleNewVersionGetTmpdirResponse()
	err = c.Send(request, response)
	return
}
// NewDescribeModulesRequest builds an empty DescribeModules request with the
// API metadata pre-populated.
func NewDescribeModulesRequest() (request *DescribeModulesRequest) {
	request = &DescribeModulesRequest{
		BaseRequest: &tchttp.BaseRequest{},
	}
	request.Init().WithApiInfo("opagentcontrol", APIVersion, "DescribeModules")
	return
}

// NewDescribeModulesResponse builds an empty response object for DescribeModules.
func NewDescribeModulesResponse() (response *DescribeModulesResponse) {
	response = &DescribeModulesResponse{
		BaseResponse: &tchttp.BaseResponse{},
	}
	return
}

// DescribeModules queries the list of modules.
func (c *Client) DescribeModules(request *DescribeModulesRequest) (response *DescribeModulesResponse, err error) {
	if request == nil {
		request = NewDescribeModulesRequest()
	}
	response = NewDescribeModulesResponse()
	err = c.Send(request, response)
	return
}
func NewGetEnumInfoRequest() (request *GetEnumInfoRequest) {
request = &GetEnumInfoRequest{
BaseRequest: &tchttp.BaseRequest{},
}
request.Init().WithApiInfo("opagentcontrol", APIVersion, "GetEnumInfo")
return
}
func NewGetEnumInfoResponse() (response *GetEnumInfoResponse) {
response = &GetEnumInfoResponse{
BaseResponse: &tchttp.BaseResponse{},
}
return
}
// 获取对象枚举信息
func (c *Client) GetEnumInfo(request *GetEnumInfoRequest) (response *GetEnumInfoResponse, err error) {
if request == nil {
request = NewGetEnumInfoRequest()
}
response = NewGetEnumInfoResponse()
err = c.Send(request, response)
return
}
// NewInstallModulesRequest builds a request pre-populated with the API
// metadata for InstallModules.
func NewInstallModulesRequest() (request *InstallModulesRequest) {
    request = &InstallModulesRequest{
        BaseRequest: &tchttp.BaseRequest{},
    }
    request.Init().WithApiInfo("opagentcontrol", APIVersion, "InstallModules")
    return
}

// NewInstallModulesResponse builds an empty response object.
func NewInstallModulesResponse() (response *InstallModulesResponse) {
    response = &InstallModulesResponse{
        BaseResponse: &tchttp.BaseResponse{},
    }
    return
}

// InstallModules installs modules.
func (c *Client) InstallModules(request *InstallModulesRequest) (response *InstallModulesResponse, err error) {
    // A nil request is replaced with a default one so Send always has a
    // fully initialised request to serialise.
    if request == nil {
        request = NewInstallModulesRequest()
    }
    response = NewInstallModulesResponse()
    err = c.Send(request, response)
    return
}

// NewModifyModuleDescRequest builds a request pre-populated with the API
// metadata for ModifyModuleDesc.
func NewModifyModuleDescRequest() (request *ModifyModuleDescRequest) {
    request = &ModifyModuleDescRequest{
        BaseRequest: &tchttp.BaseRequest{},
    }
    request.Init().WithApiInfo("opagentcontrol", APIVersion, "ModifyModuleDesc")
    return
}

// NewModifyModuleDescResponse builds an empty response object.
func NewModifyModuleDescResponse() (response *ModifyModuleDescResponse) {
    response = &ModifyModuleDescResponse{
        BaseResponse: &tchttp.BaseResponse{},
    }
    return
}

// ModifyModuleDesc modifies a module's description.
func (c *Client) ModifyModuleDesc(request *ModifyModuleDescRequest) (response *ModifyModuleDescResponse, err error) {
    if request == nil {
        request = NewModifyModuleDescRequest()
    }
    response = NewModifyModuleDescResponse()
    err = c.Send(request, response)
    return
}
// NewUninstallModulesRequest builds a request pre-populated with the API
// metadata for UninstallModules.
func NewUninstallModulesRequest() (request *UninstallModulesRequest) {
    request = &UninstallModulesRequest{
        BaseRequest: &tchttp.BaseRequest{},
    }
    request.Init().WithApiInfo("opagentcontrol", APIVersion, "UninstallModules")
    return
}

// NewUninstallModulesResponse builds an empty response object.
func NewUninstallModulesResponse() (response *UninstallModulesResponse) {
    response = &UninstallModulesResponse{
        BaseResponse: &tchttp.BaseResponse{},
    }
    return
}

// UninstallModules uninstalls modules.
func (c *Client) UninstallModules(request *UninstallModulesRequest) (response *UninstallModulesResponse, err error) {
    // A nil request is replaced with a default one so Send always has a
    // fully initialised request to serialise.
    if request == nil {
        request = NewUninstallModulesRequest()
    }
    response = NewUninstallModulesResponse()
    err = c.Send(request, response)
    return
}
|
// Copyright 2021 Clivern. All rights reserved.
// Use of this source code is governed by the MIT
// license that can be found in the LICENSE file.
package cmd
import (
"fmt"
"os"
"github.com/spf13/cobra"
)
// config holds a configuration file path.
// NOTE(review): nothing in this chunk assigns or reads it — presumably it
// is bound to a persistent flag elsewhere; confirm it is wired up.
var config string

// rootCmd is the top-level cobra command for the peanut CLI.
var rootCmd = &cobra.Command{
	Use: "peanut",
	Short: `🐺 Deploy Databases and Services Easily for Development and Testing Pipelines
If you have any suggestions, bug reports, or annoyances please report
them to our issue tracker at <https://github.com/clivern/peanut/issues>`,
}
// Execute runs the root command and terminates the process with a
// non-zero exit code if the command fails.
func Execute() {
	err := rootCmd.Execute()
	if err == nil {
		return
	}
	fmt.Println(err)
	os.Exit(1)
}
|
package main
import (
"fmt"
)
// TreeNode is a binary tree node.
type TreeNode struct {
	Val   int
	Left  *TreeNode
	Right *TreeNode
}

// tree2str walks t in preorder (root, left, right) using an explicit
// stack, prints the visited values, and returns an empty string.
// NOTE(review): the actual string construction (LeetCode 606 style)
// appears unimplemented — only the traversal/printing is done.
func tree2str(t *TreeNode) string {
	// Guard: the original pushed a nil root and panicked on cur.Val.
	if t == nil {
		return ""
	}
	st := []*TreeNode{t}
	nums := []int{}
	for len(st) > 0 {
		// Pop the stack top ("n" avoids shadowing the builtin len).
		n := len(st)
		cur := st[n-1]
		st = st[:n-1]
		nums = append(nums, cur.Val)
		// Push right first so the left child is visited first (preorder).
		if cur.Right != nil {
			st = append(st, cur.Right)
		}
		if cur.Left != nil {
			st = append(st, cur.Left)
		}
	}
	fmt.Println(nums)
	return ""
}
// main builds the sample tree
//
//	    1
//	   / \
//	  2   3
//	 / \   \
//	4   5   6
//
// and runs the preorder traversal over it.
func main() {
	four := &TreeNode{Val: 4}
	five := &TreeNode{Val: 5}
	six := &TreeNode{Val: 6}
	root := &TreeNode{
		Val:   1,
		Left:  &TreeNode{Val: 2, Left: four, Right: five},
		Right: &TreeNode{Val: 3, Right: six},
	}
	tree2str(root)
}
|
package defaults
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/openshift/installer/pkg/types/ovirt"
)
// defaultPlatform returns the ovirt platform configuration that
// SetPlatformDefaults is expected to produce from an empty input:
// the default network name plus the compute and control-plane
// affinity groups.
func defaultPlatform() *ovirt.Platform {
	return &ovirt.Platform{
		NetworkName: DefaultNetworkName,
		AffinityGroups: []ovirt.AffinityGroup{
			defaultComputeAffinityGroup(),
			defaultControlPlaneAffinityGroup(),
		},
	}
}
// TestSetPlatformDefaults verifies that SetPlatformDefaults fills an
// ovirt.Platform with the expected default values.
func TestSetPlatformDefaults(t *testing.T) {
	cases := []struct {
		name     string
		platform *ovirt.Platform
		expected *ovirt.Platform
	}{
		{
			name:     "empty",
			platform: &ovirt.Platform{},
			expected: defaultPlatform(),
		},
		{
			// NOTE(review): this case is named "URL present" but its input
			// is identical to the "empty" case — no URL field is set, so it
			// duplicates the first case. Presumably it was meant to set a
			// URL on the input platform; confirm and fix.
			name:     "URL present",
			platform: &ovirt.Platform{},
			expected: func() *ovirt.Platform {
				p := defaultPlatform()
				return p
			}(),
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			SetPlatformDefaults(tc.platform)
			assert.Equal(t, tc.expected, tc.platform, "unexpected platform")
		})
	}
}
|
package state
import "fmt"
// State names reported by the concrete mario implementations.
const (
	SMALL = "small"
	SUPER = "super"
	FIRE  = "fire"
	DIE   = "die"
)

// Mario is the game role: a state object that reports its name and
// reacts to the three game events by mutating the context's state.
type Mario interface {
	State() string
	MeetMushroom(*MarioContext)
	MeetFireFlower(*MarioContext)
	MeetMonster(*MarioContext)
}

// defaultMario provides no-op implementations of every Mario method so
// concrete states only need to override the events they react to.
type defaultMario struct{}

func (*defaultMario) State() string                 { return "" }
func (*defaultMario) MeetMushroom(*MarioContext)    {}
func (*defaultMario) MeetFireFlower(*MarioContext)  {}
func (*defaultMario) MeetMonster(*MarioContext)     {}

// MarioContext is the state context of mario; it holds the current
// state object, which events may replace.
type MarioContext struct {
	mario Mario
}
// NewMarioContext initialises the context with a small (initial-state) mario.
func NewMarioContext() *MarioContext {
	// Use the constructor so the embedded defaultMario is populated,
	// consistent with the other state transitions (a bare &SmallMario{}
	// would leave the embedded *defaultMario nil).
	return &MarioContext{mario: NewSmallMario()}
}
// State reports the state name of the currently held mario.
func (ctx *MarioContext) State() string {
	return ctx.mario.State()
}

// transition logs the event around a state change: it prints the state
// before, applies the event, then prints the resulting state.
func (ctx *MarioContext) transition(event string, apply func(*MarioContext)) {
	fmt.Printf("%s meet %s ", ctx.mario.State(), event)
	apply(ctx)
	fmt.Printf(" --> %s\n", ctx.mario.State())
}

// MeetMushroom change the current mario's state
func (ctx *MarioContext) MeetMushroom() {
	ctx.transition("mushroom", func(c *MarioContext) { c.mario.MeetMushroom(c) })
}

// MeetFireFlower change the current mario's state
func (ctx *MarioContext) MeetFireFlower() {
	ctx.transition("fire flower", func(c *MarioContext) { c.mario.MeetFireFlower(c) })
}

// MeetMonster change the current mario's state
func (ctx *MarioContext) MeetMonster() {
	ctx.transition("monster", func(c *MarioContext) { c.mario.MeetMonster(c) })
}
// SmallMario is the normal (initial) state of the game role.
type SmallMario struct {
	*defaultMario
}

// NewSmallMario returns a mario in the small state.
func NewSmallMario() Mario {
	return &SmallMario{&defaultMario{}}
}

// State reports the small state name.
// Receivers are unnamed for consistency with the sibling state types
// (the original mixed `small` and unnamed receivers).
func (*SmallMario) State() string {
	return SMALL
}

// MeetMushroom changes the state to super.
func (*SmallMario) MeetMushroom(ctx *MarioContext) {
	ctx.mario = NewSuperMario()
}

// MeetFireFlower changes the state to fire.
func (*SmallMario) MeetFireFlower(ctx *MarioContext) {
	ctx.mario = NewFireFlowerMario()
}

// MeetMonster changes the state to die.
func (*SmallMario) MeetMonster(ctx *MarioContext) {
	ctx.mario = NewDieMario()
}
// SuperMario is the super state mario. It embeds the Mario interface;
// the constructor stores a defaultMario there, so any event it does not
// override (MeetMushroom) is a no-op.
type SuperMario struct {
	Mario
}

// NewSuperMario returns a mario in the super state.
func NewSuperMario() Mario {
	return &SuperMario{&defaultMario{}}
}

// State return the state of super mario
func (*SuperMario) State() string {
	return SUPER
}

// MeetFireFlower change the state to fire
func (*SuperMario) MeetFireFlower(ctx *MarioContext) {
	ctx.mario = NewFireFlowerMario()
}

// MeetMonster change the state to small
func (*SuperMario) MeetMonster(ctx *MarioContext) {
	ctx.mario = NewSmallMario()
}

// FireFlowerMario is the fire state mario. Mushroom and fire-flower
// events fall through to the embedded no-op defaultMario.
type FireFlowerMario struct {
	Mario
}

// NewFireFlowerMario returns a mario in the fire state.
func NewFireFlowerMario() Mario {
	return &FireFlowerMario{&defaultMario{}}
}

// State return the state of fire mario
func (*FireFlowerMario) State() string {
	return FIRE
}

// MeetMonster change the state to small
func (*FireFlowerMario) MeetMonster(ctx *MarioContext) {
	ctx.mario = NewSmallMario()
}

// DieMario is the terminal die state; every event falls through to the
// embedded no-op defaultMario, so nothing leaves this state.
type DieMario struct {
	Mario
}

// NewDieMario returns a mario in the die state.
func NewDieMario() Mario {
	return &DieMario{&defaultMario{}}
}

// State return the state of die mario
func (*DieMario) State() string {
	return DIE
}
|
package main
import (
	"bytes"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"

	"golang.org/x/net/html"
)
func main() {
resp, err := http.Get("http://www.zhihu.com/")
if err != nil {
log.Fatal(err)
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
fmt.Printf("%q", body)
z := html.NewTokenizer(resp.Body)
for {
tt := z.Next()
if tt == html.ErrorToken {
// ...
return ...
}
fmt.Printf("%q", z)
// Process the current token.
}
}
|
package user
import (
	"bytes"
	"database/sql"
	"encoding/json"
	"fmt"
	"sort"
	"strings"

	// Postgresql Driver
	_ "github.com/lib/pq"
)
// User is a model of a user record.
type User struct {
	ID       int    `json:"id"`
	Username string `json:"username"`
	Loginid  string `json:"loginid"`
	// Password is masked to '********' by the SELECT queries in this
	// package; it only carries a real value on Insert/UpdatePasswordByID.
	Password    string                 `json:"password"`
	Permissions map[string]interface{} `json:"permissions"`
}
// stringifyPermissions renders a permissions map as "key=bool," pairs.
// Keys are sorted so the output is deterministic (map iteration order is
// random), and non-bool values are skipped instead of panicking on a
// failed type assertion as the original did.
func stringifyPermissions(permissions *map[string]interface{}) string {
	keys := make([]string, 0, len(*permissions))
	for key := range *permissions {
		keys = append(keys, key)
	}
	sort.Strings(keys)
	b := new(bytes.Buffer)
	for _, key := range keys {
		if value, ok := (*permissions)[key].(bool); ok {
			fmt.Fprintf(b, "%s=%t,", key, value)
		}
	}
	return b.String()
}
// decodePermissions parses a JSON object into a permissions map.
// It returns nil when the input cannot be decoded.
func decodePermissions(permissonsjson string) *map[string]interface{} {
	var permissons map[string]interface{}
	dec := json.NewDecoder(strings.NewReader(permissonsjson))
	if err := dec.Decode(&permissons); err != nil {
		return nil
	}
	return &permissons
}
// String renders the user for logging/debugging. Password contains
// whatever the originating query put there (typically the '********'
// mask), and permissions are rendered via stringifyPermissions.
func (m *User) String() string {
	return fmt.Sprintf("{Id:%d, Username:%s, Loginid:%s, Password:%s, Permissions:%s}", m.ID, m.Username, m.Loginid, m.Password, stringifyPermissions(&m.Permissions))
}
// SelectAll returns every user record ordered by id; passwords are
// masked by the query itself.
func SelectAll(conn *sql.Tx) ([]User, error) {
	rows, err := conn.Query("SELECT id, username, loginid, '********' AS password, permissionsjson FROM s3web.users ORDER BY id FOR READ ONLY;")
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	users := make([]User, 0)
	for rows.Next() {
		user := User{}
		var permissionsjson string
		// Propagate scan/decode failures instead of silently appending
		// zero-valued records as the original did.
		if err := rows.Scan(&user.ID, &user.Username, &user.Loginid, &user.Password, &permissionsjson); err != nil {
			return nil, err
		}
		if err := json.Unmarshal([]byte(permissionsjson), &user.Permissions); err != nil {
			return nil, err
		}
		users = append(users, user)
	}
	// rows.Err surfaces any error encountered during iteration.
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return users, nil
}
// SelectByID returns the user with the given id (password masked by the
// query), or an error when the row is missing or cannot be decoded.
func SelectByID(conn *sql.Tx, id int) (*User, error) {
	row := conn.QueryRow("SELECT id, username, loginid, '********' AS password, permissionsjson FROM s3web.users WHERE id = $1 FOR READ ONLY;", id)
	user := User{}
	var permissionsjson string
	err := row.Scan(&user.ID, &user.Username, &user.Loginid, &user.Password, &permissionsjson)
	if err != nil {
		return nil, err
	}
	// Propagate a corrupt permissions payload instead of ignoring it.
	if err := json.Unmarshal([]byte(permissionsjson), &user.Permissions); err != nil {
		return nil, err
	}
	return &user, nil
}
// SelectForAuth tries to fetch a user by login id and password (hashed
// with SHA-256 inside the query). The stored password column is masked
// in the returned record.
func SelectForAuth(conn *sql.Tx, loginid string, password string) (*User, error) {
	row := conn.QueryRow("SELECT id, username, loginid, '********' AS password, permissionsjson FROM s3web.users WHERE loginid = $1 AND password_sha256 = digest($2, 'sha256')::varchar(256) FOR READ ONLY;", loginid, password)
	user := User{}
	var permissionsjson string
	err := row.Scan(&user.ID, &user.Username, &user.Loginid, &user.Password, &permissionsjson)
	if err != nil {
		return nil, err
	}
	// decodePermissions returns nil on malformed JSON; the original
	// dereferenced it unconditionally and would panic in that case.
	if perms := decodePermissions(permissionsjson); perms != nil {
		user.Permissions = *perms
	}
	return &user, nil
}
// Insert stores a new user record (password hashed inside the query)
// and returns the generated id.
func Insert(conn *sql.Tx, m *User) (int, error) {
	const query = "INSERT INTO s3web.users(username, loginid, password_sha256, permissionsjson, create_at, update_at) VALUES($1, $2, digest($3, 'sha256'), $4, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) RETURNING id;"
	// Marshal error intentionally ignored, matching existing behaviour:
	// an unmarshalable map simply stores empty JSON.
	permissionsjson, _ := json.Marshal(m.Permissions)
	var id int
	err := conn.QueryRow(query, m.Username, m.Loginid, m.Password, permissionsjson).Scan(&id)
	return id, err
}
// UpdateByID updates a user record (everything except the password) and
// returns the number of affected rows.
func UpdateByID(conn *sql.Tx, m *User) (int64, error) {
	const query = "UPDATE s3web.users SET username=$2, loginid=$3, permissionsjson=$4, update_at=CURRENT_TIMESTAMP WHERE id=$1;"
	// Marshal error intentionally ignored, matching existing behaviour.
	permissionsjson, _ := json.Marshal(m.Permissions)
	result, err := conn.Exec(query, m.ID, m.Username, m.Loginid, permissionsjson)
	if err != nil {
		return 0, err
	}
	return result.RowsAffected()
}
// UpdatePasswordByID updates only the password (hashed inside the
// query) of the record and returns the number of affected rows.
func UpdatePasswordByID(conn *sql.Tx, m *User) (int64, error) {
	const query = "UPDATE s3web.users SET password_sha256=digest($2, 'sha256'), update_at=CURRENT_TIMESTAMP WHERE id=$1;"
	result, err := conn.Exec(query, m.ID, m.Password)
	if err != nil {
		return 0, err
	}
	return result.RowsAffected()
}
// DeleteByID removes the record with the given id and returns the
// number of affected rows.
func DeleteByID(conn *sql.Tx, id int) (int64, error) {
	result, err := conn.Exec("DELETE FROM s3web.users WHERE id=$1;", id)
	if err != nil {
		return 0, err
	}
	return result.RowsAffected()
}
|
package main
import (
"fmt"
"github.com/glassechidna/trackiam/generator"
"os"
)
// main dispatches to the generate or publish sub-command.
func main() {
	// Require at least one argument (the sub-command); usage exits.
	if len(os.Args) < 2 {
		usage()
	}
	// BUG FIX: the sub-command is os.Args[1], not os.Args[2] — the
	// original indexed out of range for `prog generate`.
	switch os.Args[1] {
	case "generate":
		generator.Generate()
	case "publish":
		generator.Publish()
	default:
		usage()
	}
}
// usage prints the invocation synopsis and exits with a failure code.
func usage() {
	fmt.Printf("usage: %s generate|publish\n", os.Args[0])
	os.Exit(1)
}
|
package main
import (
"math/rand"
)
// Generator walks a random permutation of an id range, pairing each id
// with a freshly generated random checksum. Call prepare before Next.
type Generator struct {
	minID          uint64 // inclusive lower bound of the id range
	maxID          uint64 // upper bound; ids produced are < maxID
	checksumLength int    // length in bytes of each generated checksum
	src            rand.Source
	rand           *rand.Rand
	indices        []int // permutation of [0, maxID-minID)
	pos            int   // next index into indices
}

// Batch is a slice of generated ids plus their checksums keyed by id.
type Batch struct {
	IDs       []uint64
	Checksums map[uint64][]byte
}
// newGenerator builds a Generator over [minID, maxID) producing
// checksums of the given length, seeded from src. The caller must
// invoke prepare before iterating.
func newGenerator(minID, maxID uint64, checksumLength int, src rand.Source) *Generator {
	g := &Generator{
		minID:          minID,
		maxID:          maxID,
		checksumLength: checksumLength,
		src:            src,
	}
	g.rand = rand.New(src)
	return g
}
// prepare resets iteration and builds a fresh random permutation of the
// id range. The permutation has maxID-minID elements, so maxID itself is
// never produced (half-open range) — TODO confirm that is intended.
func (g *Generator) prepare() {
	g.pos = 0
	g.indices = g.rand.Perm(int(g.maxID - g.minID))
}

// Done reports whether the permutation has been exhausted.
func (g *Generator) Done() bool {
	return g.pos >= len(g.indices)
}
// Next yields the next permuted id along with a fresh random checksum.
// ok is false once the permutation is exhausted.
func (g *Generator) Next() (id uint64, checksum []byte, ok bool) {
	if g.pos >= len(g.indices) {
		return 0, nil, false
	}
	id = g.minID + uint64(g.indices[g.pos])
	g.pos++
	checksum = make([]byte, g.checksumLength)
	if _, err := g.rand.Read(checksum); err != nil {
		// rand.Rand.Read is documented to always return a nil error.
		panic(err)
	}
	return id, checksum, true
}
// CollectBatch drains up to batchSize ids (and their checksums) from
// the generator; the returned IDs slice is shortened if the generator
// runs out first.
func (g *Generator) CollectBatch(batchSize int) Batch {
	batch := Batch{
		IDs:       make([]uint64, batchSize),
		Checksums: make(map[uint64][]byte),
	}
	filled := 0
	for ; filled < batchSize; filled++ {
		id, checksum, ok := g.Next()
		if !ok {
			break
		}
		batch.IDs[filled] = id
		batch.Checksums[id] = checksum
	}
	batch.IDs = batch.IDs[:filled]
	return batch
}
|
package handler
import (
"fmt"
"memoapp/internal/database"
"memoapp/model"
"log"
"github.com/labstack/echo/v4"
)
type (
	// MemoHandler is the HTTP handler for memos; it delegates storage to
	// the database client and optionally serves/invalidates cached data.
	MemoHandler struct {
		HasCache bool
		Client   database.Client
		echo     *echo.Echo
	}
	// EndPointHandler is the common signature of memo endpoints: it
	// returns the raw response bytes or an error.
	EndPointHandler func(c echo.Context) ([]byte, error)
)

var (
	// pkgName tags log and error messages emitted by this package.
	pkgName = "handler"
)
// ProvideHandler wires the memo endpoints into the echo router and
// returns the handler. Endpoints flagged with cache are wrapped in the
// caching handler; others may clear the cache after responding.
func ProvideHandler(e *echo.Echo) *MemoHandler {
	hdr := &MemoHandler{echo: e}
	routes := []struct {
		method     string
		path       string
		callback   EndPointHandler
		cache      bool // whether to serve this endpoint from cache
		cacheClear bool // whether to reset the cache after responding
	}{
		{
			"GET",
			"/list",
			hdr.MemoIndex,
			true,
			false,
		},
		{
			"POST",
			"/",
			hdr.MemoCreate,
			false,
			true,
		},
		{
			"DELETE",
			"/",
			hdr.MemoDelete,
			false,
			true,
		},
	}
	for _, r := range routes {
		if r.cache {
			e.Add(r.method, r.path, hdr.cacheEndpointHandler(r.callback))
		} else {
			e.Add(r.method, r.path, hdr.endpointHandler(r.callback, r.cacheClear))
		}
	}
	// Previous explicit registrations, kept for reference:
	// e.GET("/list", hdr.cacheEndpointHandler(hdr.MemoIndex))
	// e.POST("/", hdr.endpointHandler(hdr.MemoCreate))
	// e.DELETE("/:id", hdr.endpointHandler(hdr.MemoDelete))
	return hdr
}
// MemoIndex returns the stored memos, filtered by the request's query
// parameters, as raw response bytes.
func (h *MemoHandler) MemoIndex(c echo.Context) ([]byte, error) {
	memos, err := h.Client.Get(c.Request().URL.Query())
	if err != nil {
		log.Printf("error: failed to Get memo data : %v\n", err)
		return nil, fmt.Errorf("failed to Get memo data: [%s]%w\n ", pkgName, err)
	}
	log.Printf("info: pkg=%s データ取得OK\n", pkgName)
	return memos, nil
}
// MemoCreate binds the request body to a memo, validates it, stores it,
// and returns the created memo as raw response bytes.
func (h *MemoHandler) MemoCreate(c echo.Context) ([]byte, error) {
	var (
		memo = &model.Memo{}
	)
	err := c.Bind(memo)
	if err != nil {
		log.Printf("error: 入力データに誤りがあります。:[%s] %v\n", pkgName, err)
		return nil, fmt.Errorf("failed to Bind request params :[%s] %v\n ", pkgName, err)
	}
	// Run model validation before persisting.
	err = memo.Validate()
	if err != nil {
		log.Printf("error: バリデーションでエラーが発生しました。:[%s] %v\n", pkgName, err)
		return nil, fmt.Errorf("validation error:[%s] %w\n ", pkgName, err)
	}
	memoData, err := h.Client.Set(memo)
	if err != nil {
		log.Printf("error: pkg=%s データ挿入エラー : %v\n", pkgName, err)
		return nil, fmt.Errorf("failed to insert memo data :[%s] %w\n ", pkgName, err)
	}
	log.Printf("info: pkg=%s データ作成OK\n", pkgName)
	return memoData, nil
}
// MemoDelete deletes the memo identified by the request's query
// parameters and returns the deleted memo id as raw response bytes.
func (h *MemoHandler) MemoDelete(c echo.Context) ([]byte, error) {
	// Former path-parameter based lookup, kept for reference:
	// id, err := strconv.Atoi(c.Param("id"))
	// if err != nil {
	// 	log.Printf("error: データ型の変換エラー(int) : pkg=%s %v\n", pkgName, err)
	// 	return nil, fmt.Errorf("failed to converted to type int :[%s] %w\n ", pkgName, err)
	// }
	memoID, err := h.Client.DEL(c.Request().URL.Query())
	if err != nil {
		log.Printf("error: データ削除エラー :[%s] %v\n", pkgName, err)
		return nil, fmt.Errorf("failed to delete memo data: [%s] %w\n ", pkgName, err)
	}
	log.Printf("info: pkg=%s データ削除OK", pkgName)
	return memoID, nil
}
|
package controller
import (
"net/http"
"github.com/gin-gonic/gin"
)
// Home responds with a liveness payload indicating the service is up.
func Home(c *gin.Context) {
	payload := gin.H{
		"code": 0,
		"msg":  "service running",
	}
	c.JSON(http.StatusOK, payload)
}
|
package server
import (
"bytes"
"context"
"testing"
"github.com/danielkvist/botio/proto"
"github.com/golang/protobuf/ptypes/empty"
)
// TestAddCommand exercises Server.AddCommand with and without a command.
// NOTE(review): the "without command" case passes a nil BotCommand and
// leaves expectedToFail unset, so a nil command is expected to succeed —
// confirm that is the intended contract.
func TestAddCommand(t *testing.T) {
	tt := []struct {
		name           string
		command        *proto.BotCommand
		expectedToFail bool
	}{
		{
			name: "without command",
		},
		{
			name: "with command",
			command: &proto.BotCommand{
				Cmd: &proto.Command{
					Command: "start",
				},
				Resp: &proto.Response{
					Response: "hi",
				},
			},
		},
	}
	for _, tc := range tt {
		t.Run(tc.name, func(t *testing.T) {
			s := testServer(t)
			_, err := s.AddCommand(context.TODO(), tc.command)
			if err != nil {
				// An expected failure skips rather than fails the subtest.
				if tc.expectedToFail {
					t.Skipf("add command operation failed as expected: %v", err)
				}
				t.Fatalf("while adding command: %v", err)
			}
			if tc.expectedToFail {
				t.Fatalf("add command operation not failed as expected")
			}
		})
	}
}
// TestGetCommand stores one command and verifies GetCommand returns the
// same command name and response.
func TestGetCommand(t *testing.T) {
	command := &proto.BotCommand{
		Cmd: &proto.Command{
			Command: "start",
		},
		Resp: &proto.Response{
			Response: "hi",
		},
	}
	s := testServer(t)
	if _, err := s.AddCommand(context.TODO(), command); err != nil {
		t.Fatalf("while adding command to the database: %v", err)
	}
	cmd, err := s.GetCommand(context.TODO(), command.GetCmd())
	if err != nil {
		t.Fatalf("while getting command %q: %v", command.GetCmd().GetCommand(), err)
	}
	if cmd.GetCmd().GetCommand() != command.GetCmd().GetCommand() {
		t.Fatalf("expected command %q. got=%q", command.GetCmd().GetCommand(), cmd.GetCmd().GetCommand())
	}
	if cmd.GetResp().GetResponse() != command.GetResp().GetResponse() {
		t.Fatalf("expected command with response %q. got=%q", command.GetResp().GetResponse(), cmd.GetResp().GetResponse())
	}
}
// TestListCommands stores two commands and verifies ListCommands
// returns exactly two entries.
func TestListCommands(t *testing.T) {
	commandOne := &proto.BotCommand{
		Cmd: &proto.Command{
			Command: "start",
		},
		Resp: &proto.Response{
			Response: "hi",
		},
	}
	commandTwo := &proto.BotCommand{
		Cmd: &proto.Command{
			Command: "end",
		},
		Resp: &proto.Response{
			Response: "goodbye",
		},
	}
	s := testServer(t)
	if _, err := s.AddCommand(context.TODO(), commandOne); err != nil {
		t.Fatalf("while adding command %q: %v", commandOne.GetCmd().GetCommand(), err)
	}
	if _, err := s.AddCommand(context.TODO(), commandTwo); err != nil {
		t.Fatalf("while adding command %q: %v", commandTwo.GetCmd().GetCommand(), err)
	}
	commands, err := s.ListCommands(context.TODO(), &empty.Empty{})
	if err != nil {
		t.Fatalf("while listing commands: %v", err)
	}
	if len(commands.GetCommands()) != 2 {
		// Fixed: the message printed the slice itself where a count
		// ("%v elements") was clearly intended.
		t.Fatalf("expected to get a list of commands with %v elements. got=%v elements", 2, len(commands.GetCommands()))
	}
}
// TestUpdateCommand stores a command, updates its response, and
// verifies the stored response changed.
func TestUpdateCommand(t *testing.T) {
	command := &proto.BotCommand{
		Cmd: &proto.Command{
			Command: "start",
		},
		Resp: &proto.Response{
			Response: "hi",
		},
	}
	newCommand := &proto.BotCommand{
		Cmd: &proto.Command{
			Command: "start",
		},
		Resp: &proto.Response{
			Response: "hello",
		},
	}
	s := testServer(t)
	if _, err := s.AddCommand(context.TODO(), command); err != nil {
		t.Fatalf("while adding command: %v", err)
	}
	if _, err := s.UpdateCommand(context.TODO(), newCommand); err != nil {
		t.Fatalf("while updating command: %v", err)
	}
	cmd, err := s.GetCommand(context.TODO(), command.GetCmd())
	if err != nil {
		// Fixed typo in the failure message: "comand" -> "command".
		t.Fatalf("while getting command %q: %v", command.GetCmd(), err)
	}
	if cmd.GetResp().GetResponse() != newCommand.GetResp().GetResponse() {
		t.Fatalf("expected command to have updated response %q. got=%q", newCommand.GetResp().GetResponse(), cmd.GetResp().GetResponse())
	}
}
// TestDeleteCommand stores a command, deletes it, and verifies the
// command list is empty afterwards.
func TestDeleteCommand(t *testing.T) {
	s := testServer(t)
	if _, err := s.AddCommand(context.TODO(), &proto.BotCommand{
		Cmd: &proto.Command{
			Command: "start",
		},
		Resp: &proto.Response{
			Response: "hi",
		},
	}); err != nil {
		t.Fatalf("while adding command: %v", err)
	}
	if _, err := s.DeleteCommand(context.TODO(), &proto.Command{Command: "start"}); err != nil {
		t.Fatalf("while deleting command: %v", err)
	}
	commands, err := s.ListCommands(context.TODO(), &empty.Empty{})
	if err != nil {
		t.Fatalf("while listing commands: %v", err)
	}
	if len(commands.GetCommands()) != 0 {
		t.Fatalf("expected no commands. got=%v commands", len(commands.GetCommands()))
	}
}
// testServer builds a Server backed by the in-memory test database and
// an insecure gRPC listener on an ephemeral port, connected and ready
// for use in tests.
func testServer(t *testing.T) Server {
	t.Helper()
	s, err := New(
		WithTestDB(),
		WithRistrettoCache(262144000),
		WithListener(":0"),
		WithInsecureGRPCServer(),
		WithJWTAuthToken("testing"),
		WithTextLogger(&bytes.Buffer{}),
	)
	if err != nil {
		t.Fatalf("while creating a new Server for testing: %v", err)
	}
	if err := s.Connect(); err != nil {
		t.Fatalf("while connecting Server for testing to its database: %v", err)
	}
	return s
}
|
package main
import "fmt"
// fib sums the even-valued Fibonacci numbers strictly below n
// (Project Euler problem 2).
func fib(n int) int {
	sum := 0
	for a, b := 1, 1; a < n; a, b = b, a+b {
		if a%2 == 0 {
			sum += a
		}
	}
	return sum
}
// main prints the Project Euler #2 answer for the 4,000,000 limit.
func main() {
	fmt.Println(fib(4000000))
}
package main
import (
"net/http"
"net/http/httptest"
"testing"
"time"
)
/*
Wraping up
* select
* Helps you wait on multiple channels.
* Sometimes you'll want to include time.
After in one of your cases to prevent your system blocking forever.
* httptest
* A convenient way of creating test servers so you can have reliable and controllable tests.
* Using the same interfaces as the "real" net/http servers which is consistent and less for you to learn.
*/
// TestRacer checks that Racer returns the faster server's URL and that
// ConfigurableRacer errors when neither server answers within the timeout.
func TestRacer(t *testing.T) {
	t.Run("compares speeds of servers, returning the url of the faster", func(t *testing.T) {
		slowServer := makeDelayedServer(20 * time.Millisecond)
		fastServer := makeDelayedServer(0 * time.Millisecond)
		defer slowServer.Close()
		defer fastServer.Close()
		slowURL := slowServer.URL
		fastURL := fastServer.URL
		want := fastURL
		got, _ := Racer(slowURL, fastURL)
		if got != want {
			t.Errorf("got %q, want %q", got, want)
		}
	})
	t.Run("returns an error if a server doesn't respond within 10s", func(t *testing.T) {
		// NOTE(review): the server delay equals the configured timeout
		// (20ms), which can be flaky on a loaded machine — consider a
		// clearly larger delay. The "10s" in the subtest name presumably
		// refers to the production default timeout; confirm.
		server := makeDelayedServer(20 * time.Millisecond)
		defer server.Close()
		_, err := ConfigurableRacer(server.URL, server.URL, 20*time.Millisecond)
		if err == nil {
			// Fixed typo in the failure message: "and error" -> "an error".
			t.Error("expected an error but didn't get one")
		}
	})
}
// makeDelayedServer returns a test HTTP server that sleeps for delay
// before answering 200 OK.
func makeDelayedServer(delay time.Duration) *httptest.Server {
	handler := func(rw http.ResponseWriter, r *http.Request) {
		time.Sleep(delay)
		rw.WriteHeader(http.StatusOK)
	}
	return httptest.NewServer(http.HandlerFunc(handler))
}
|
package main
import (
"os"
"bufio"
"errors"
"fmt"
)
// writeFile creates (or truncates) filename, wrapping it in a buffered
// writer that is flushed before the file closes. Nothing is written
// yet; the function panics if the file cannot be created.
func writeFile(filename string) {
	f, err := os.Create(filename)
	if err != nil {
		panic(err)
	}
	defer f.Close()
	// Flush runs before Close thanks to LIFO defer ordering.
	w := bufio.NewWriter(f)
	defer w.Flush()
}
// main is the entry point; it prints a greeting.
func main() {
	fmt.Println("who are u...");
	// NOTE(review): this error value is constructed and immediately
	// discarded — it has no effect. Presumably a placeholder; either use
	// it or remove it (removal would also orphan the "errors" import).
	errors.New("this is customer error...")
}
|
package test_string
import (
"fmt"
"github.com/golang/example/stringutil"
)
func ExampleReverse() {
fmt.Println(stringutil.Reverse("HOLA"))
// Output: ALOH!
} |
package microsvc
import (
"sync"
"github.com/go-kit/kit/log"
"github.com/hashicorp/consul/api"
"errors"
"github.com/hathbanger/microsvc-base/pkg/microsvc/models"
)
const (
	// ServiceName - name of the service
	ServiceName = "microsvc-base"
)

var (
	// ErrMarshal - error for UnMarshalling
	ErrMarshal = errors.New("could not marshal request")
	// ErrRequest - error if a request cannot be created
	ErrRequest = errors.New("could not create request")
	// ErrToken - error if a token is not present or valid
	ErrToken = errors.New("token is invalid or empty")
	// ErrSize - error for provisioning size configurations
	ErrSize = errors.New("requested provisioning size not found")
	// ErrDNS - error for DNS lookups
	ErrDNS = errors.New("hostnames in use")
	// Arch - the build arch (injected at build time — TODO confirm ldflags)
	Arch string
	// APIVersion - the api version
	APIVersion string
	// BuildTime - the build time
	BuildTime string
	// GitCommit - the git commit
	GitCommit string
	// Name - the service name
	Name = "microsvc-base"
	// Ver - the service version
	Ver string
)

//go:generate gobin -m -run github.com/maxbrunsfeld/counterfeiter/v6 . Service

// service is the concrete Service implementation holding configuration,
// a logger, and a mutex guarding shared state.
type service struct {
	mut    *sync.Mutex
	config *models.Config
	logger log.Logger
}
// New returns a Service wired with the given configuration and logger.
func New(config *models.Config, logger log.Logger) Service {
	svc := service{
		mut:    &sync.Mutex{},
		config: config,
		logger: logger,
	}
	return svc
}
// name reports the canonical service name.
func (s service) name() string {
	return ServiceName
}

// Health reports service liveness; always true in this implementation.
func (s service) Health() bool {
	return true
}

// Service - interface into service methods
type Service interface {
	Health() bool
	ServiceDiscovery(string, string) (*api.Client, *api.AgentServiceRegistration, error)
	// interfaceDeclaration.txt
}
|
package main
import "fmt"
// func1 multiplies a and b (each parameter typed individually).
func func1(a int, b int) int {
	return a * b
}

// func2 multiplies a and b (shared type declaration for parameters).
func func2(a, b int) int {
	return a * b
}
// func3 returns both the sum and the product of its two arguments.
func func3(a, b int) (int, int) {
	return a + b, a * b
}
// func4 demonstrates named result parameters with a naked return;
// kept in this form deliberately since that is the example's point.
func func4(a, b int) (sum, mul int) {
	sum = a + b
	mul = a * b
	return
}

// func5 prints the sum of its arguments and returns nothing.
func func5(a, b int) {
	fmt.Println("\nanswer is:", a+b)
}
// main exercises each example function and prints the results.
func main() {
	fmt.Printf("func1 gives %v", func1(2, 2))
	fmt.Printf("\nfunc2 gives %v", func2(2, 2))
	val1, val2 := func3(2, 2)
	fmt.Printf("\nfunc3 gives %v and %v", val1, val2)
	val1, val2 = func4(2, 2)
	fmt.Printf("\nfunc4 gives %v and %v", val1, val2)
	func5(2, 2)
	val1, _ = func3(3, 3) //second value discarded via the blank identifier
	fmt.Println("val1 is:", val1)
}
|
package main
import (
"bufio"
"bytes"
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"log"
"math"
"math/rand"
"net/http"
"os"
"sort"
"strings"
"sync"
"time"
)
var (
	numUser    = flag.Int("numuser", 1, "number of users hitting simultaneously")
	numSec     = flag.Float64("numsec", 10, "number number of seconds each user streams requests")
	serverAddr = flag.String("ip", ":10001", "IP addr of server")
	// Per-user result channels, drained by main after all runs finish.
	countC   chan int     // requests completed per user
	jsonP    chan float64 // NOTE(review): never written in this chunk
	serverP  chan float64 // NOTE(review): never written in this chunk
	parsingP chan float64 // NOTE(review): never written in this chunk
	totalP   chan float64 // total server-side latency per user, seconds
	latC     chan float64 // per-request total latency samples, seconds
	// Uid lists loaded from files at startup; query targets.
	actors, directors []string
	serverList        []string
)
// Query templates: a uid is concatenated between the *1 and *2 halves.
// qa* query an actor's films; qd* query a director's film genres; the
// *xl variants fan out through co-stars for a much heavier query.
var qa1 = `{
	debug(func: uid(`
var qa2 = `)) {
	name@en
	actor.film {
		performance.film {
			name@en
		}
	}
}
}`
var qa1xl = `{
	debug(func: uid(`
var qa2xl = `)) {
	name@en
	actor.film {
		performance.film {
			name@en
			starring {
				performance.actor {
					name@en
					actor.film {
						performance.film {
							name@en
						}
					}
				}
			}
		}
	}
}
}`
var qd1 = `{
	debug(func: uid(`
var qd2 = `)) {
	name@en
	director.film {
		genre {
			name@en
		}
	}
}
}`
var qd1xl = `{
	debug(func: uid(`
var qd2xl = `)) {
	name@en
	director.film {
		genre {
			name@en
		}
		starring {
			performance.actor {
				name@en
				actor.film {
					performance.film {
						name@en
					}
				}
			}
		}
	}
}
}`
// runUser streams random actor/director queries at the servers for
// *numSec seconds, pushing each request's server-side latency onto latC
// and, on exit, the user's request count onto countC and total latency
// onto totalP.
// NOTE(review): proT/jsonT/parT are accumulated but never reported;
// also jsonT accumulates "parsing_ns" and parT "processing_ns" — the
// names look swapped. Confirm which accumulator means what.
func runUser(wg *sync.WaitGroup) {
	var proT, parT, jsonT, totT time.Duration
	var count int
	client := &http.Client{Transport: &http.Transport{
		MaxIdleConnsPerHost: 100,
	}}
	var dat map[string]interface{}
	var latency map[string]interface{}
	var extensions map[string]interface{}
	tix := time.Now()
	for time.Now().Sub(tix).Seconds() < *numSec {
		// Pick an actor or director query at random.
		var choose = rand.Intn(2)
		var query string
		if choose == 1 {
			var ridx = rand.Intn(len(actors))
			query = qa1 + actors[ridx] + qa2
		} else {
			var ridx = rand.Intn(len(directors))
			query = qd1 + directors[ridx] + qd2
		}
		r, _ := http.NewRequest("POST", serverList[rand.Intn(len(serverList))], bytes.NewBufferString(query))
		resp, err := client.Do(r)
		count++
		if err != nil {
			log.Fatal(err)
		} else {
			body, err := ioutil.ReadAll(resp.Body)
			if err != nil {
				log.Fatalf("Couldn't parse response body. %+v", err)
			}
			resp.Body.Close()
			err = json.Unmarshal(body, &dat)
			if err != nil {
				log.Fatal(err)
			}
			var ok bool
			// Pull the server-reported latency breakdown out of the
			// response's "extensions.server_latency" object.
			temp := dat["extensions"]
			extensions, ok = temp.(map[string]interface{})
			if !ok {
				log.Print("no 'extensions' in response data")
				log.Fatalf("%#v", dat)
			}
			temp = extensions["server_latency"]
			latency, ok = temp.(map[string]interface{})
			if !ok {
				log.Print("no 'server_latency' in extension data'")
				log.Fatalf("%#v", extensions)
			}
			pro := time.Duration(latency["encoding_ns"].(float64)) * time.Nanosecond
			proT += pro
			js := time.Duration(latency["parsing_ns"].(float64)) * time.Nanosecond
			jsonT += js
			par := time.Duration(latency["processing_ns"].(float64)) * time.Nanosecond
			parT += par
			tot := pro + js + par
			totT += tot
			latC <- tot.Seconds()
		}
	}
	countC <- count
	totalP <- totT.Seconds()
	wg.Done()
}
// main loads the uid lists, runs three rounds of *numUser concurrent
// query streamers, then aggregates and prints throughput and latency
// statistics.
// NOTE(review): serverP is never written, so serTi stays 0 and meanLat
// is always 0 — the sdLat computed below is therefore a root-mean-square
// of latencies, not a standard deviation. Also, after close(latC) the
// drain goroutine may still be appending to allLat while main reads it
// (a data race); and the "95 percentile" line actually prints the median
// followed by the 95th percentile. Confirm and fix upstream.
func main() {
	flag.Parse()
	var meanLat, sdLat, serTi, jsonTi, parTi, totTi float64
	var totCount int
	var wg sync.WaitGroup
	var allLat []float64
	serverList = strings.Split(*serverAddr, ",")
	actorfile, err := os.Open("listofactors_uid")
	directorfile, err1 := os.Open("listofdirectors_uid")
	if err != nil || err1 != nil {
		// Silently exits if either uid file is missing.
		return
	}
	defer actorfile.Close()
	defer directorfile.Close()
	scanner := bufio.NewScanner(actorfile)
	for scanner.Scan() {
		actors = append(actors, scanner.Text())
	}
	scanner = bufio.NewScanner(directorfile)
	for scanner.Scan() {
		directors = append(directors, scanner.Text())
	}
	// Buffered so workers never block on result delivery.
	countC = make(chan int, 5*(*numUser))
	serverP = make(chan float64, 5*(*numUser))
	totalP = make(chan float64, 5*(*numUser))
	parsingP = make(chan float64, 5*(*numUser))
	jsonP = make(chan float64, 5*(*numUser))
	latC = make(chan float64, 100000)
	// Drain per-request latency samples into allLat in the background.
	go func() {
		for t := range latC {
			allLat = append(allLat, t)
		}
	}()
	wg.Add(*numUser)
	fmt.Println("First run")
	for i := 0; i < *numUser; i++ {
		go runUser(&wg)
	}
	wg.Wait()
	time.Sleep(1 * time.Second)
	wg.Add(*numUser)
	fmt.Println("Second run")
	for i := 0; i < *numUser; i++ {
		go runUser(&wg)
	}
	wg.Wait()
	time.Sleep(1 * time.Second)
	wg.Add(*numUser)
	fmt.Println("Third run")
	for i := 0; i < *numUser; i++ {
		go runUser(&wg)
	}
	wg.Wait()
	// All workers done; close every channel so the range loops below end.
	close(countC)
	close(serverP)
	close(parsingP)
	close(jsonP)
	close(totalP)
	close(latC)
	fmt.Println("DONE!")
	for it := range countC {
		totCount += it
	}
	for it := range serverP {
		serTi += it
	}
	for it := range parsingP {
		parTi += it
	}
	for it := range jsonP {
		jsonTi += it
	}
	for it := range totalP {
		totTi += it
	}
	meanLat = serTi / float64(totCount)
	for _, it := range allLat {
		sdLat += math.Pow((it - meanLat), 2)
	}
	sort.Float64s(allLat)
	sdLat = math.Sqrt(sdLat / float64(len(allLat)-1))
	fmt.Println("------------------------------------------------------------------------")
	fmt.Println("\nNumUser :", *numUser)
	fmt.Println("Throughput (num request per second) : ", float64(totCount)/(3*(*numSec)))
	fmt.Println("Total number of queries : ", totCount)
	fmt.Println("Avg time (ms) : ", 1000*totTi/float64(totCount))
	fmt.Println("95 percentile latency : ", 1000*allLat[int(len(allLat)/2)], 1000*allLat[int(95*len(allLat)/100)])
	fmt.Println("Min, Max : ", 1000*allLat[0], 1000*allLat[len(allLat)-1])
	fmt.Println("------------------------------------------------------------------------")
}
|
package main
import "fmt"
import "time"
// main demonstrates select over two producer channels with a 100ms
// timeout case that counts consecutive idle ticks; it runs until the
// user presses enter.
func main() {
	var c1 = make(chan string)
	var c2 = make(chan string)
	go func() {
		for {
			c1 <- "from 1"
			time.Sleep(time.Second * 3)
		}
	}()
	go func() {
		for {
			c2 <- "from 2"
			time.Sleep(time.Second * 2)
		}
	}()
	go func() {
		// i counts timeouts since the last received message.
		i := 0
		for {
			select {
			case msg1 := <-c1:
				fmt.Println("[1]", msg1, i)
				i = 0
			case msg2 := <-c2:
				fmt.Println("[2]", msg2, i)
				i = 0
			case <-time.After(time.Millisecond * 100):
				// Fixed typo in the printed message: "occured" -> "occurred".
				fmt.Printf("Timeout occurred %d\n", i)
				i++
			}
		}
	}()
	// Block until the user presses enter.
	// https://www.golang-book.com/books/intro/11
	var input string
	fmt.Scanln(&input)
}
|
package main
import (
"bytes"
"flag"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"strconv"
"time"
"github.com/garyburd/redigo/redis"
)
// getRedisConnect dials redisURL and returns the live connection.
// On failure the process exits via log.Fatal.
func getRedisConnect(redisURL string) redis.Conn {
	conn, err := redis.DialURL(redisURL)
	if err != nil {
		log.Fatal(err)
		return nil // unreachable: log.Fatal exits the process
	}
	return conn
}
// doGetClientCount reads key as an integer counter from redis.
// Any redis or parse failure is treated as a count of zero.
func doGetClientCount(c redis.Conn, key string) int64 {
	raw, err := redis.String(c.Do("GET", key))
	if err != nil {
		return 0
	}
	count, perr := strconv.ParseInt(raw, 10, 0)
	if perr != nil {
		return 0
	}
	return count
}
// getClientCount sums all per-node "websocket_clients_count_*" counters.
// With vvv set it also prints each key/count pair.
func getClientCount(conn redis.Conn, vvv bool) int64 {
	keys, err := redis.Strings(conn.Do("KEYS", "websocket_clients_count_*"))
	if err != nil {
		log.Fatal(err)
		return 0 // unreachable: log.Fatal exits the process
	}
	var total int64
	for _, key := range keys {
		n := doGetClientCount(conn, key)
		if vvv {
			fmt.Println(key, n)
		}
		total += n
	}
	return total
}
// doReport pushes the current client count to the monitoring endpoint as a
// single GAUGE metric (Open-Falcon style push payload).
func doReport(monitorURL string, count int64, step int64) {
	json := `[{"metric":"%s","endpoint":"%s","timestamp":%d,"step":%d,"value":%d,"counterType":"GAUGE","tags":"%s"}]`
	metric := "beeper_mpp.connection.current_client_count"
	hostname, _ := os.Hostname()
	timestamp := time.Now().Unix()
	tags := "project=beeper_mpp,module=master,value=client_count"
	content := fmt.Sprintf(json, metric, hostname, timestamp, step, count, tags)
	log.Println(content)
	// A timeout prevents a stalled monitor from pinning the reporter
	// goroutine forever (doReport is spawned once per tick by main).
	client := &http.Client{Timeout: 30 * time.Second}
	req, err := http.NewRequest("POST", monitorURL, bytes.NewBufferString(content))
	if err != nil {
		log.Fatal(err)
		return
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := client.Do(req)
	if err != nil {
		log.Fatal(err)
		return
	}
	// Fix: the original never closed the response body, leaking a
	// connection on every report interval.
	defer resp.Body.Close()
	body, _ := ioutil.ReadAll(resp.Body)
	if resp.StatusCode != 200 {
		log.Println(string(body))
	}
}
// main parses flags, then forever reports the aggregate websocket client
// count from redis to the monitoring endpoint every -s seconds.
func main() {
	var h bool
	var v bool
	var s string
	var r string
	var m string
	flag.BoolVar(&h, "h", false, "帮助信息")
	flag.BoolVar(&v, "v", false, "显示更多细节信息")
	flag.StringVar(&s, "s", "10", "向监控系统实时发送连接数的频率,单位(秒)")
	flag.StringVar(&r, "r", "redis://127.0.0.1:6379", "Redis连接URL")
	flag.StringVar(&m, "m", "http://127.0.0.1:1988", "监控系统服务地址")
	flag.Parse()
	if h {
		flag.PrintDefaults()
		return
	}
	ss, err := time.ParseDuration(s + "s")
	if err != nil {
		log.Fatal(err)
		return
	}
	var conn = getRedisConnect(r)
	defer conn.Close()
	var step, _ = strconv.ParseInt(s, 10, 0)
	// Idiomatic periodic loop; the original used a `start:` label + goto.
	for {
		go doReport(m, getClientCount(conn, v), step)
		time.Sleep(ss)
	}
}
|
package main
import (
"fmt"
"math/rand"
"time"
)
// main runs a search with an 8-second deadline and prints the winning
// backend's answer (or a timeout notice).
func main() {
	result, err := searchTimeout("test", time.Second*8)
	if err != nil {
		fmt.Println("time out")
	}
	fmt.Println(result)
}
// searchTimeout queries mongo, elec and google concurrently and returns the
// first answer to arrive, or an error if none arrives within t.
//
// NOTE(review): the losing backends' goroutines block forever on their
// unbuffered channels once a winner (or the timeout) is picked — goroutine
// leak; consider channels with capacity 1 in the backend helpers.
func searchTimeout(kw string, t time.Duration) (string, error) {
	// Every case of this select terminates, so no code may follow it; the
	// original's trailing `return "", nil` was unreachable (flagged by vet)
	// and has been removed.
	select {
	case v := <-mongo(kw):
		return v, nil
	case v := <-elec(kw):
		return v, nil
	case v := <-google(kw):
		return v, nil
	case <-time.After(t):
		return "", fmt.Errorf("search time out")
	}
}
// search performs a non-blocking poll of the three backends: it returns
// whichever result is already available, or "" after printing "error" when
// none is ready (the default case fires immediately).
//
// NOTE(review): each backend call spawns a goroutine that sends on an
// unbuffered channel; when the default case is taken, all three goroutines
// block forever — goroutine leak. Since the backends sleep before sending,
// the default branch is the likely outcome in practice.
func search(kw string) string {
	select {
	case v := <-mongo(kw):
		return v
	case v := <-elec(kw):
		return v
	case v := <-google(kw):
		return v
	default:
		fmt.Println("error")
	}
	return ""
}
// mongo kicks off a simulated mongo lookup and returns the channel on which
// the (slow: 9s) result will eventually be delivered.
func mongo(kw string) chan string {
	fmt.Println("mongo search...")
	out := make(chan string)
	go func() {
		time.Sleep(9 * time.Second)
		out <- "mongo"
	}()
	return out
}
// elec kicks off a simulated elec lookup and returns the channel on which
// the result will be delivered after a random 0-4s delay.
func elec(kw string) chan string {
	fmt.Println("elec search...")
	out := make(chan string)
	go func() {
		time.Sleep(time.Duration(rand.Intn(5)) * time.Second)
		out <- "elec"
	}()
	return out
}
// google kicks off a simulated google lookup and returns the channel on
// which the result will be delivered after a random 0-4s delay.
func google(kw string) chan string {
	fmt.Println("google search...")
	out := make(chan string)
	go func() {
		time.Sleep(time.Duration(rand.Intn(5)) * time.Second)
		out <- "google"
	}()
	return out
}
|
package cmd
import (
"fmt"
"strconv"
"amru.in/cli/db"
"github.com/spf13/cobra"
)
// doCmd represents the do command
var doCmd = &cobra.Command{
	Use:   "do",
	Short: "Marks the to-do tasks as complete",
	Run: func(cmd *cobra.Command, args []string) {
		// Parse each argument as a numeric task id, reporting bad ones.
		var idx []int
		for _, val := range args {
			id, err := strconv.Atoi(val)
			if err != nil {
				fmt.Println(val, " is not a valid task id")
			} else {
				idx = append(idx, id)
			}
		}
		tasks, err := db.ListTaskItems()
		if err != nil {
			fmt.Println("Some error occured")
			return
		}
		for _, val := range idx {
			// Fix: the original indexed tasks[val-1] unchecked and
			// panicked on ids outside 1..len(tasks).
			if val < 1 || val > len(tasks) {
				fmt.Println(val, " is not a valid task id")
				continue
			}
			db.DeleteItem(tasks[val-1].Id)
		}
	},
}
// init wires the "do" subcommand into the root command tree at start-up.
func init() {
	RootCmd.AddCommand(doCmd)
}
|
package main
import (
"fmt"
"sort"
)
// 40. 组合总和 II
// 给定一个数组 candidates 和一个目标数 target ,找出 candidates 中所有可以使数字和为 target 的组合。
// candidates 中的每个数字在每个组合中只能使用一次。
// 说明:
// 所有数字(包括目标数)都是正整数。
// 解集不能包含重复的组合。
// https://leetcode-cn.com/problems/combination-sum-ii/
// main exercises combinationSum2 with the sample case from the problem
// statement and prints the resulting combinations.
func main() {
	result := combinationSum2([]int{10, 1, 2, 7, 6, 1, 5}, 8)
	fmt.Println(result)
}
// 回溯
func combinationSum2(candidates []int, target int) (result [][]int) {
if len(candidates) == 0 {
return
}
sort.Ints(candidates)
if candidates[0] > target {
return
}
combinationHelper(candidates, []int{}, 0, target, &result)
return result
}
func combinationHelper(candidates, cur []int, startIndex, target int, result *[][]int) {
if target == 0 {
tmp := make([]int, len(cur))
copy(tmp, cur)
*result = append(*result, tmp)
return
}
n := len(cur)
for i := startIndex; i < len(candidates); i++ {
if candidates[i] > target {
break
}
if i > startIndex && candidates[i] == candidates[i-1] {
continue
}
cur = append(cur, candidates[i])
combinationHelper(candidates, cur, i+1, target-candidates[i], result)
cur = cur[:n]
}
}
|
package repository
import "github.com/majid-cj/go-docker-mongo/domain/entity"
// MemberRepository abstracts persistence operations for entity.Member.
type MemberRepository interface {
	// CreateMember persists a new member and returns the stored record.
	CreateMember(*entity.Member) (*entity.Member, error)
	// DeleteMember removes the member identified by the given id.
	DeleteMember(string) error
	// GetMembers returns all members.
	GetMembers() ([]entity.Member, error)
	// GetMember looks a single member up by id.
	GetMember(string) (*entity.Member, error)
	// GetMembersByType returns members matching the given type code.
	GetMembersByType(uint8) ([]entity.Member, error)
	// GetMemberByEmailAndPassword fetches the member matching the email and
	// password carried in the argument — presumably a login lookup; verify
	// against the implementation.
	GetMemberByEmailAndPassword(*entity.Member) (*entity.Member, error)
	// UpdatePassword stores a new password for the member.
	UpdatePassword(*entity.Member) error
}
|
/*
Description
We'll call the consecutive distance rating of an integer sequence the sum of the distances between consecutive integers.
Consider the sequence 1 7 2 11 8 34 3. 1 and 2 are consecutive integers, but their distance apart in the sequence is 2.
2 and 3 are consecutive integers, and their distance is 4. The distance between 7 and 8 is 3. The sum of these distances is 9.
Your task is to find and display the consecutive distance rating of a number of integer sequences.
Input description
You'll be given two integers a and b on the first line denoting the number of sequences that follow and the length of those sequences, respectively.
You'll then be given a integer sequences of length b, one per line. The integers will always be unique and range from 1 to 100 inclusive.
Example input
6 11
31 63 53 56 96 62 73 25 54 55 64
77 39 35 38 41 42 76 73 40 31 10
30 63 57 87 37 31 58 83 34 76 38
18 62 55 92 88 57 90 10 11 96 12
26 8 7 25 52 17 45 64 11 35 12
89 57 21 55 56 81 54 100 22 62 50
Output description
Output each consecutive distance rating, one per line.
Example output
26
20
15
3
6
13
Challenge input
6 20
76 74 45 48 13 75 16 14 79 58 78 82 46 89 81 88 27 64 21 63
37 35 88 57 55 29 96 11 25 42 24 81 82 58 15 2 3 41 43 36
54 64 52 39 36 98 32 87 95 12 40 79 41 13 53 35 48 42 33 75
21 87 89 26 85 59 54 2 24 25 41 46 88 60 63 23 91 62 61 6
94 66 18 57 58 54 93 53 19 16 55 22 51 8 67 20 17 56 21 59
6 19 45 46 7 70 36 2 56 47 33 75 94 50 34 35 73 72 39 5
Notes / hints
Be careful that your program doesn't double up the distances. Consider the sequence 1 2. An incorrect algorithm might see 1 -> 2 and 2 -> 1 as two separate distances, resulting in a (wrong) consecutive distance rating of 2.
Visually, you should think of distances like this and not like that.
Bonus
Modify your program to work with any size gap between integers. For instance, we might want to find the distance rating of integers with a gap of 2, such as 1 and 3 or 7 and 9 rather than consecutive integers with a gap of 1.
Credit
This challenge was authored by /u/chunes, many thanks!
Have a good challenge idea? Consider submitting it to r/dailyprogrammer_ideas.
*/
package main
// main verifies distance against the example, challenge, and sample inputs
// from the problem statement using the local assert helper.
func main() {
	cases := []struct {
		seq  []int
		want int
	}{
		{[]int{1, 7, 2, 11, 8, 34, 3}, 9},
		{[]int{31, 63, 53, 56, 96, 62, 73, 25, 54, 55, 64}, 26},
		{[]int{77, 39, 35, 38, 41, 42, 76, 73, 40, 31, 10}, 20},
		{[]int{30, 63, 57, 87, 37, 31, 58, 83, 34, 76, 38}, 15},
		{[]int{18, 62, 55, 92, 88, 57, 90, 10, 11, 96, 12}, 3},
		{[]int{26, 8, 7, 25, 52, 17, 45, 64, 11, 35, 12}, 6},
		{[]int{89, 57, 21, 55, 56, 81, 54, 100, 22, 62, 50}, 13},
		{[]int{76, 74, 45, 48, 13, 75, 16, 14, 79, 58, 78, 82, 46, 89, 81, 88, 27, 64, 21, 63}, 31},
		{[]int{37, 35, 88, 57, 55, 29, 96, 11, 25, 42, 24, 81, 82, 58, 15, 2, 3, 41, 43, 36}, 68},
		{[]int{54, 64, 52, 39, 36, 98, 32, 87, 95, 12, 40, 79, 41, 13, 53, 35, 48, 42, 33, 75}, 67},
		{[]int{21, 87, 89, 26, 85, 59, 54, 2, 24, 25, 41, 46, 88, 60, 63, 23, 91, 62, 61, 6}, 52},
		{[]int{94, 66, 18, 57, 58, 54, 93, 53, 19, 16, 55, 22, 51, 8, 67, 20, 17, 56, 21, 59}, 107},
		{[]int{6, 19, 45, 46, 7, 70, 36, 2, 56, 47, 33, 75, 94, 50, 34, 35, 73, 72, 39, 5}, 45},
	}
	for _, c := range cases {
		assert(distance(c.seq) == c.want)
	}
}
// assert panics when its argument is false; a minimal test helper for the
// expected-value checks in main.
func assert(x bool) {
	if x {
		return
	}
	panic("assertion failed")
}
// distance computes the consecutive distance rating of a: the sum of index
// distances between each pair of values differing by exactly 1. Each pair is
// counted once because sum only considers partner indices that come later.
func distance(a []int) int {
	positions := map[int][]int{}
	for idx, val := range a {
		positions[val] = append(positions[val], idx)
	}
	total := 0
	for idx, val := range a {
		total += sum(positions[val+1], idx) + sum(positions[val-1], idx)
	}
	return total
}

// sum adds the forward gaps j-i for every index j in a that lies after i;
// earlier indices are ignored to avoid double-counting pairs.
func sum(a []int, i int) int {
	total := 0
	for _, j := range a {
		if j > i {
			total += j - i
		}
	}
	return total
}
|
package main
import (
"io/ioutil"
"os"
"strings"
smartling "github.com/Smartling/api-sdk-go"
"github.com/reconquest/hierr-go"
)
// readFilesFromStdin reads newline-separated file URIs from stdin and wraps
// each non-empty line in a smartling.File.
func readFilesFromStdin() ([]smartling.File, error) {
	input, err := ioutil.ReadAll(os.Stdin)
	if err != nil {
		return nil, hierr.Errorf(err, "unable to read stdin")
	}
	var files []smartling.File
	for _, uri := range strings.Split(string(input), "\n") {
		if uri == "" {
			continue // skip blank lines (including the trailing one)
		}
		files = append(files, smartling.File{FileURI: uri})
	}
	return files, nil
}
|
/**
* Hastie - Static Site Generator
* https://github.com/mkaz/hastie
*/
package main
import (
"bytes"
"flag"
"fmt"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"strings"
"text/template"
"time"
"github.com/mkaz/hastie/pkg/logger"
"github.com/mkaz/hastie/pkg/utils"
)
var log logger.Logger
var config Config
// Page is the main page object: one instance per source file, carrying the
// rendered content plus all navigation/metadata made available to templates.
type Page struct {
	Title          string
	Content        string
	Date           time.Time
	Category       string
	SimpleCategory string // Category with path separators replaced by "_"
	Layout         string // template name (without .html); empty means the default
	OutFile        string
	Extension      string
	Url            string
	// Prev/Next navigation across the whole recent list.
	PrevUrl   string
	PrevTitle string
	NextUrl   string
	NextTitle string
	// Prev/Next navigation restricted to pages in the same category.
	PrevCatUrl   string
	PrevCatTitle string
	NextCatUrl   string
	NextCatTitle string
	Order        int
	Params       map[string]string
	// Shared lists injected before template execution (see main).
	AllPages   *PageList
	Recent     *PageList
	Categories *CategoryList
	Unlisted   bool // excluded from AllPages when true
	SourceFile string
	Config     Config
}
// main drives the hastie build: parse flags and config, collect source
// pages, sort and cross-link them, render each through its template into
// the publish directory, run configured process filters, and copy the
// theme's static assets.
func main() {
	var helpFlag = flag.Bool("help", false, "show this help")
	var versionFlag = flag.Bool("version", false, "Display version and quit")
	var noMarkdown = flag.Bool("nomarkdown", false, "do not use markdown conversion")
	var noPrism = flag.Bool("noprism", false, "do not use Prism syntax highlighting")
	var useAsciinema = flag.Bool("asciinema", false, "do use Asciinema player")
	var configFile = flag.String("config", "hastie.json", "Config file")
	flag.BoolVar(&log.DebugLevel, "debug", false, "Debug output (verbose)")
	flag.BoolVar(&log.Verbose, "verbose", false, "Show info level")
	flag.Parse()
	if *helpFlag {
		flag.Usage()
		os.Exit(0)
	}
	if *versionFlag {
		fmt.Println("hastie v0.9.2")
		os.Exit(0)
	}
	// Command-line switches override the values loaded from the config file.
	config = setupConfig(*configFile)
	if *noMarkdown {
		config.UseMarkdown = false
	}
	if *noPrism {
		config.UsePrism = false
	}
	if *useAsciinema {
		config.UseAsciinema = true
	}
	// get pages and directories
	pages, dirs := getSiteFiles(config.SourceDir)
	allPages := filterUnlisted(pages)
	By(orderOrder).Sort(allPages)
	allPagesPointer := &allPages
	// recent list is a sorted list of all pages with dates
	// (Reverse after a date sort — presumably newest-first; confirm in PageList)
	By(dateOrder).Sort(pages)
	recentList := pages.Reverse()
	recentListPointer := &recentList
	// category list is sorted map of pages by category
	categoryList := getCategoryList(recentListPointer)
	categoryListPointer := &categoryList
	// functions made available to templates
	funcMap := template.FuncMap{
		"trim":    utils.TrimSlash,
		"Title":   strings.Title,
		"ToLower": strings.ToLower,
		"ToUpper": strings.ToUpper,
	}
	// read and parse all template files
	layoutsglob := config.LayoutDir + "/*.html"
	ts, err := template.New("master").Funcs(funcMap).ParseGlob(layoutsglob)
	if err != nil {
		log.Fatal("Error Parsing Templates: ", err)
	}
	// loop through each page
	// add extra data to page to be available to template
	// apply templates and write out generated files
	for _, page := range pages {
		// add recent pages lists to page object
		page.AllPages = allPagesPointer
		page.Recent = recentListPointer
		page.Categories = categoryListPointer
		// add prev-next links
		page.buildPrevNextLinks(recentListPointer)
		page.Params["BaseURL"] = config.BaseURL
		// applyTemplate to page
		buffer, err := applyTemplate(ts, page)
		if err != nil {
			log.Warn("Error applying template", err)
			continue
		}
		// confirm directory exists
		writedir := filepath.Join(config.PublishDir, page.Category)
		log.Debug(" Writing Directory:", writedir)
		os.MkdirAll(writedir, 0755) // does nothing if already exists
		// write out file
		outfile := filepath.Join(config.PublishDir, page.OutFile)
		log.Debug(" Writing File:", outfile)
		ioutil.WriteFile(outfile, []byte(buffer.String()), 0644)
	}
	/* ******************************************
	 * Process Filters
	 * proces filters are a mapping of file extensions to commands
	 * and an output extensions. find files with extension, run
	 * command which should spit out text and create new file.extension
	 * For example: Less CSS or CoffeeSript
	 * ****************************************** */
	for ext, filter := range config.ProcessFilters {
		extStart := "." + ext
		extEnd := "." + filter[1]
		for _, dir := range dirs {
			readglob := dir + "/*" + extStart
			var dirfiles, _ = filepath.Glob(readglob)
			for _, file := range dirfiles {
				var cmd *exec.Cmd
				// apply process filter command, capture output
				// (filter[2:] are extra command-line options, if present)
				if len(filter) > 2 {
					opts := append(filter[2:], file)
					cmd = exec.Command(filter[0], opts...)
				} else {
					cmd = exec.Command(filter[0], file)
				}
				output, err := cmd.Output()
				if err != nil {
					log.Warn("Error Process Filter: "+file, err)
					continue
				}
				// determine output file path and extension
				// (strip the first path component — the source dir — then
				// re-root under the publish dir)
				outfile := file[strings.Index(file, string(os.PathSeparator))+1:]
				outfile = filepath.Join(config.PublishDir, outfile)
				outfile = strings.Replace(outfile, extStart, extEnd, 1)
				ioutil.WriteFile(outfile, output, 0644)
			}
		}
	}
	/* ******************************************
	 * Copy Theme Static Folder
	 * if a static directory exists in the theme, copy to publish/static
	 * TODO: process less files within theme
	 * ****************************************** */
	staticDir := config.LayoutDir + "/static"
	if utils.FileExists(staticDir) {
		// NOTE(review): shells out to `cp -rf`, so this is POSIX-only.
		cmd := exec.Command("cp", "-rf", config.LayoutDir+"/static", config.PublishDir)
		cmdErr := cmd.Run()
		if cmdErr != nil {
			log.Warn("Error copying theme's static dir")
		}
	}
} // main
/* ************************************************
* Build Category List
* - return a map containing a list of pages for
each category, the key being category name
* ************************************************ */
// getCategoryList maps pages to their categories, preserving the order of
// the input list (callers pass the already-sorted recent list). Categories
// listed in config.CategoryMash are additionally merged into a combined
// pseudo-category.
func getCategoryList(pages *PageList) CategoryList {
	mapList := make(CategoryList)
	// recentList is passed in which is already sorted
	// just need to map the pages to category
	// read category mash config, which allows to create
	// a new category based on combining multiple categories
	// this is used on my site when I want to display a list
	// of recent items from similar categories together
	reverseMap := make(map[string]string)
	// config consists of a hash with new category being the
	// key and a comma separated list of existing categories
	// being the value, create a reverse map
	for k, v := range config.CategoryMash {
		cats := strings.Split(string(v), ",")
		//loop through split and add to reverse map
		for _, cat := range cats {
			reverseMap[cat] = string(k)
		}
	}
	for _, page := range *pages {
		// create new category from category mash map
		// (only when the page's category is actually mashed; unmapped
		// categories look up to "" which never equals the category)
		if reverseMap[page.Category] != page.Category {
			thisCategory := reverseMap[page.Category]
			mapList[thisCategory] = append(mapList[thisCategory], page)
		}
		// still want a list of regular categories
		// simpleCategory replaces / in sub-dir categories to _
		// this always the category to be referenced in template
		simpleCategory := strings.Replace(page.Category, string(os.PathSeparator), "_", -1)
		mapList[simpleCategory] = append(mapList[simpleCategory], page)
	}
	return mapList
}
/* ************************************************
* Add Prev Next Links to Page Object
* ************************************************ */
// buildPrevNextLinks fills the page's Next*/Prev* navigation fields by
// scanning recentList: Next* point at the entry just before the page in the
// list, Prev* at the entry just after, and the *Cat variants restrict the
// scan to pages sharing the page's Category.
//
// NOTE(review): the page is located by Title equality, so duplicate titles
// would match the first occurrence — assumes titles are unique; confirm.
func (page *Page) buildPrevNextLinks(recentList *PageList) {
	foundPage := false
	nextPage := Page{}
	prevPage := Page{}
	nextPageCat := Page{}
	prevPageCat := Page{}
	lastPageCat := Page{}
	for i, rp := range *recentList {
		if rp.Category == page.Category {
			// First same-category entry AFTER the match is the category
			// "prev" link; once found we can stop scanning.
			if foundPage {
				prevPageCat = rp
				break
			}
		}
		if rp.Title == page.Title {
			foundPage = true
			// lastPageCat holds the most recent same-category entry seen
			// BEFORE the match, i.e. the category "next" link.
			nextPageCat = lastPageCat
			if i != 0 {
				nextPage = recentList.Get(i - 1)
			}
			if i+1 < recentList.Len() {
				prevPage = recentList.Get(i + 1)
			}
		}
		if rp.Category == page.Category {
			lastPageCat = rp // previous page
		}
	}
	// Unset links fall back to the zero Page's empty strings.
	page.NextUrl = nextPage.Url
	page.NextTitle = nextPage.Title
	page.PrevUrl = prevPage.Url
	page.PrevTitle = prevPage.Title
	page.NextCatUrl = nextPageCat.Url
	page.NextCatTitle = nextPageCat.Title
	page.PrevCatUrl = prevPageCat.Url
	page.PrevCatTitle = prevPageCat.Title
}
// applyTemplate renders page through its layout template ("post.html" when
// the page specifies none) and returns the generated output.
func applyTemplate(ts *template.Template, page Page) (*bytes.Buffer, error) {
	buffer := new(bytes.Buffer)
	// pick layout based on specified in file
	templateFile := "post.html"
	if page.Layout != "" {
		templateFile = page.Layout + ".html"
	}
	if !utils.FileExists(filepath.Join(config.LayoutDir, templateFile)) {
		return nil, fmt.Errorf("Missing template file %s", templateFile)
	}
	// Fix: surface template execution failures instead of silently
	// returning a possibly partial buffer (the original discarded this
	// error).
	if err := ts.ExecuteTemplate(buffer, templateFile, page); err != nil {
		return nil, err
	}
	return buffer, nil
}
// filterUnlisted returns only those pages not marked Unlisted.
func filterUnlisted(pages PageList) (filtered PageList) {
	for i := range pages {
		if pages[i].Unlisted {
			continue
		}
		filtered = append(filtered, pages[i])
	}
	return
}
|
package queue
// Queue is a minimal FIFO queue abstraction.
type Queue interface {
	// EnQueue appends an item to the back of the queue.
	EnQueue(interface{})
	// DeQueue removes and returns the item at the front of the queue.
	DeQueue() interface{}
}
package rsvc
import (
"fmt"
"sort"
)
// SvmServices groups registered services into ServiceOrder buckets keyed by
// their configured stage (runlevel).
type SvmServices struct {
	services map[uint8]*ServiceOrder
}
// NewSvmServices returns an empty, ready-to-use service registry.
func NewSvmServices() *SvmServices {
	return &SvmServices{
		services: map[uint8]*ServiceOrder{},
	}
}
// AddService registers service under its configured stage, creating the
// stage's ServiceOrder bucket on first use.
func (svs *SvmServices) AddService(service InidService) {
	// Hoisted: the original called GetServiceConfiguration() three times
	// and looked the map up twice.
	stage := service.GetServiceConfiguration().Stage
	order, ok := svs.services[stage]
	if !ok {
		order = NewServiceOrder()
		svs.services[stage] = order
	}
	order.AddSevice(service)
}
// GetStages lists every stage that currently has at least one registered
// service. Order is unspecified (map iteration order).
func (svs *SvmServices) GetStages() []uint8 {
	stages := make([]uint8, 0, len(svs.services))
	for stage := range svs.services {
		stages = append(stages, stage)
	}
	return stages
}
// GetServiceByName scans every stage bucket for a service whose configured
// name matches, returning an error when none is found.
func (svs *SvmServices) GetServiceByName(name string) (InidService, error) {
	for _, order := range svs.services {
		for _, svc := range order.services {
			if svc.GetServiceConfiguration().GetName() == name {
				return svc, nil
			}
		}
	}
	// Error strings start lowercase per Go convention (staticcheck ST1005);
	// the original capitalized "Service".
	return nil, fmt.Errorf("service '%s' not found", name)
}
// GetRunlevels returns the ServiceOrder buckets sorted by ascending stage.
func (svs *SvmServices) GetRunlevels() []*ServiceOrder {
	keys := make([]int, 0, len(svs.services))
	for k := range svs.services {
		keys = append(keys, int(k))
	}
	sort.Ints(keys)
	ordered := make([]*ServiceOrder, 0, len(keys))
	for _, k := range keys {
		ordered = append(ordered, svs.services[uint8(k)])
	}
	return ordered
}
|
// I'm sure programs already exist to do this, but this is my implementation of a hash-based integrity checker for downloaded binaries, to encourage me to double check more often. - vkraven
// Version 0.2 - SmartChkk implemented
// SmartChkk allows the checksums to be generated only when required. This makes chkk perform better on mobile or embedded chips checking the integrity of larger files.
// SmartChkk also implements a smarter way to parse checksum files. Now chkk scans by words on a newline sentence, instead of by sentence line.
package main
import (
"crypto/sha256"
"crypto/sha1"
"crypto/md5"
"fmt"
"bufio"
"os"
"io"
"log"
"encoding/hex"
"strings"
"unicode"
)
// check panics on any non-nil error; convenience guard for fatal setup
// failures in main.
func check(e error) {
	if e == nil {
		return
	}
	panic(e)
}
// splitter reports whether c is a word separator for checksum-file parsing:
// any rune that is neither a letter nor a number.
func splitter(c rune) bool {
	switch {
	case unicode.IsLetter(c), unicode.IsNumber(c):
		return false
	default:
		return true
	}
}
// main verifies the integrity of os.Args[1] against os.Args[2], where the
// second argument may be a literal SHA256/MD5/SHA1 hex digest (detected by
// length 64/32/40) or the path of a checksum-containing text file whose
// words are scanned for matching digests. Digests are computed lazily so
// each hash is only calculated when a candidate of that length appears.
func main() {
	// How-to
	if len(os.Args) != 3 {
		fmt.Printf("Usage: chkk <File to be checked> <Hash checksum>\n")
		os.Exit(0)
	}
	// Open file to be checked (relative paths are resolved against "./")
	file1 := "./"
	if os.Args[1][0] == '/' {
		file1 = os.Args[1]
	} else {
		file1 = file1 + os.Args[1]
	}
	fileinput, err := os.Open(file1)
	check(err)
	defer fileinput.Close()
	// Generate checksums. SmartChkk will do so lazily
	// Declare vars for smartchkk
	var data []byte     // raw SHA256 digest
	var deeone []byte   // raw SHA1 digest
	var mdeefive []byte // raw MD5 digest
	data1 := ""         // hex SHA256; "" means not yet computed
	dee1 := ""          // hex SHA1
	dee5 := ""          // hex MD5
	// Each generator streams the file through its hash and then rewinds the
	// file so a later generator can read it again from the start.
	generatesha256 := func() {
		shainfo := sha256.New()
		if _, err := io.Copy(shainfo, fileinput); err != nil {
			log.Fatal(err)
		}
		data = shainfo.Sum(nil)
		data1 = hex.EncodeToString(data)
		_, err = fileinput.Seek(0, 0)
		check(err)
	}
	generatesha1 := func() {
		shaone := sha1.New()
		if _, err := io.Copy(shaone, fileinput); err != nil {
			log.Fatal(err)
		}
		deeone = shaone.Sum(nil)
		dee1 = hex.EncodeToString(deeone)
		// fmt.Printf("%x\n", deeone) remnants of debugging
		// fmt.Printf("%s\n", dee1)
		_, err = fileinput.Seek(0, 0)
		check(err)
	}
	generatemd5 := func() {
		mdee5 := md5.New()
		if _, err := io.Copy(mdee5, fileinput); err != nil {
			log.Fatal(err)
		}
		mdeefive = mdee5.Sum(nil)
		dee5 = hex.EncodeToString(mdeefive)
		// fmt.Printf("%x\n", mdeefive) remnants of debugging
		// fmt.Printf("%s\n", dee5)
		_, err = fileinput.Seek(0, 0)
		check(err)
	}
	// First attempt: treat os.Args[2] as a literal digest, dispatched on
	// its hex length. "any" records whether any check has passed.
	any := false
	comparefile := "./"
	if os.Args[2][0] == '/' {
		comparefile = os.Args[2]
	} else if len(os.Args[2]) == 64 {
		if data1 == "" {
			generatesha256()
		}
		if strings.ToLower(os.Args[2]) == strings.ToLower(data1) {
			fmt.Printf("SHA256:\tPassed.\tHash: %x\n", data)
			any = true
		}
	} else if len(os.Args[2]) == 32 {
		if dee5 == "" {
			generatemd5()
		}
		if strings.ToLower(os.Args[2]) == strings.ToLower(dee5) {
			fmt.Printf("MD5:\tPassed.\tHash: %x\n", mdeefive)
			any = true
		}
	} else if len(os.Args[2]) == 40 {
		if dee1 == "" {
			generatesha1()
		}
		if strings.ToLower(os.Args[2]) == strings.ToLower(dee1) {
			fmt.Printf("SHA1:\tPassed.\tHash: %x\n", deeone)
			any = true
		}
	}
	// Second attempt: treat os.Args[2] as a checksum file and scan its
	// words for digests of a recognized length.
	if any == false {
		comparefile = comparefile + os.Args[2]
		compare, err := os.Open(comparefile)
		if err != nil {
			fmt.Printf("%s failed to verify file %s\n", os.Args[2], os.Args[1])
			fmt.Printf("Assuming %s is a checksum-containing text file:\n\tCould not open file %s\n\n", os.Args[2], os.Args[2])
			fmt.Printf("All verification tests failed. ALERT.\n")
			os.Exit(2)
		}
		defer compare.Close()
		scanee := bufio.NewScanner(compare)
		for scanee.Scan() {
			wordlist := strings.FieldsFunc(scanee.Text(), splitter)
			// fmt.Printf("Fields are: %q\n", wordlist) remnants of debugging
			for _, word := range wordlist {
				// fmt.Printf("Current word is %s\n", word) remnants of debugging
				length := len(word)
				// fmt.Printf("Current word length is %d\n", length) remnants of debugging
				if length == 64 {
					if data1 == "" {
						generatesha256()
					}
					if strings.ToLower(word) == strings.ToLower(data1) {
						fmt.Printf("SHA256:\tPassed.\tHash: %x\n", data)
						any = true
					}
				} else if length == 32 {
					if dee5 == "" {
						generatemd5()
					}
					if strings.ToLower(word) == strings.ToLower(dee5) {
						fmt.Printf("MD5:\tPassed.\tHash: %x\n", mdeefive)
						any = true
					}
				} else if length == 40 {
					if dee1 == "" {
						generatesha1()
					}
					if strings.ToLower(word) == strings.ToLower(dee1) {
						fmt.Printf("SHA1:\tPassed.\tHash: %x\n", deeone)
						any = true
					}
				}
			}
		}
		if any == false {
			fmt.Printf("All verification tests failed. ALERT.\n")
		}
	} else {
		os.Exit(0)
	}
}
|
package info
// generated from http://mervine.net/json2struct
// CollectionNameAndSize pairs a collection's name with its total storage
// size as reported by the workspace API.
type CollectionNameAndSize struct {
	Name             string  `json:"name"`
	TotalStorageSize float64 `json:"totalStorageSize"`
}
// WorkspaceInfo mirrors the workspace-info JSON payload (struct generated
// from http://mervine.net/json2struct; field semantics come from the API,
// not from this code).
type WorkspaceInfo struct {
	ApiHalted            interface{}             `json:"apiHalted"`
	Collections          []CollectionNameAndSize `json:"collections"`
	Database             string                  `json:"database"`
	EarliestRevisionDate string                  `json:"earliestRevisionDate"`
	EtlDate              interface{}             `json:"etlDate"`
	Halted               bool                    `json:"halted"`
	// LastRebuild describes the most recent rebuild operation.
	LastRebuild struct {
		Data struct {
			EarliestRevisionDate          string  `json:"earliestRevisionDate"`
			ElapsedOperationTimeInSeconds float64 `json:"elapsedOperationTimeInSeconds"`
			LastDataRefreshTimestamp      string  `json:"lastDataRefreshTimestamp"`
			RevisionsRemainingInQueue     float64 `json:"revisionsRemainingInQueue"`
		} `json:"data"`
		OperationType  string  `json:"operationType"`
		SpecifiedDate  string  `json:"specifiedDate"`
		SubscriptionId float64 `json:"subscriptionId"`
		Timestamp      string  `json:"timestamp"`
		WorkspaceOid   float64 `json:"workspaceOid"`
	} `json:"lastRebuild"`
	// Metadata carries one entry per collection with its Mongo-style _id.
	Metadata []struct {
		ID struct {
			Class      string  `json:"class"`
			Inc        float64 `json:"inc"`
			Machine    float64 `json:"machine"`
			New        bool    `json:"new"`
			Time       float64 `json:"time"`
			TimeSecond float64 `json:"timeSecond"`
		} `json:"_id"`
		Name   string `json:"name"`
		Status struct {
			LastRevisionDate string `json:"lastRevisionDate"`
		} `json:"status"`
	} `json:"metadata"`
	RevisionsInQueue      float64 `json:"revisionsInQueue"`
	Subscription          float64 `json:"subscription"`
	SubscriptionName      string  `json:"subscriptionName"`
	TotalStorageSize      float64 `json:"totalStorageSize"`
	Workspace             float64 `json:"workspace"`
	WorkspaceCreationDate string  `json:"workspaceCreationDate"`
}
// HaltedWorkspaceInfo mirrors the JSON event emitted when a workspace is
// halted, including the halt reason and whether health checks should fail.
type HaltedWorkspaceInfo struct {
	Data struct {
		Reason string `json:"reason"`
	} `json:"data"`
	HealthCheckShouldFail bool    `json:"healthCheckShouldFail"`
	SubscriptionId        float64 `json:"subscriptionId"`
	Timestamp             string  `json:"timestamp"`
	WorkspaceOid          float64 `json:"workspaceOid"`
}
|
// Copyright 2021 Clivern. All rights reserved.
// Use of this source code is governed by the MIT
// license that can be found in the LICENSE file.
package definition
import (
"fmt"
"strings"
"testing"
"github.com/franela/goblin"
)
// TestUnitMemcached test cases
func TestUnitMemcached(t *testing.T) {
	g := goblin.Goblin(t)

	g.Describe("#TestMemcached", func() {
		g.It("It should satisfy all provided test cases", func() {
			memcached := GetMemcachedConfig("memcached", "")

			result, err := memcached.ToString()

			// Fix: assert the error before consuming result, so a
			// conversion failure is reported directly instead of as a
			// confusing substring mismatch on an empty string.
			g.Assert(err).Equal(nil)
			g.Assert(strings.Contains(result, fmt.Sprintf("image: %s", fmt.Sprintf("%s:%s", MemcachedDockerImage, MemcachedDockerImageVersion)))).Equal(true)
			g.Assert(strings.Contains(result, fmt.Sprintf(`- "%s"`, MemcachedPort))).Equal(true)
			g.Assert(strings.Contains(result, fmt.Sprintf("restart: %s", MemcachedRestartPolicy))).Equal(true)
		})
	})
}
|
package grpc
import (
"context"
"log"
"net"
"github.com/BENSARI-Fathi/cni/v1/pb"
"google.golang.org/grpc"
)
var socketFile = "/tmp/my-ipam.sock"
// UnixConnect dials the fixed local IPAM unix-domain socket; it is used as
// the custom dialer for grpc.WithContextDialer. Both parameters are ignored
// because the target socket path is the package-level socketFile.
func UnixConnect(context.Context, string) (net.Conn, error) {
	unixAddress, err := net.ResolveUnixAddr("unix", socketFile)
	if err != nil {
		// Fix: the original discarded this error, which would surface
		// later as a confusing failure inside DialUnix with a nil address.
		return nil, err
	}
	return net.DialUnix("unix", nil, unixAddress)
}
// NewGrpcClient dials the IPAM gRPC server over the unix-domain socket (via
// the custom UnixConnect dialer) and returns a ready client.
// NOTE(review): grpc.Dial is typically non-blocking, so connection problems
// tend to surface on the first RPC rather than trip log.Fatalf here — verify
// against the grpc version in use.
func NewGrpcClient() pb.IpamClient {
	conn, err := grpc.Dial(socketFile, grpc.WithInsecure(), grpc.WithContextDialer(UnixConnect))
	if err != nil {
		log.Fatalf("Error while opening Unix connexion %s", err.Error())
	}
	return pb.NewIpamClient(conn)
}
|
package docker
import (
"context"
"encoding/json"
"io/ioutil"
"os"
"path/filepath"
"testing"
"github.com/devspace-cloud/devspace/pkg/util/fsutil"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/registry"
dockerclient "github.com/docker/docker/client"
"gopkg.in/yaml.v2"
"gotest.tools/assert"
)
// fakeDockerClient embeds the real docker client and overrides the handful
// of API calls these tests exercise with canned, deterministic responses.
type fakeDockerClient struct {
	dockerclient.Client
}
// Info returns a fixed IndexServerAddress so registry-endpoint resolution
// is deterministic under test.
func (f *fakeDockerClient) Info(ctx context.Context) (types.Info, error) {
	return types.Info{
		IndexServerAddress: "IndexServerAddress",
	}, nil
}
// Ping always reports a reachable daemon (zero-value Ping, nil error).
func (f *fakeDockerClient) Ping(ctx context.Context) (types.Ping, error) {
	return types.Ping{}, nil
}
// RegistryLogin fakes a successful login; it returns the identity token
// "someToken" only when the password is the sentinel "useToken", letting
// tests cover both the token and non-token paths.
func (f *fakeDockerClient) RegistryLogin(ctx context.Context, auth types.AuthConfig) (registry.AuthenticateOKBody, error) {
	identityToken := ""
	if auth.Password == "useToken" {
		identityToken = "someToken"
	}
	return registry.AuthenticateOKBody{
		IdentityToken: identityToken,
	}, nil
}
// ImageList returns a single canned image ("deleteThis") regardless of the
// supplied options.
func (f *fakeDockerClient) ImageList(ctx context.Context, options types.ImageListOptions) ([]types.ImageSummary, error) {
	return []types.ImageSummary{
		{
			ID: "deleteThis",
		},
	}, nil
}
// ImageRemove pretends the canned image was deleted and untagged, ignoring
// the requested image name and options.
func (f *fakeDockerClient) ImageRemove(ctx context.Context, image string, options types.ImageRemoveOptions) ([]types.ImageDeleteResponseItem, error) {
	return []types.ImageDeleteResponseItem{
		{
			Deleted:  "deleteThis",
			Untagged: "deleteThis",
		},
	}, nil
}
// getRegistryEndpointTestCase describes one GetRegistryEndpoint scenario:
// the registry URL to resolve and the expected (isDefault, endpoint, error)
// outcome.
type getRegistryEndpointTestCase struct {
	name              string
	registryURL       string // empty means "use the daemon's default registry"
	expectedIsDefault bool
	expectedEndpoint  string
	expectedErr       bool
}
// TestGetRegistryEndpoint verifies endpoint resolution against the fake
// docker client: no URL resolves to the daemon's IndexServerAddress (the
// default), while an explicit URL is passed through unchanged.
func TestGetRegistryEndpoint(t *testing.T) {
	testCases := []getRegistryEndpointTestCase{
		{
			name:              "Use auth server",
			expectedIsDefault: true,
			expectedEndpoint:  "IndexServerAddress",
		},
		{
			name:              "Use custom server",
			registryURL:       "custom",
			expectedIsDefault: false,
			expectedEndpoint:  "custom",
		},
	}
	for _, testCase := range testCases {
		client := &client{
			&fakeDockerClient{},
		}
		isDefault, endpoint, err := client.GetRegistryEndpoint(testCase.registryURL)
		if !testCase.expectedErr {
			assert.NilError(t, err, "Unexpected error in testCase %s", testCase.name)
		} else if err == nil {
			t.Fatalf("Unexpected error %v in testCase %s", err, testCase.name)
		}
		assert.Equal(t, isDefault, testCase.expectedIsDefault, "Unexpected isDefault bool in testCase %s", testCase.name)
		assert.Equal(t, endpoint, testCase.expectedEndpoint, "Unexpected endpoint in testCase %s", testCase.name)
	}
}
// getAuthConfigTestCase describes one GetAuthConfig scenario: fixture files
// to write (path -> JSON content), the registry to query, and the expected
// auth config or error.
type getAuthConfigTestCase struct {
	name                  string
	files                 map[string]interface{}
	registryURL           string
	checkCredentialsStore bool
	expectedAuthConfig    *types.AuthConfig
	expectedErr           bool
}
// TestGetAuthConfig verifies that GetAuthConfig resolves the correct server
// address for both the default registry and a custom one. It runs inside a
// throw-away temp working directory (also used as the docker configDir) so
// credential files never touch the real environment, and compares results
// via their YAML serialization to get structural equality.
func TestGetAuthConfig(t *testing.T) {
	testCases := []getAuthConfigTestCase{
		{
			name:                  "Use default server",
			checkCredentialsStore: true,
			expectedAuthConfig: &types.AuthConfig{
				ServerAddress: "IndexServerAddress",
			},
		},
		{
			name:                  "Use custom server",
			registryURL:           "http://custom",
			checkCredentialsStore: true,
			expectedAuthConfig: &types.AuthConfig{
				ServerAddress: "custom",
			},
		},
	}
	// Create and enter a temporary working directory; the deferred cleanup
	// below restores the original directory and removes the temp one.
	dir, err := ioutil.TempDir("", "test")
	if err != nil {
		t.Fatalf("Error creating temporary directory: %v", err)
	}
	wdBackup, err := os.Getwd()
	if err != nil {
		t.Fatalf("Error getting current working directory: %v", err)
	}
	err = os.Chdir(dir)
	if err != nil {
		t.Fatalf("Error changing working directory: %v", err)
	}
	// Resolve symlinks (e.g. /tmp on macOS) so later path comparisons match.
	dir, err = filepath.EvalSymlinks(dir)
	if err != nil {
		t.Fatal(err)
	}
	defer func() {
		err = os.Chdir(wdBackup)
		if err != nil {
			t.Fatalf("Error changing dir back: %v", err)
		}
		err = os.RemoveAll(dir)
		if err != nil {
			t.Fatalf("Error removing dir: %v", err)
		}
	}()
	configDir = dir
	for _, testCase := range testCases {
		// Lay down this case's fixture files as JSON.
		for path, content := range testCase.files {
			asJSON, err := json.Marshal(content)
			assert.NilError(t, err, "Error parsing content to json in testCase %s", testCase.name)
			if content == "" {
				asJSON = []byte{}
			}
			err = fsutil.WriteToFile(asJSON, path)
			assert.NilError(t, err, "Error writing file in testCase %s", testCase.name)
		}
		client := &client{
			&fakeDockerClient{},
		}
		auth, err := client.GetAuthConfig(testCase.registryURL, testCase.checkCredentialsStore)
		if !testCase.expectedErr {
			assert.NilError(t, err, "Unexpected error in testCase %s", testCase.name)
		} else if err == nil {
			t.Fatalf("Unexpected error %v in testCase %s", err, testCase.name)
		}
		// Compare actual vs expected through YAML serialization.
		authAsYaml, err := yaml.Marshal(auth)
		assert.NilError(t, err, "Error parsing authConfig to yaml in testCase %s", testCase.name)
		expectedAsYaml, err := yaml.Marshal(testCase.expectedAuthConfig)
		assert.NilError(t, err, "Error parsing expection to yaml in testCase %s", testCase.name)
		assert.Equal(t, string(authAsYaml), string(expectedAsYaml), "Unexpected authConfig in testCase %s", testCase.name)
		// Wipe fixture files so test cases stay independent.
		err = filepath.Walk(".", func(path string, f os.FileInfo, err error) error {
			os.RemoveAll(path)
			return nil
		})
		assert.NilError(t, err, "Error cleaning up in testCase %s", testCase.name)
	}
}
// loginTestCase describes one Login scenario: fixture files, the registry
// and credentials to log in with, the store/save/relogin flags forwarded to
// Login, and the expected resulting auth config or error.
type loginTestCase struct {
	name                  string
	files                 map[string]interface{}
	registryURL           string
	user                  string
	password              string // "useToken" makes the fake client return an identity token
	checkCredentialsStore bool
	saveAuthConfig        bool
	relogin               bool
	expectedAuthConfig    *types.AuthConfig
	expectedErr           bool
}
// TestLogin runs table-driven tests against client.Login. Each case writes
// its fixture files into a fresh temporary working directory, invokes
// Login, and compares the returned auth config (serialized to YAML for a
// readable deep diff) against the expected value.
func TestLogin(t *testing.T) {
	testCases := []loginTestCase{
		{
			name:                  "Use default server",
			checkCredentialsStore: true,
			saveAuthConfig:        true,
			user:                  "user",
			password:              "useToken",
			expectedAuthConfig: &types.AuthConfig{
				ServerAddress: "IndexServerAddress",
				Username:      "user",
				IdentityToken: "someToken",
			},
		},
	}
	dir, err := ioutil.TempDir("", "test")
	if err != nil {
		t.Fatalf("Error creating temporary directory: %v", err)
	}
	wdBackup, err := os.Getwd()
	if err != nil {
		t.Fatalf("Error getting current working directory: %v", err)
	}
	err = os.Chdir(dir)
	if err != nil {
		t.Fatalf("Error changing working directory: %v", err)
	}
	// Resolve symlinks (e.g. /tmp on macOS) so path comparisons match.
	dir, err = filepath.EvalSymlinks(dir)
	if err != nil {
		t.Fatal(err)
	}
	defer func() {
		err = os.Chdir(wdBackup)
		if err != nil {
			t.Fatalf("Error changing dir back: %v", err)
		}
		err = os.RemoveAll(dir)
		if err != nil {
			t.Fatalf("Error removing dir: %v", err)
		}
	}()
	configDir = dir
	for _, testCase := range testCases {
		for path, content := range testCase.files {
			asJSON, err := json.Marshal(content)
			assert.NilError(t, err, "Error parsing content to json in testCase %s", testCase.name)
			if content == "" {
				// An empty string means "create the file empty", not the JSON `""`.
				asJSON = []byte{}
			}
			err = fsutil.WriteToFile(asJSON, path)
			assert.NilError(t, err, "Error writing file in testCase %s", testCase.name)
		}
		client := &client{
			&fakeDockerClient{},
		}
		auth, err := client.Login(testCase.registryURL, testCase.user, testCase.password, testCase.checkCredentialsStore, testCase.saveAuthConfig, testCase.relogin)
		if !testCase.expectedErr {
			assert.NilError(t, err, "Unexpected error in testCase %s", testCase.name)
		} else if err == nil {
			// The case expected Login to fail but it succeeded; the original
			// message reported a nil "unexpected error" here, which was wrong.
			t.Fatalf("Expected an error but got none in testCase %s", testCase.name)
		}
		authAsYaml, err := yaml.Marshal(auth)
		assert.NilError(t, err, "Error parsing authConfig to yaml in testCase %s", testCase.name)
		expectedAsYaml, err := yaml.Marshal(testCase.expectedAuthConfig)
		assert.NilError(t, err, "Error parsing expectation to yaml in testCase %s", testCase.name)
		assert.Equal(t, string(authAsYaml), string(expectedAsYaml), "Unexpected authConfig in testCase %s", testCase.name)
		// Remove fixture files so the next case starts from a clean directory.
		err = filepath.Walk(".", func(path string, f os.FileInfo, err error) error {
			os.RemoveAll(path)
			return nil
		})
		assert.NilError(t, err, "Error cleaning up in testCase %s", testCase.name)
	}
}
|
package methods
import (
"fmt"
"testing"
)
// TestPointString verifies Point's default %v formatting.
func TestPointString(t *testing.T) {
	want := "point: x=300, y=60"
	if got := fmt.Sprintf("%v", Point{X: 300, Y: 60}); got != want {
		t.Fatalf("got %q, expected %q", got, want)
	}
}
// TestPointGetX verifies GetX returns the point's X coordinate.
func TestPointGetX(t *testing.T) {
	point := Point{X: 100, Y: 200}
	if got, want := point.GetX(), 100; got != want {
		t.Fatalf("GetX: got %v, want %v", got, want)
	}
}
|
package main
import (
"fmt"
"strconv"
)
// StartApp11 is the entry point for this chapter's demos: it prints a
// banner and runs the currently enabled demo function (test1101 is
// intentionally disabled via the commented-out call).
func StartApp11() {
	fmt.Println("==============StartApp11==============")
	//test1101()
	test1102()
}
// test1101 demonstrates the strconv conversion helpers. Parse errors are
// deliberately ignored because all inputs are hard-coded valid literals.
func test1101() {
	number, _ := strconv.Atoi("21")
	fmt.Println(number)
	str := strconv.Itoa(12)
	fmt.Printf("%T, %s\n", str, str)
	parseBool, _ := strconv.ParseBool("true")
	fmt.Printf("%T, %t\n", parseBool, parseBool)
	// ParseFloat's bitSize must be 32 or 64 per its contract; the original
	// passed 2, which is outside the documented range.
	f, _ := strconv.ParseFloat("3.1415", 64)
	fmt.Printf("%T, %f\n", f, f)
}
// test1102 prints a fixed demo string.
func test1102() {
	fmt.Println("12432534567")
}
//占位符
//%v:只输出所有的值。
//%+v:先输出结构体字段类型,再输出字段值。
//%#v:先输出结构体名称,再输出结构体字段类型+值。
//%T:输出结构体名称,或输出目标的类型。
//%%:输出字面上的百分号。
//%b:二进制表示
//%c:相应的Unicode码所表示的字符。
//%d:十进制表示
//%o:八进制表示
//%x:十六进制表示,字母形式a-f
//%X:十六进制,字母形式A-F
//%q:双引号围绕的字符串
//%e:科学计数法1.020000e+01
//%E:科学计数法1.020000E+01
//%f:小数输出,有小数点而无指数
//%p:十六进制输出,输出指针类型。
//%g:末尾无零的小数输出。
//%G:末尾无零的小数输出。
//%t:布尔占位符。
|
package gin
// GetInitHandle returns a HandlerFunc intended to initialize the request
// Context. The closure body is currently a deliberate no-op; the
// commented-out line shows the planned initialization.
func GetInitHandle() HandlerFunc {
	return func(c *Context) {
		//init the Context
		// c.Context = context.Background()
	}
}
|
package handlers
import (
"encoding/json"
"fmt"
"net/http"
"time"
"github.com/esrever001/toyserver/db"
"github.com/julienschmidt/httprouter"
)
// EventsAddRequest is the JSON body accepted by the /events/add endpoint.
type EventsAddRequest struct {
	User  string     // user the event belongs to
	Type  string     // event type label
	Time  *time.Time // optional explicit event time; nil means "now"
	Notes string     // free-form notes
	Image string     // image reference attached to the event
}
// EventsAddHandler persists new events posted to /events/add.
type EventsAddHandler struct {
	Database *db.Database // storage backend used to create event records
}
// Path returns the URL path this handler is registered under.
func (handler EventsAddHandler) Path() string {
	return "/events/add"
}
// Method returns the HTTP method this handler accepts.
func (handler EventsAddHandler) Method() HttpMethod {
	return POST
}
// getEvent converts an incoming request body into a db.Events record.
// The event timestamp defaults to the current time unless the request
// supplies an explicit Time.
func (handler EventsAddHandler) getEvent(request EventsAddRequest) db.Events {
	ts := time.Now().Unix()
	if request.Time != nil {
		ts = request.Time.Unix()
	}
	return db.Events{
		User:      request.User,
		Type:      request.Type,
		Timestamp: ts,
		Notes:     request.Notes,
		Image:     request.Image,
	}
}
// Handle decodes the posted event, persists it, and echoes the stored
// record back to the caller as JSON.
func (handler EventsAddHandler) Handle(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
	var requestBody EventsAddRequest
	if err := json.NewDecoder(r.Body).Decode(&requestBody); err != nil {
		// A malformed body is a client error; the original panicked here,
		// which turns bad input into a server-side crash instead of a 400.
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	event := handler.getEvent(requestBody)
	handler.Database.Database.Create(&event)
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(event)
	fmt.Printf("Adding event for user %s\n", ps.ByName("user"))
}
|
package main
import (
"log"
"github.com/royaloaklabs/super-genki-db/db"
"github.com/royaloaklabs/super-genki-db/freq"
"github.com/royaloaklabs/super-genki-db/jmdict"
)
// main builds frequency data, parses the JMDict source, converts every
// entry to the database representation, and populates the database.
func main() {
	freq.BuildFrequencyData()
	if err := jmdict.Parse(); err != nil {
		log.Fatal(err)
	}
	// Pre-size: one database entry per parsed JMDict entry.
	databaseEntries := make([]*db.SGEntry, 0, len(jmdict.Entries))
	for _, entry := range jmdict.Entries {
		databaseEntries = append(databaseEntries, db.NewSGEntryFromJMDict(entry))
	}
	db.Connect()
	if err := db.PopulateDatabase(databaseEntries); err != nil {
		log.Fatal(err)
	}
}
|
package owm
import (
"bufio"
"encoding/csv"
"fmt"
"io"
"os"
"path/filepath"
"strconv"
"github.com/tada3/triton/config"
)
const (
csvFilePath = "weather/owm/csv/weather_condition.csv"
unknown = "不明"
)
var (
wcMap = map[int64]string{}
)
// init loads the weather-condition CSV shipped with the application into
// wcMap. A missing or malformed file is treated as fatal at startup,
// hence the panics.
func init() {
	// NOTE(review): presumably the application's install root — confirm
	// config.GetHomeDir semantics.
	homeDir := config.GetHomeDir()
	fp := filepath.Join(homeDir, csvFilePath)
	f, err := os.Open(fp)
	if err != nil {
		panic(err)
	}
	defer f.Close()
	err = loadCsvFile(f)
	if err != nil {
		panic(err)
	}
}
// GetWeatherCondition maps an OWM weather-condition code to its loaded
// description, returning the "unknown" placeholder for unmapped codes.
func GetWeatherCondition(code int64) string {
	if wc, ok := wcMap[code]; ok {
		return wc
	}
	fmt.Printf("Unknown code: %d\n", code)
	return unknown
}
// loadCsvFile parses weather-condition records from f into wcMap.
// Each record must have exactly 4 fields: field 0 is the numeric
// condition code and field 2 is the description stored in the map.
// Lines starting with '#' are comments; leading space is trimmed.
// It returns the first read or parse error encountered.
func loadCsvFile(f *os.File) error {
	r := csv.NewReader(bufio.NewReader(f))
	r.Comment = '#'
	r.TrimLeadingSpace = true
	count := 0
	for {
		l, err := r.Read()
		if err != nil {
			if err == io.EOF {
				break
			}
			return err
		}
		if len(l) != 4 {
			// Error strings are lowercase per Go convention (was "Invalid format").
			return fmt.Errorf("invalid format: %v", l)
		}
		id, err := strconv.ParseInt(l[0], 10, 64)
		if err != nil {
			return err
		}
		wcMap[id] = l[2]
		count++
	}
	fmt.Printf("Loaded %d records.\n", count)
	return nil
}
|
package convert
import (
Model "MainApplication/internal/Letter/LetterModel"
pb "MainApplication/proto/MailService"
)
// ModelToProto converts a domain Letter into its protobuf representation.
func ModelToProto(letter Model.Letter) *pb.Letter {
	return &pb.Letter{
		Sender:    letter.Sender,
		Receiver:  letter.Receiver,
		Lid:       letter.Id,
		DateTime:  uint64(letter.DateTime),
		Theme:     letter.Theme,
		Text:      letter.Text,
		IsWatched: letter.IsWatched,
	}
}
// ModelToProtoList converts a slice of domain Letters into protobuf
// Letters, preserving order. It delegates per-element conversion to
// ModelToProto instead of duplicating the field mapping, and pre-sizes
// the result.
func ModelToProtoList(letters *[]Model.Letter) []*pb.Letter {
	list := make([]*pb.Letter, 0, len(*letters))
	for _, letter := range *letters {
		list = append(list, ModelToProto(letter))
	}
	return list
}
// ProtoToModelList converts protobuf Letters into domain Letters,
// preserving order. It delegates per-element conversion to ProtoToModel
// instead of duplicating the field mapping, and pre-sizes the result.
func ProtoToModelList(letters []*pb.Letter) []Model.Letter {
	list := make([]Model.Letter, 0, len(letters))
	for _, letter := range letters {
		list = append(list, ProtoToModel(letter))
	}
	return list
}
// ProtoToModel converts a protobuf Letter into the domain model type.
func ProtoToModel(letter *pb.Letter) Model.Letter {
	return Model.Letter{
		Sender:    letter.Sender,
		Receiver:  letter.Receiver,
		Id:        letter.Lid,
		DateTime:  int64(letter.DateTime),
		Theme:     letter.Theme,
		Text:      letter.Text,
		IsWatched: letter.IsWatched,
	}
}
|
package fs
// INode is the node interface of the filesystem tree.
type INode interface {
	// Print renders the node; the string argument is presumably an
	// indentation/prefix for nested output — TODO confirm with callers.
	Print(string)
	// Clone returns a copy of the node (prototype pattern); whether the
	// copy is deep is implementation-defined.
	Clone() INode
}
|
package resource
import (
"os"
"github.com/chronojam/aws-pricing-api/types/schema"
"github.com/olekukonko/tablewriter"
)
// GetManageBlockChain fetches Amazon Managed Blockchain pricing data and
// renders a Description/USD/Unit table to stdout. It panics if the
// pricing data cannot be refreshed.
func GetManageBlockChain() {
	mgmtblockchain := &schema.AmazonManagedBlockchain{}
	err := mgmtblockchain.Refresh()
	if err != nil {
		panic(err)
	}
	table := tablewriter.NewWriter(os.Stdout)
	table.SetHeader([]string{"Description", "USD", "Unit"})
	table.SetRowLine(true)
	// Join each product with its term rows by SKU. The original first
	// copied Products into an intermediate slice, which added nothing.
	for _, p := range mgmtblockchain.Products {
		for _, term := range mgmtblockchain.Terms {
			if term.Sku != p.Sku {
				continue
			}
			for _, priceData := range term.PriceDimensions {
				table.Append([]string{priceData.Description, priceData.PricePerUnit.USD, priceData.Unit})
			}
		}
	}
	table.Render()
}
|
/*
Go functions may be closures. A closure is a function value that references variables from outside its body
*/
package main
import "fmt"
// adder returns a closure that accumulates the values passed to it and
// returns the running total after each call. Each call to adder yields
// an independent accumulator.
func adder() func(int) int {
	total := 0
	return func(x int) int {
		total += x
		return total
	}
}
// muller returns a closure that multiplies its captured accumulator
// (initialized to 2) by each argument and returns the running product.
func muller() func(i int) int {
	product := 2
	return func(a int) int {
		product *= a
		return product
	}
}
// main demonstrates that each adder() call produces an independent
// accumulator, then shows muller's multiplicative closure.
func main() {
	pos, neg := adder(), adder()
	for i := 0; i < 10; i++ {
		fmt.Println(pos(i), neg(-2*i))
	}
	mul := muller()
	fmt.Println(mul(20))
}
|
package main
import (
"strings"
"github.com/corymurphy/adventofcode/shared"
)
// Instruction identifies a decoded CPU instruction from the puzzle input.
type Instruction int

const (
	Noop    Instruction = 0  // no operation
	Addx    Instruction = 1  // add operand to register X
	Unknown Instruction = -1 // unrecognized mnemonic
)

// String returns the instruction's mnemonic, or "Unknown" for any
// unrecognized value.
func (i Instruction) String() string {
	if i == Noop {
		return "noop"
	}
	if i == Addx {
		return "addx"
	}
	return "Unknown"
}
// Operation is one decoded instruction together with its operand and the
// number of cycles left before it completes.
type Operation struct {
	data        int // operand for addx; 0 for noop
	instruction Instruction
	remaining   int // cycles remaining: 2 for addx, 1 otherwise (see NewOperation)
}
// parseInstruction maps a mnemonic string to its Instruction value,
// yielding Unknown for anything unrecognized.
func parseInstruction(input string) Instruction {
	if input == "noop" {
		return Noop
	}
	if input == "addx" {
		return Addx
	}
	return Unknown
}
// NewOperation decodes one input line ("noop" or "addx <n>") into an
// Operation. addx carries an operand and takes two cycles; everything
// else takes one cycle with no operand.
func NewOperation(line string) *Operation {
	// Split once instead of re-splitting the line for the operand.
	fields := strings.Split(line, " ")
	instruction := parseInstruction(fields[0])
	data := 0
	remaining := 1
	if instruction == Addx {
		data = shared.ToInt(fields[1])
		remaining = 2
	}
	return &Operation{
		data:        data,
		instruction: instruction,
		remaining:   remaining,
	}
}
// Program is an ordered list of decoded operations.
type Program []*Operation

// NewProgram decodes each input line into an Operation, preserving order.
func NewProgram(input []string) Program {
	// Pre-size: exactly one operation per input line.
	program := make(Program, 0, len(input))
	for _, line := range input {
		program = append(program, NewOperation(line))
	}
	return program
}
|
// This file was generated for SObject LightningUsageByPageMetrics, API Version v43.0 at 2018-07-30 03:47:17.340680714 -0400 EDT m=+3.683425364
package sobjects
import (
"fmt"
"strings"
)
// LightningUsageByPageMetrics is the generated Go mapping of the
// Salesforce LightningUsageByPageMetrics SObject (API v43.0).
type LightningUsageByPageMetrics struct {
	BaseSObject
	Id             string `force:",omitempty"` // record ID
	MetricsDate    string `force:",omitempty"` // date the metrics were collected
	PageName       string `force:",omitempty"` // Lightning page the metrics refer to
	SystemModstamp string `force:",omitempty"` // last system modification timestamp
	TotalCount     int    `force:",omitempty"` // usage count for the page
	UserId         string `force:",omitempty"` // user the metrics belong to
}
// ApiName returns the Salesforce SObject API name for this type.
func (t *LightningUsageByPageMetrics) ApiName() string {
	return "LightningUsageByPageMetrics"
}
// String renders the record as a human-readable multi-line summary.
func (t *LightningUsageByPageMetrics) String() string {
	builder := strings.Builder{}
	// NOTE(review): t.Name is not declared on this struct; it presumably
	// comes from the embedded BaseSObject — confirm, since generated
	// SObjects do not always carry a Name field.
	builder.WriteString(fmt.Sprintf("LightningUsageByPageMetrics #%s - %s\n", t.Id, t.Name))
	builder.WriteString(fmt.Sprintf("\tId: %v\n", t.Id))
	builder.WriteString(fmt.Sprintf("\tMetricsDate: %v\n", t.MetricsDate))
	builder.WriteString(fmt.Sprintf("\tPageName: %v\n", t.PageName))
	builder.WriteString(fmt.Sprintf("\tSystemModstamp: %v\n", t.SystemModstamp))
	builder.WriteString(fmt.Sprintf("\tTotalCount: %v\n", t.TotalCount))
	builder.WriteString(fmt.Sprintf("\tUserId: %v\n", t.UserId))
	return builder.String()
}
// LightningUsageByPageMetricsQueryResponse is the SOQL query response
// wrapper holding the matched records.
type LightningUsageByPageMetricsQueryResponse struct {
	BaseQuery
	Records []LightningUsageByPageMetrics `json:"Records" force:"records"`
}
|
package main
import (
"math"
"time"
cases "github.com/envoyproxy/protoc-gen-validate/tests/harness/cases/go"
other_package "github.com/envoyproxy/protoc-gen-validate/tests/harness/cases/other_package/go"
sort "github.com/envoyproxy/protoc-gen-validate/tests/harness/cases/sort/go"
yet_another_package "github.com/envoyproxy/protoc-gen-validate/tests/harness/cases/yet_another_package/go"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/types/known/anypb"
"google.golang.org/protobuf/types/known/durationpb"
"google.golang.org/protobuf/types/known/timestamppb"
"google.golang.org/protobuf/types/known/wrapperspb"
)
// TestCase is one validation scenario: a message to validate and the
// number of validation failures it is expected to produce.
type TestCase struct {
	Name     string        // human-readable case identifier
	Message  proto.Message // message instance to validate
	Failures int           // expected number of failed validation errors
}
// TestResult reports the outcome of executing a test case.
type TestResult struct {
	OK, Skipped bool // OK: case passed; Skipped: case was not executed
}
// TestCases is the aggregate list of all validation test cases, built at
// startup from the per-type case tables defined below.
var TestCases []TestCase

// init flattens every per-type case table into TestCases.
func init() {
	sets := [][]TestCase{
		floatCases,
		doubleCases,
		int32Cases,
		int64Cases,
		uint32Cases,
		uint64Cases,
		sint32Cases,
		sint64Cases,
		fixed32Cases,
		fixed64Cases,
		sfixed32Cases,
		sfixed64Cases,
		boolCases,
		stringCases,
		bytesCases,
		enumCases,
		messageCases,
		repeatedCases,
		mapCases,
		oneofCases,
		wrapperCases,
		durationCases,
		timestampCases,
		anyCases,
		kitchenSink,
		nestedCases,
	}
	for _, set := range sets {
		TestCases = append(TestCases, set...)
	}
}
// floatCases covers const/in/not-in and every range-constraint
// combination for the float scalar type.
var floatCases = []TestCase{
	{"float - none - valid", &cases.FloatNone{Val: -1.23456}, 0},
	{"float - const - valid", &cases.FloatConst{Val: 1.23}, 0},
	{"float - const - invalid", &cases.FloatConst{Val: 4.56}, 1},
	{"float - in - valid", &cases.FloatIn{Val: 7.89}, 0},
	{"float - in - invalid", &cases.FloatIn{Val: 10.11}, 1},
	{"float - not in - valid", &cases.FloatNotIn{Val: 1}, 0},
	{"float - not in - invalid", &cases.FloatNotIn{Val: 0}, 1},
	{"float - lt - valid", &cases.FloatLT{Val: -1}, 0},
	{"float - lt - invalid (equal)", &cases.FloatLT{Val: 0}, 1},
	{"float - lt - invalid", &cases.FloatLT{Val: 1}, 1},
	{"float - lte - valid", &cases.FloatLTE{Val: 63}, 0},
	{"float - lte - valid (equal)", &cases.FloatLTE{Val: 64}, 0},
	{"float - lte - invalid", &cases.FloatLTE{Val: 65}, 1},
	{"float - gt - valid", &cases.FloatGT{Val: 17}, 0},
	{"float - gt - invalid (equal)", &cases.FloatGT{Val: 16}, 1},
	{"float - gt - invalid", &cases.FloatGT{Val: 15}, 1},
	{"float - gte - valid", &cases.FloatGTE{Val: 9}, 0},
	{"float - gte - valid (equal)", &cases.FloatGTE{Val: 8}, 0},
	{"float - gte - invalid", &cases.FloatGTE{Val: 7}, 1},
	{"float - gt & lt - valid", &cases.FloatGTLT{Val: 5}, 0},
	{"float - gt & lt - invalid (above)", &cases.FloatGTLT{Val: 11}, 1},
	{"float - gt & lt - invalid (below)", &cases.FloatGTLT{Val: -1}, 1},
	{"float - gt & lt - invalid (max)", &cases.FloatGTLT{Val: 10}, 1},
	{"float - gt & lt - invalid (min)", &cases.FloatGTLT{Val: 0}, 1},
	{"float - exclusive gt & lt - valid (above)", &cases.FloatExLTGT{Val: 11}, 0},
	{"float - exclusive gt & lt - valid (below)", &cases.FloatExLTGT{Val: -1}, 0},
	{"float - exclusive gt & lt - invalid", &cases.FloatExLTGT{Val: 5}, 1},
	{"float - exclusive gt & lt - invalid (max)", &cases.FloatExLTGT{Val: 10}, 1},
	{"float - exclusive gt & lt - invalid (min)", &cases.FloatExLTGT{Val: 0}, 1},
	{"float - gte & lte - valid", &cases.FloatGTELTE{Val: 200}, 0},
	{"float - gte & lte - valid (max)", &cases.FloatGTELTE{Val: 256}, 0},
	{"float - gte & lte - valid (min)", &cases.FloatGTELTE{Val: 128}, 0},
	{"float - gte & lte - invalid (above)", &cases.FloatGTELTE{Val: 300}, 1},
	{"float - gte & lte - invalid (below)", &cases.FloatGTELTE{Val: 100}, 1},
	{"float - exclusive gte & lte - valid (above)", &cases.FloatExGTELTE{Val: 300}, 0},
	{"float - exclusive gte & lte - valid (below)", &cases.FloatExGTELTE{Val: 100}, 0},
	{"float - exclusive gte & lte - valid (max)", &cases.FloatExGTELTE{Val: 256}, 0},
	{"float - exclusive gte & lte - valid (min)", &cases.FloatExGTELTE{Val: 128}, 0},
	{"float - exclusive gte & lte - invalid", &cases.FloatExGTELTE{Val: 200}, 1},
	{"float - ignore_empty gte & lte - valid", &cases.FloatIgnore{Val: 0}, 0},
}
// doubleCases mirrors floatCases for the double scalar type.
var doubleCases = []TestCase{
	{"double - none - valid", &cases.DoubleNone{Val: -1.23456}, 0},
	{"double - const - valid", &cases.DoubleConst{Val: 1.23}, 0},
	{"double - const - invalid", &cases.DoubleConst{Val: 4.56}, 1},
	{"double - in - valid", &cases.DoubleIn{Val: 7.89}, 0},
	{"double - in - invalid", &cases.DoubleIn{Val: 10.11}, 1},
	{"double - not in - valid", &cases.DoubleNotIn{Val: 1}, 0},
	{"double - not in - invalid", &cases.DoubleNotIn{Val: 0}, 1},
	{"double - lt - valid", &cases.DoubleLT{Val: -1}, 0},
	{"double - lt - invalid (equal)", &cases.DoubleLT{Val: 0}, 1},
	{"double - lt - invalid", &cases.DoubleLT{Val: 1}, 1},
	{"double - lte - valid", &cases.DoubleLTE{Val: 63}, 0},
	{"double - lte - valid (equal)", &cases.DoubleLTE{Val: 64}, 0},
	{"double - lte - invalid", &cases.DoubleLTE{Val: 65}, 1},
	{"double - gt - valid", &cases.DoubleGT{Val: 17}, 0},
	{"double - gt - invalid (equal)", &cases.DoubleGT{Val: 16}, 1},
	{"double - gt - invalid", &cases.DoubleGT{Val: 15}, 1},
	{"double - gte - valid", &cases.DoubleGTE{Val: 9}, 0},
	{"double - gte - valid (equal)", &cases.DoubleGTE{Val: 8}, 0},
	{"double - gte - invalid", &cases.DoubleGTE{Val: 7}, 1},
	{"double - gt & lt - valid", &cases.DoubleGTLT{Val: 5}, 0},
	{"double - gt & lt - invalid (above)", &cases.DoubleGTLT{Val: 11}, 1},
	{"double - gt & lt - invalid (below)", &cases.DoubleGTLT{Val: -1}, 1},
	{"double - gt & lt - invalid (max)", &cases.DoubleGTLT{Val: 10}, 1},
	{"double - gt & lt - invalid (min)", &cases.DoubleGTLT{Val: 0}, 1},
	{"double - exclusive gt & lt - valid (above)", &cases.DoubleExLTGT{Val: 11}, 0},
	{"double - exclusive gt & lt - valid (below)", &cases.DoubleExLTGT{Val: -1}, 0},
	{"double - exclusive gt & lt - invalid", &cases.DoubleExLTGT{Val: 5}, 1},
	{"double - exclusive gt & lt - invalid (max)", &cases.DoubleExLTGT{Val: 10}, 1},
	{"double - exclusive gt & lt - invalid (min)", &cases.DoubleExLTGT{Val: 0}, 1},
	{"double - gte & lte - valid", &cases.DoubleGTELTE{Val: 200}, 0},
	{"double - gte & lte - valid (max)", &cases.DoubleGTELTE{Val: 256}, 0},
	{"double - gte & lte - valid (min)", &cases.DoubleGTELTE{Val: 128}, 0},
	{"double - gte & lte - invalid (above)", &cases.DoubleGTELTE{Val: 300}, 1},
	{"double - gte & lte - invalid (below)", &cases.DoubleGTELTE{Val: 100}, 1},
	{"double - exclusive gte & lte - valid (above)", &cases.DoubleExGTELTE{Val: 300}, 0},
	{"double - exclusive gte & lte - valid (below)", &cases.DoubleExGTELTE{Val: 100}, 0},
	{"double - exclusive gte & lte - valid (max)", &cases.DoubleExGTELTE{Val: 256}, 0},
	{"double - exclusive gte & lte - valid (min)", &cases.DoubleExGTELTE{Val: 128}, 0},
	{"double - exclusive gte & lte - invalid", &cases.DoubleExGTELTE{Val: 200}, 1},
	{"double - ignore_empty gte & lte - valid", &cases.DoubleIgnore{Val: 0}, 0},
}
// int32Cases covers const/in/not-in and every range-constraint
// combination for the int32 scalar type.
var int32Cases = []TestCase{
	{"int32 - none - valid", &cases.Int32None{Val: 123}, 0},
	{"int32 - const - valid", &cases.Int32Const{Val: 1}, 0},
	{"int32 - const - invalid", &cases.Int32Const{Val: 2}, 1},
	{"int32 - in - valid", &cases.Int32In{Val: 3}, 0},
	{"int32 - in - invalid", &cases.Int32In{Val: 5}, 1},
	{"int32 - not in - valid", &cases.Int32NotIn{Val: 1}, 0},
	{"int32 - not in - invalid", &cases.Int32NotIn{Val: 0}, 1},
	{"int32 - lt - valid", &cases.Int32LT{Val: -1}, 0},
	{"int32 - lt - invalid (equal)", &cases.Int32LT{Val: 0}, 1},
	{"int32 - lt - invalid", &cases.Int32LT{Val: 1}, 1},
	{"int32 - lte - valid", &cases.Int32LTE{Val: 63}, 0},
	{"int32 - lte - valid (equal)", &cases.Int32LTE{Val: 64}, 0},
	{"int32 - lte - invalid", &cases.Int32LTE{Val: 65}, 1},
	{"int32 - gt - valid", &cases.Int32GT{Val: 17}, 0},
	{"int32 - gt - invalid (equal)", &cases.Int32GT{Val: 16}, 1},
	{"int32 - gt - invalid", &cases.Int32GT{Val: 15}, 1},
	{"int32 - gte - valid", &cases.Int32GTE{Val: 9}, 0},
	{"int32 - gte - valid (equal)", &cases.Int32GTE{Val: 8}, 0},
	{"int32 - gte - invalid", &cases.Int32GTE{Val: 7}, 1},
	{"int32 - gt & lt - valid", &cases.Int32GTLT{Val: 5}, 0},
	{"int32 - gt & lt - invalid (above)", &cases.Int32GTLT{Val: 11}, 1},
	{"int32 - gt & lt - invalid (below)", &cases.Int32GTLT{Val: -1}, 1},
	{"int32 - gt & lt - invalid (max)", &cases.Int32GTLT{Val: 10}, 1},
	{"int32 - gt & lt - invalid (min)", &cases.Int32GTLT{Val: 0}, 1},
	{"int32 - exclusive gt & lt - valid (above)", &cases.Int32ExLTGT{Val: 11}, 0},
	{"int32 - exclusive gt & lt - valid (below)", &cases.Int32ExLTGT{Val: -1}, 0},
	{"int32 - exclusive gt & lt - invalid", &cases.Int32ExLTGT{Val: 5}, 1},
	{"int32 - exclusive gt & lt - invalid (max)", &cases.Int32ExLTGT{Val: 10}, 1},
	{"int32 - exclusive gt & lt - invalid (min)", &cases.Int32ExLTGT{Val: 0}, 1},
	{"int32 - gte & lte - valid", &cases.Int32GTELTE{Val: 200}, 0},
	{"int32 - gte & lte - valid (max)", &cases.Int32GTELTE{Val: 256}, 0},
	{"int32 - gte & lte - valid (min)", &cases.Int32GTELTE{Val: 128}, 0},
	{"int32 - gte & lte - invalid (above)", &cases.Int32GTELTE{Val: 300}, 1},
	{"int32 - gte & lte - invalid (below)", &cases.Int32GTELTE{Val: 100}, 1},
	{"int32 - exclusive gte & lte - valid (above)", &cases.Int32ExGTELTE{Val: 300}, 0},
	{"int32 - exclusive gte & lte - valid (below)", &cases.Int32ExGTELTE{Val: 100}, 0},
	{"int32 - exclusive gte & lte - valid (max)", &cases.Int32ExGTELTE{Val: 256}, 0},
	{"int32 - exclusive gte & lte - valid (min)", &cases.Int32ExGTELTE{Val: 128}, 0},
	{"int32 - exclusive gte & lte - invalid", &cases.Int32ExGTELTE{Val: 200}, 1},
	{"int32 - ignore_empty gte & lte - valid", &cases.Int32Ignore{Val: 0}, 0},
}
// int64Cases mirrors int32Cases for int64, plus optional-field cases.
var int64Cases = []TestCase{
	{"int64 - none - valid", &cases.Int64None{Val: 123}, 0},
	{"int64 - const - valid", &cases.Int64Const{Val: 1}, 0},
	{"int64 - const - invalid", &cases.Int64Const{Val: 2}, 1},
	{"int64 - in - valid", &cases.Int64In{Val: 3}, 0},
	{"int64 - in - invalid", &cases.Int64In{Val: 5}, 1},
	{"int64 - not in - valid", &cases.Int64NotIn{Val: 1}, 0},
	{"int64 - not in - invalid", &cases.Int64NotIn{Val: 0}, 1},
	{"int64 - lt - valid", &cases.Int64LT{Val: -1}, 0},
	{"int64 - lt - invalid (equal)", &cases.Int64LT{Val: 0}, 1},
	{"int64 - lt - invalid", &cases.Int64LT{Val: 1}, 1},
	{"int64 - lte - valid", &cases.Int64LTE{Val: 63}, 0},
	{"int64 - lte - valid (equal)", &cases.Int64LTE{Val: 64}, 0},
	{"int64 - lte - invalid", &cases.Int64LTE{Val: 65}, 1},
	{"int64 - gt - valid", &cases.Int64GT{Val: 17}, 0},
	{"int64 - gt - invalid (equal)", &cases.Int64GT{Val: 16}, 1},
	{"int64 - gt - invalid", &cases.Int64GT{Val: 15}, 1},
	{"int64 - gte - valid", &cases.Int64GTE{Val: 9}, 0},
	{"int64 - gte - valid (equal)", &cases.Int64GTE{Val: 8}, 0},
	{"int64 - gte - invalid", &cases.Int64GTE{Val: 7}, 1},
	{"int64 - gt & lt - valid", &cases.Int64GTLT{Val: 5}, 0},
	{"int64 - gt & lt - invalid (above)", &cases.Int64GTLT{Val: 11}, 1},
	{"int64 - gt & lt - invalid (below)", &cases.Int64GTLT{Val: -1}, 1},
	{"int64 - gt & lt - invalid (max)", &cases.Int64GTLT{Val: 10}, 1},
	{"int64 - gt & lt - invalid (min)", &cases.Int64GTLT{Val: 0}, 1},
	{"int64 - exclusive gt & lt - valid (above)", &cases.Int64ExLTGT{Val: 11}, 0},
	{"int64 - exclusive gt & lt - valid (below)", &cases.Int64ExLTGT{Val: -1}, 0},
	{"int64 - exclusive gt & lt - invalid", &cases.Int64ExLTGT{Val: 5}, 1},
	{"int64 - exclusive gt & lt - invalid (max)", &cases.Int64ExLTGT{Val: 10}, 1},
	{"int64 - exclusive gt & lt - invalid (min)", &cases.Int64ExLTGT{Val: 0}, 1},
	{"int64 - gte & lte - valid", &cases.Int64GTELTE{Val: 200}, 0},
	{"int64 - gte & lte - valid (max)", &cases.Int64GTELTE{Val: 256}, 0},
	{"int64 - gte & lte - valid (min)", &cases.Int64GTELTE{Val: 128}, 0},
	{"int64 - gte & lte - invalid (above)", &cases.Int64GTELTE{Val: 300}, 1},
	{"int64 - gte & lte - invalid (below)", &cases.Int64GTELTE{Val: 100}, 1},
	{"int64 - exclusive gte & lte - valid (above)", &cases.Int64ExGTELTE{Val: 300}, 0},
	{"int64 - exclusive gte & lte - valid (below)", &cases.Int64ExGTELTE{Val: 100}, 0},
	{"int64 - exclusive gte & lte - valid (max)", &cases.Int64ExGTELTE{Val: 256}, 0},
	{"int64 - exclusive gte & lte - valid (min)", &cases.Int64ExGTELTE{Val: 128}, 0},
	{"int64 - exclusive gte & lte - invalid", &cases.Int64ExGTELTE{Val: 200}, 1},
	{"int64 - ignore_empty gte & lte - valid", &cases.Int64Ignore{Val: 0}, 0},
	// &wrapperspb.Int64(n).Value takes the address of the wrapper's Value
	// field to obtain a *int64 for the optional field.
	{"int64 optional - lte - valid", &cases.Int64LTEOptional{Val: &wrapperspb.Int64(63).Value}, 0},
	{"int64 optional - lte - valid (equal)", &cases.Int64LTEOptional{Val: &wrapperspb.Int64(64).Value}, 0},
	{"int64 optional - lte - valid (unset)", &cases.Int64LTEOptional{}, 0},
}
// uint32Cases mirrors int32Cases for the unsigned uint32 scalar type.
var uint32Cases = []TestCase{
	{"uint32 - none - valid", &cases.UInt32None{Val: 123}, 0},
	{"uint32 - const - valid", &cases.UInt32Const{Val: 1}, 0},
	{"uint32 - const - invalid", &cases.UInt32Const{Val: 2}, 1},
	{"uint32 - in - valid", &cases.UInt32In{Val: 3}, 0},
	{"uint32 - in - invalid", &cases.UInt32In{Val: 5}, 1},
	{"uint32 - not in - valid", &cases.UInt32NotIn{Val: 1}, 0},
	{"uint32 - not in - invalid", &cases.UInt32NotIn{Val: 0}, 1},
	{"uint32 - lt - valid", &cases.UInt32LT{Val: 4}, 0},
	{"uint32 - lt - invalid (equal)", &cases.UInt32LT{Val: 5}, 1},
	{"uint32 - lt - invalid", &cases.UInt32LT{Val: 6}, 1},
	{"uint32 - lte - valid", &cases.UInt32LTE{Val: 63}, 0},
	{"uint32 - lte - valid (equal)", &cases.UInt32LTE{Val: 64}, 0},
	{"uint32 - lte - invalid", &cases.UInt32LTE{Val: 65}, 1},
	{"uint32 - gt - valid", &cases.UInt32GT{Val: 17}, 0},
	{"uint32 - gt - invalid (equal)", &cases.UInt32GT{Val: 16}, 1},
	{"uint32 - gt - invalid", &cases.UInt32GT{Val: 15}, 1},
	{"uint32 - gte - valid", &cases.UInt32GTE{Val: 9}, 0},
	{"uint32 - gte - valid (equal)", &cases.UInt32GTE{Val: 8}, 0},
	{"uint32 - gte - invalid", &cases.UInt32GTE{Val: 7}, 1},
	{"uint32 - gt & lt - valid", &cases.UInt32GTLT{Val: 7}, 0},
	{"uint32 - gt & lt - invalid (above)", &cases.UInt32GTLT{Val: 11}, 1},
	{"uint32 - gt & lt - invalid (below)", &cases.UInt32GTLT{Val: 1}, 1},
	{"uint32 - gt & lt - invalid (max)", &cases.UInt32GTLT{Val: 10}, 1},
	{"uint32 - gt & lt - invalid (min)", &cases.UInt32GTLT{Val: 5}, 1},
	{"uint32 - exclusive gt & lt - valid (above)", &cases.UInt32ExLTGT{Val: 11}, 0},
	{"uint32 - exclusive gt & lt - valid (below)", &cases.UInt32ExLTGT{Val: 4}, 0},
	{"uint32 - exclusive gt & lt - invalid", &cases.UInt32ExLTGT{Val: 7}, 1},
	{"uint32 - exclusive gt & lt - invalid (max)", &cases.UInt32ExLTGT{Val: 10}, 1},
	{"uint32 - exclusive gt & lt - invalid (min)", &cases.UInt32ExLTGT{Val: 5}, 1},
	{"uint32 - gte & lte - valid", &cases.UInt32GTELTE{Val: 200}, 0},
	{"uint32 - gte & lte - valid (max)", &cases.UInt32GTELTE{Val: 256}, 0},
	{"uint32 - gte & lte - valid (min)", &cases.UInt32GTELTE{Val: 128}, 0},
	{"uint32 - gte & lte - invalid (above)", &cases.UInt32GTELTE{Val: 300}, 1},
	{"uint32 - gte & lte - invalid (below)", &cases.UInt32GTELTE{Val: 100}, 1},
	{"uint32 - exclusive gte & lte - valid (above)", &cases.UInt32ExGTELTE{Val: 300}, 0},
	{"uint32 - exclusive gte & lte - valid (below)", &cases.UInt32ExGTELTE{Val: 100}, 0},
	{"uint32 - exclusive gte & lte - valid (max)", &cases.UInt32ExGTELTE{Val: 256}, 0},
	{"uint32 - exclusive gte & lte - valid (min)", &cases.UInt32ExGTELTE{Val: 128}, 0},
	{"uint32 - exclusive gte & lte - invalid", &cases.UInt32ExGTELTE{Val: 200}, 1},
	{"uint32 - ignore_empty gte & lte - valid", &cases.UInt32Ignore{Val: 0}, 0},
}
// uint64Cases mirrors uint32Cases for the uint64 scalar type.
var uint64Cases = []TestCase{
	{"uint64 - none - valid", &cases.UInt64None{Val: 123}, 0},
	{"uint64 - const - valid", &cases.UInt64Const{Val: 1}, 0},
	{"uint64 - const - invalid", &cases.UInt64Const{Val: 2}, 1},
	{"uint64 - in - valid", &cases.UInt64In{Val: 3}, 0},
	{"uint64 - in - invalid", &cases.UInt64In{Val: 5}, 1},
	{"uint64 - not in - valid", &cases.UInt64NotIn{Val: 1}, 0},
	{"uint64 - not in - invalid", &cases.UInt64NotIn{Val: 0}, 1},
	{"uint64 - lt - valid", &cases.UInt64LT{Val: 4}, 0},
	{"uint64 - lt - invalid (equal)", &cases.UInt64LT{Val: 5}, 1},
	{"uint64 - lt - invalid", &cases.UInt64LT{Val: 6}, 1},
	{"uint64 - lte - valid", &cases.UInt64LTE{Val: 63}, 0},
	{"uint64 - lte - valid (equal)", &cases.UInt64LTE{Val: 64}, 0},
	{"uint64 - lte - invalid", &cases.UInt64LTE{Val: 65}, 1},
	{"uint64 - gt - valid", &cases.UInt64GT{Val: 17}, 0},
	{"uint64 - gt - invalid (equal)", &cases.UInt64GT{Val: 16}, 1},
	{"uint64 - gt - invalid", &cases.UInt64GT{Val: 15}, 1},
	{"uint64 - gte - valid", &cases.UInt64GTE{Val: 9}, 0},
	{"uint64 - gte - valid (equal)", &cases.UInt64GTE{Val: 8}, 0},
	{"uint64 - gte - invalid", &cases.UInt64GTE{Val: 7}, 1},
	{"uint64 - gt & lt - valid", &cases.UInt64GTLT{Val: 7}, 0},
	{"uint64 - gt & lt - invalid (above)", &cases.UInt64GTLT{Val: 11}, 1},
	{"uint64 - gt & lt - invalid (below)", &cases.UInt64GTLT{Val: 1}, 1},
	{"uint64 - gt & lt - invalid (max)", &cases.UInt64GTLT{Val: 10}, 1},
	{"uint64 - gt & lt - invalid (min)", &cases.UInt64GTLT{Val: 5}, 1},
	{"uint64 - exclusive gt & lt - valid (above)", &cases.UInt64ExLTGT{Val: 11}, 0},
	{"uint64 - exclusive gt & lt - valid (below)", &cases.UInt64ExLTGT{Val: 4}, 0},
	{"uint64 - exclusive gt & lt - invalid", &cases.UInt64ExLTGT{Val: 7}, 1},
	{"uint64 - exclusive gt & lt - invalid (max)", &cases.UInt64ExLTGT{Val: 10}, 1},
	{"uint64 - exclusive gt & lt - invalid (min)", &cases.UInt64ExLTGT{Val: 5}, 1},
	{"uint64 - gte & lte - valid", &cases.UInt64GTELTE{Val: 200}, 0},
	{"uint64 - gte & lte - valid (max)", &cases.UInt64GTELTE{Val: 256}, 0},
	{"uint64 - gte & lte - valid (min)", &cases.UInt64GTELTE{Val: 128}, 0},
	{"uint64 - gte & lte - invalid (above)", &cases.UInt64GTELTE{Val: 300}, 1},
	{"uint64 - gte & lte - invalid (below)", &cases.UInt64GTELTE{Val: 100}, 1},
	{"uint64 - exclusive gte & lte - valid (above)", &cases.UInt64ExGTELTE{Val: 300}, 0},
	{"uint64 - exclusive gte & lte - valid (below)", &cases.UInt64ExGTELTE{Val: 100}, 0},
	{"uint64 - exclusive gte & lte - valid (max)", &cases.UInt64ExGTELTE{Val: 256}, 0},
	{"uint64 - exclusive gte & lte - valid (min)", &cases.UInt64ExGTELTE{Val: 128}, 0},
	{"uint64 - exclusive gte & lte - invalid", &cases.UInt64ExGTELTE{Val: 200}, 1},
	{"uint64 - ignore_empty gte & lte - valid", &cases.UInt64Ignore{Val: 0}, 0},
}
// sint32Cases mirrors int32Cases for the zigzag-encoded sint32 type.
var sint32Cases = []TestCase{
	{"sint32 - none - valid", &cases.SInt32None{Val: 123}, 0},
	{"sint32 - const - valid", &cases.SInt32Const{Val: 1}, 0},
	{"sint32 - const - invalid", &cases.SInt32Const{Val: 2}, 1},
	{"sint32 - in - valid", &cases.SInt32In{Val: 3}, 0},
	{"sint32 - in - invalid", &cases.SInt32In{Val: 5}, 1},
	{"sint32 - not in - valid", &cases.SInt32NotIn{Val: 1}, 0},
	{"sint32 - not in - invalid", &cases.SInt32NotIn{Val: 0}, 1},
	{"sint32 - lt - valid", &cases.SInt32LT{Val: -1}, 0},
	{"sint32 - lt - invalid (equal)", &cases.SInt32LT{Val: 0}, 1},
	{"sint32 - lt - invalid", &cases.SInt32LT{Val: 1}, 1},
	{"sint32 - lte - valid", &cases.SInt32LTE{Val: 63}, 0},
	{"sint32 - lte - valid (equal)", &cases.SInt32LTE{Val: 64}, 0},
	{"sint32 - lte - invalid", &cases.SInt32LTE{Val: 65}, 1},
	{"sint32 - gt - valid", &cases.SInt32GT{Val: 17}, 0},
	{"sint32 - gt - invalid (equal)", &cases.SInt32GT{Val: 16}, 1},
	{"sint32 - gt - invalid", &cases.SInt32GT{Val: 15}, 1},
	{"sint32 - gte - valid", &cases.SInt32GTE{Val: 9}, 0},
	{"sint32 - gte - valid (equal)", &cases.SInt32GTE{Val: 8}, 0},
	{"sint32 - gte - invalid", &cases.SInt32GTE{Val: 7}, 1},
	{"sint32 - gt & lt - valid", &cases.SInt32GTLT{Val: 5}, 0},
	{"sint32 - gt & lt - invalid (above)", &cases.SInt32GTLT{Val: 11}, 1},
	{"sint32 - gt & lt - invalid (below)", &cases.SInt32GTLT{Val: -1}, 1},
	{"sint32 - gt & lt - invalid (max)", &cases.SInt32GTLT{Val: 10}, 1},
	{"sint32 - gt & lt - invalid (min)", &cases.SInt32GTLT{Val: 0}, 1},
	{"sint32 - exclusive gt & lt - valid (above)", &cases.SInt32ExLTGT{Val: 11}, 0},
	{"sint32 - exclusive gt & lt - valid (below)", &cases.SInt32ExLTGT{Val: -1}, 0},
	{"sint32 - exclusive gt & lt - invalid", &cases.SInt32ExLTGT{Val: 5}, 1},
	{"sint32 - exclusive gt & lt - invalid (max)", &cases.SInt32ExLTGT{Val: 10}, 1},
	{"sint32 - exclusive gt & lt - invalid (min)", &cases.SInt32ExLTGT{Val: 0}, 1},
	{"sint32 - gte & lte - valid", &cases.SInt32GTELTE{Val: 200}, 0},
	{"sint32 - gte & lte - valid (max)", &cases.SInt32GTELTE{Val: 256}, 0},
	{"sint32 - gte & lte - valid (min)", &cases.SInt32GTELTE{Val: 128}, 0},
	{"sint32 - gte & lte - invalid (above)", &cases.SInt32GTELTE{Val: 300}, 1},
	{"sint32 - gte & lte - invalid (below)", &cases.SInt32GTELTE{Val: 100}, 1},
	{"sint32 - exclusive gte & lte - valid (above)", &cases.SInt32ExGTELTE{Val: 300}, 0},
	{"sint32 - exclusive gte & lte - valid (below)", &cases.SInt32ExGTELTE{Val: 100}, 0},
	{"sint32 - exclusive gte & lte - valid (max)", &cases.SInt32ExGTELTE{Val: 256}, 0},
	{"sint32 - exclusive gte & lte - valid (min)", &cases.SInt32ExGTELTE{Val: 128}, 0},
	{"sint32 - exclusive gte & lte - invalid", &cases.SInt32ExGTELTE{Val: 200}, 1},
	{"sint32 - ignore_empty gte & lte - valid", &cases.SInt32Ignore{Val: 0}, 0},
}
// sint64Cases exercises the sint64 constraint rules (const, in/not_in,
// lt/lte/gt/gte, combined gt&lt ranges, inverted "exclusive" ranges, and
// ignore_empty) against the generated cases messages. The trailing int is
// the expected number of validation failures (0 for valid input).
var sint64Cases = []TestCase{
{"sint64 - none - valid", &cases.SInt64None{Val: 123}, 0},
{"sint64 - const - valid", &cases.SInt64Const{Val: 1}, 0},
{"sint64 - const - invalid", &cases.SInt64Const{Val: 2}, 1},
{"sint64 - in - valid", &cases.SInt64In{Val: 3}, 0},
{"sint64 - in - invalid", &cases.SInt64In{Val: 5}, 1},
{"sint64 - not in - valid", &cases.SInt64NotIn{Val: 1}, 0},
{"sint64 - not in - invalid", &cases.SInt64NotIn{Val: 0}, 1},
{"sint64 - lt - valid", &cases.SInt64LT{Val: -1}, 0},
{"sint64 - lt - invalid (equal)", &cases.SInt64LT{Val: 0}, 1},
{"sint64 - lt - invalid", &cases.SInt64LT{Val: 1}, 1},
{"sint64 - lte - valid", &cases.SInt64LTE{Val: 63}, 0},
{"sint64 - lte - valid (equal)", &cases.SInt64LTE{Val: 64}, 0},
{"sint64 - lte - invalid", &cases.SInt64LTE{Val: 65}, 1},
{"sint64 - gt - valid", &cases.SInt64GT{Val: 17}, 0},
{"sint64 - gt - invalid (equal)", &cases.SInt64GT{Val: 16}, 1},
{"sint64 - gt - invalid", &cases.SInt64GT{Val: 15}, 1},
{"sint64 - gte - valid", &cases.SInt64GTE{Val: 9}, 0},
{"sint64 - gte - valid (equal)", &cases.SInt64GTE{Val: 8}, 0},
{"sint64 - gte - invalid", &cases.SInt64GTE{Val: 7}, 1},
{"sint64 - gt & lt - valid", &cases.SInt64GTLT{Val: 5}, 0},
{"sint64 - gt & lt - invalid (above)", &cases.SInt64GTLT{Val: 11}, 1},
{"sint64 - gt & lt - invalid (below)", &cases.SInt64GTLT{Val: -1}, 1},
{"sint64 - gt & lt - invalid (max)", &cases.SInt64GTLT{Val: 10}, 1},
{"sint64 - gt & lt - invalid (min)", &cases.SInt64GTLT{Val: 0}, 1},
// "exclusive" ranges invert the bounds: values OUTSIDE (lt, gt) are valid.
{"sint64 - exclusive gt & lt - valid (above)", &cases.SInt64ExLTGT{Val: 11}, 0},
{"sint64 - exclusive gt & lt - valid (below)", &cases.SInt64ExLTGT{Val: -1}, 0},
{"sint64 - exclusive gt & lt - invalid", &cases.SInt64ExLTGT{Val: 5}, 1},
{"sint64 - exclusive gt & lt - invalid (max)", &cases.SInt64ExLTGT{Val: 10}, 1},
{"sint64 - exclusive gt & lt - invalid (min)", &cases.SInt64ExLTGT{Val: 0}, 1},
{"sint64 - gte & lte - valid", &cases.SInt64GTELTE{Val: 200}, 0},
{"sint64 - gte & lte - valid (max)", &cases.SInt64GTELTE{Val: 256}, 0},
{"sint64 - gte & lte - valid (min)", &cases.SInt64GTELTE{Val: 128}, 0},
{"sint64 - gte & lte - invalid (above)", &cases.SInt64GTELTE{Val: 300}, 1},
{"sint64 - gte & lte - invalid (below)", &cases.SInt64GTELTE{Val: 100}, 1},
{"sint64 - exclusive gte & lte - valid (above)", &cases.SInt64ExGTELTE{Val: 300}, 0},
{"sint64 - exclusive gte & lte - valid (below)", &cases.SInt64ExGTELTE{Val: 100}, 0},
{"sint64 - exclusive gte & lte - valid (max)", &cases.SInt64ExGTELTE{Val: 256}, 0},
{"sint64 - exclusive gte & lte - valid (min)", &cases.SInt64ExGTELTE{Val: 128}, 0},
{"sint64 - exclusive gte & lte - invalid", &cases.SInt64ExGTELTE{Val: 200}, 1},
// ignore_empty: the zero value is exempt from the range check.
{"sint64 - ignore_empty gte & lte - valid", &cases.SInt64Ignore{Val: 0}, 0},
}
// fixed32Cases exercises the fixed32 (unsigned) constraint rules: const,
// in/not_in, lt/lte/gt/gte, combined and "exclusive" (inverted) ranges, and
// ignore_empty. The trailing int is the expected number of validation
// failures (0 for valid input).
var fixed32Cases = []TestCase{
{"fixed32 - none - valid", &cases.Fixed32None{Val: 123}, 0},
{"fixed32 - const - valid", &cases.Fixed32Const{Val: 1}, 0},
{"fixed32 - const - invalid", &cases.Fixed32Const{Val: 2}, 1},
{"fixed32 - in - valid", &cases.Fixed32In{Val: 3}, 0},
{"fixed32 - in - invalid", &cases.Fixed32In{Val: 5}, 1},
{"fixed32 - not in - valid", &cases.Fixed32NotIn{Val: 1}, 0},
{"fixed32 - not in - invalid", &cases.Fixed32NotIn{Val: 0}, 1},
{"fixed32 - lt - valid", &cases.Fixed32LT{Val: 4}, 0},
{"fixed32 - lt - invalid (equal)", &cases.Fixed32LT{Val: 5}, 1},
{"fixed32 - lt - invalid", &cases.Fixed32LT{Val: 6}, 1},
{"fixed32 - lte - valid", &cases.Fixed32LTE{Val: 63}, 0},
{"fixed32 - lte - valid (equal)", &cases.Fixed32LTE{Val: 64}, 0},
{"fixed32 - lte - invalid", &cases.Fixed32LTE{Val: 65}, 1},
{"fixed32 - gt - valid", &cases.Fixed32GT{Val: 17}, 0},
{"fixed32 - gt - invalid (equal)", &cases.Fixed32GT{Val: 16}, 1},
{"fixed32 - gt - invalid", &cases.Fixed32GT{Val: 15}, 1},
{"fixed32 - gte - valid", &cases.Fixed32GTE{Val: 9}, 0},
{"fixed32 - gte - valid (equal)", &cases.Fixed32GTE{Val: 8}, 0},
{"fixed32 - gte - invalid", &cases.Fixed32GTE{Val: 7}, 1},
{"fixed32 - gt & lt - valid", &cases.Fixed32GTLT{Val: 7}, 0},
{"fixed32 - gt & lt - invalid (above)", &cases.Fixed32GTLT{Val: 11}, 1},
{"fixed32 - gt & lt - invalid (below)", &cases.Fixed32GTLT{Val: 1}, 1},
{"fixed32 - gt & lt - invalid (max)", &cases.Fixed32GTLT{Val: 10}, 1},
{"fixed32 - gt & lt - invalid (min)", &cases.Fixed32GTLT{Val: 5}, 1},
// "exclusive" ranges invert the bounds: values OUTSIDE (lt, gt) are valid.
{"fixed32 - exclusive gt & lt - valid (above)", &cases.Fixed32ExLTGT{Val: 11}, 0},
{"fixed32 - exclusive gt & lt - valid (below)", &cases.Fixed32ExLTGT{Val: 4}, 0},
{"fixed32 - exclusive gt & lt - invalid", &cases.Fixed32ExLTGT{Val: 7}, 1},
{"fixed32 - exclusive gt & lt - invalid (max)", &cases.Fixed32ExLTGT{Val: 10}, 1},
{"fixed32 - exclusive gt & lt - invalid (min)", &cases.Fixed32ExLTGT{Val: 5}, 1},
{"fixed32 - gte & lte - valid", &cases.Fixed32GTELTE{Val: 200}, 0},
{"fixed32 - gte & lte - valid (max)", &cases.Fixed32GTELTE{Val: 256}, 0},
{"fixed32 - gte & lte - valid (min)", &cases.Fixed32GTELTE{Val: 128}, 0},
{"fixed32 - gte & lte - invalid (above)", &cases.Fixed32GTELTE{Val: 300}, 1},
{"fixed32 - gte & lte - invalid (below)", &cases.Fixed32GTELTE{Val: 100}, 1},
{"fixed32 - exclusive gte & lte - valid (above)", &cases.Fixed32ExGTELTE{Val: 300}, 0},
{"fixed32 - exclusive gte & lte - valid (below)", &cases.Fixed32ExGTELTE{Val: 100}, 0},
{"fixed32 - exclusive gte & lte - valid (max)", &cases.Fixed32ExGTELTE{Val: 256}, 0},
{"fixed32 - exclusive gte & lte - valid (min)", &cases.Fixed32ExGTELTE{Val: 128}, 0},
{"fixed32 - exclusive gte & lte - invalid", &cases.Fixed32ExGTELTE{Val: 200}, 1},
// ignore_empty: the zero value is exempt from the range check.
{"fixed32 - ignore_empty gte & lte - valid", &cases.Fixed32Ignore{Val: 0}, 0},
}
// fixed64Cases mirrors fixed32Cases for the fixed64 (unsigned) scalar type:
// const, in/not_in, lt/lte/gt/gte, combined and "exclusive" (inverted)
// ranges, and ignore_empty. The trailing int is the expected number of
// validation failures (0 for valid input).
var fixed64Cases = []TestCase{
{"fixed64 - none - valid", &cases.Fixed64None{Val: 123}, 0},
{"fixed64 - const - valid", &cases.Fixed64Const{Val: 1}, 0},
{"fixed64 - const - invalid", &cases.Fixed64Const{Val: 2}, 1},
{"fixed64 - in - valid", &cases.Fixed64In{Val: 3}, 0},
{"fixed64 - in - invalid", &cases.Fixed64In{Val: 5}, 1},
{"fixed64 - not in - valid", &cases.Fixed64NotIn{Val: 1}, 0},
{"fixed64 - not in - invalid", &cases.Fixed64NotIn{Val: 0}, 1},
{"fixed64 - lt - valid", &cases.Fixed64LT{Val: 4}, 0},
{"fixed64 - lt - invalid (equal)", &cases.Fixed64LT{Val: 5}, 1},
{"fixed64 - lt - invalid", &cases.Fixed64LT{Val: 6}, 1},
{"fixed64 - lte - valid", &cases.Fixed64LTE{Val: 63}, 0},
{"fixed64 - lte - valid (equal)", &cases.Fixed64LTE{Val: 64}, 0},
{"fixed64 - lte - invalid", &cases.Fixed64LTE{Val: 65}, 1},
{"fixed64 - gt - valid", &cases.Fixed64GT{Val: 17}, 0},
{"fixed64 - gt - invalid (equal)", &cases.Fixed64GT{Val: 16}, 1},
{"fixed64 - gt - invalid", &cases.Fixed64GT{Val: 15}, 1},
{"fixed64 - gte - valid", &cases.Fixed64GTE{Val: 9}, 0},
{"fixed64 - gte - valid (equal)", &cases.Fixed64GTE{Val: 8}, 0},
{"fixed64 - gte - invalid", &cases.Fixed64GTE{Val: 7}, 1},
{"fixed64 - gt & lt - valid", &cases.Fixed64GTLT{Val: 7}, 0},
{"fixed64 - gt & lt - invalid (above)", &cases.Fixed64GTLT{Val: 11}, 1},
{"fixed64 - gt & lt - invalid (below)", &cases.Fixed64GTLT{Val: 1}, 1},
{"fixed64 - gt & lt - invalid (max)", &cases.Fixed64GTLT{Val: 10}, 1},
{"fixed64 - gt & lt - invalid (min)", &cases.Fixed64GTLT{Val: 5}, 1},
// "exclusive" ranges invert the bounds: values OUTSIDE (lt, gt) are valid.
{"fixed64 - exclusive gt & lt - valid (above)", &cases.Fixed64ExLTGT{Val: 11}, 0},
{"fixed64 - exclusive gt & lt - valid (below)", &cases.Fixed64ExLTGT{Val: 4}, 0},
{"fixed64 - exclusive gt & lt - invalid", &cases.Fixed64ExLTGT{Val: 7}, 1},
{"fixed64 - exclusive gt & lt - invalid (max)", &cases.Fixed64ExLTGT{Val: 10}, 1},
{"fixed64 - exclusive gt & lt - invalid (min)", &cases.Fixed64ExLTGT{Val: 5}, 1},
{"fixed64 - gte & lte - valid", &cases.Fixed64GTELTE{Val: 200}, 0},
{"fixed64 - gte & lte - valid (max)", &cases.Fixed64GTELTE{Val: 256}, 0},
{"fixed64 - gte & lte - valid (min)", &cases.Fixed64GTELTE{Val: 128}, 0},
{"fixed64 - gte & lte - invalid (above)", &cases.Fixed64GTELTE{Val: 300}, 1},
{"fixed64 - gte & lte - invalid (below)", &cases.Fixed64GTELTE{Val: 100}, 1},
{"fixed64 - exclusive gte & lte - valid (above)", &cases.Fixed64ExGTELTE{Val: 300}, 0},
{"fixed64 - exclusive gte & lte - valid (below)", &cases.Fixed64ExGTELTE{Val: 100}, 0},
{"fixed64 - exclusive gte & lte - valid (max)", &cases.Fixed64ExGTELTE{Val: 256}, 0},
{"fixed64 - exclusive gte & lte - valid (min)", &cases.Fixed64ExGTELTE{Val: 128}, 0},
{"fixed64 - exclusive gte & lte - invalid", &cases.Fixed64ExGTELTE{Val: 200}, 1},
// ignore_empty: the zero value is exempt from the range check.
{"fixed64 - ignore_empty gte & lte - valid", &cases.Fixed64Ignore{Val: 0}, 0},
}
// sfixed32Cases exercises the sfixed32 (signed fixed-width) constraint
// rules: const, in/not_in, lt/lte/gt/gte (including negative bounds),
// combined and "exclusive" (inverted) ranges, and ignore_empty. The trailing
// int is the expected number of validation failures (0 for valid input).
var sfixed32Cases = []TestCase{
{"sfixed32 - none - valid", &cases.SFixed32None{Val: 123}, 0},
{"sfixed32 - const - valid", &cases.SFixed32Const{Val: 1}, 0},
{"sfixed32 - const - invalid", &cases.SFixed32Const{Val: 2}, 1},
{"sfixed32 - in - valid", &cases.SFixed32In{Val: 3}, 0},
{"sfixed32 - in - invalid", &cases.SFixed32In{Val: 5}, 1},
{"sfixed32 - not in - valid", &cases.SFixed32NotIn{Val: 1}, 0},
{"sfixed32 - not in - invalid", &cases.SFixed32NotIn{Val: 0}, 1},
{"sfixed32 - lt - valid", &cases.SFixed32LT{Val: -1}, 0},
{"sfixed32 - lt - invalid (equal)", &cases.SFixed32LT{Val: 0}, 1},
{"sfixed32 - lt - invalid", &cases.SFixed32LT{Val: 1}, 1},
{"sfixed32 - lte - valid", &cases.SFixed32LTE{Val: 63}, 0},
{"sfixed32 - lte - valid (equal)", &cases.SFixed32LTE{Val: 64}, 0},
{"sfixed32 - lte - invalid", &cases.SFixed32LTE{Val: 65}, 1},
{"sfixed32 - gt - valid", &cases.SFixed32GT{Val: 17}, 0},
{"sfixed32 - gt - invalid (equal)", &cases.SFixed32GT{Val: 16}, 1},
{"sfixed32 - gt - invalid", &cases.SFixed32GT{Val: 15}, 1},
{"sfixed32 - gte - valid", &cases.SFixed32GTE{Val: 9}, 0},
{"sfixed32 - gte - valid (equal)", &cases.SFixed32GTE{Val: 8}, 0},
{"sfixed32 - gte - invalid", &cases.SFixed32GTE{Val: 7}, 1},
{"sfixed32 - gt & lt - valid", &cases.SFixed32GTLT{Val: 5}, 0},
{"sfixed32 - gt & lt - invalid (above)", &cases.SFixed32GTLT{Val: 11}, 1},
{"sfixed32 - gt & lt - invalid (below)", &cases.SFixed32GTLT{Val: -1}, 1},
{"sfixed32 - gt & lt - invalid (max)", &cases.SFixed32GTLT{Val: 10}, 1},
{"sfixed32 - gt & lt - invalid (min)", &cases.SFixed32GTLT{Val: 0}, 1},
// "exclusive" ranges invert the bounds: values OUTSIDE (lt, gt) are valid.
{"sfixed32 - exclusive gt & lt - valid (above)", &cases.SFixed32ExLTGT{Val: 11}, 0},
{"sfixed32 - exclusive gt & lt - valid (below)", &cases.SFixed32ExLTGT{Val: -1}, 0},
{"sfixed32 - exclusive gt & lt - invalid", &cases.SFixed32ExLTGT{Val: 5}, 1},
{"sfixed32 - exclusive gt & lt - invalid (max)", &cases.SFixed32ExLTGT{Val: 10}, 1},
{"sfixed32 - exclusive gt & lt - invalid (min)", &cases.SFixed32ExLTGT{Val: 0}, 1},
{"sfixed32 - gte & lte - valid", &cases.SFixed32GTELTE{Val: 200}, 0},
{"sfixed32 - gte & lte - valid (max)", &cases.SFixed32GTELTE{Val: 256}, 0},
{"sfixed32 - gte & lte - valid (min)", &cases.SFixed32GTELTE{Val: 128}, 0},
{"sfixed32 - gte & lte - invalid (above)", &cases.SFixed32GTELTE{Val: 300}, 1},
{"sfixed32 - gte & lte - invalid (below)", &cases.SFixed32GTELTE{Val: 100}, 1},
{"sfixed32 - exclusive gte & lte - valid (above)", &cases.SFixed32ExGTELTE{Val: 300}, 0},
{"sfixed32 - exclusive gte & lte - valid (below)", &cases.SFixed32ExGTELTE{Val: 100}, 0},
{"sfixed32 - exclusive gte & lte - valid (max)", &cases.SFixed32ExGTELTE{Val: 256}, 0},
{"sfixed32 - exclusive gte & lte - valid (min)", &cases.SFixed32ExGTELTE{Val: 128}, 0},
{"sfixed32 - exclusive gte & lte - invalid", &cases.SFixed32ExGTELTE{Val: 200}, 1},
// ignore_empty: the zero value is exempt from the range check.
{"sfixed32 - ignore_empty gte & lte - valid", &cases.SFixed32Ignore{Val: 0}, 0},
}
// sfixed64Cases mirrors sfixed32Cases for the sfixed64 scalar type: const,
// in/not_in, lt/lte/gt/gte (including negative bounds), combined and
// "exclusive" (inverted) ranges, and ignore_empty. The trailing int is the
// expected number of validation failures (0 for valid input).
var sfixed64Cases = []TestCase{
{"sfixed64 - none - valid", &cases.SFixed64None{Val: 123}, 0},
{"sfixed64 - const - valid", &cases.SFixed64Const{Val: 1}, 0},
{"sfixed64 - const - invalid", &cases.SFixed64Const{Val: 2}, 1},
{"sfixed64 - in - valid", &cases.SFixed64In{Val: 3}, 0},
{"sfixed64 - in - invalid", &cases.SFixed64In{Val: 5}, 1},
{"sfixed64 - not in - valid", &cases.SFixed64NotIn{Val: 1}, 0},
{"sfixed64 - not in - invalid", &cases.SFixed64NotIn{Val: 0}, 1},
{"sfixed64 - lt - valid", &cases.SFixed64LT{Val: -1}, 0},
{"sfixed64 - lt - invalid (equal)", &cases.SFixed64LT{Val: 0}, 1},
{"sfixed64 - lt - invalid", &cases.SFixed64LT{Val: 1}, 1},
{"sfixed64 - lte - valid", &cases.SFixed64LTE{Val: 63}, 0},
{"sfixed64 - lte - valid (equal)", &cases.SFixed64LTE{Val: 64}, 0},
{"sfixed64 - lte - invalid", &cases.SFixed64LTE{Val: 65}, 1},
{"sfixed64 - gt - valid", &cases.SFixed64GT{Val: 17}, 0},
{"sfixed64 - gt - invalid (equal)", &cases.SFixed64GT{Val: 16}, 1},
{"sfixed64 - gt - invalid", &cases.SFixed64GT{Val: 15}, 1},
{"sfixed64 - gte - valid", &cases.SFixed64GTE{Val: 9}, 0},
{"sfixed64 - gte - valid (equal)", &cases.SFixed64GTE{Val: 8}, 0},
{"sfixed64 - gte - invalid", &cases.SFixed64GTE{Val: 7}, 1},
{"sfixed64 - gt & lt - valid", &cases.SFixed64GTLT{Val: 5}, 0},
{"sfixed64 - gt & lt - invalid (above)", &cases.SFixed64GTLT{Val: 11}, 1},
{"sfixed64 - gt & lt - invalid (below)", &cases.SFixed64GTLT{Val: -1}, 1},
{"sfixed64 - gt & lt - invalid (max)", &cases.SFixed64GTLT{Val: 10}, 1},
{"sfixed64 - gt & lt - invalid (min)", &cases.SFixed64GTLT{Val: 0}, 1},
// "exclusive" ranges invert the bounds: values OUTSIDE (lt, gt) are valid.
{"sfixed64 - exclusive gt & lt - valid (above)", &cases.SFixed64ExLTGT{Val: 11}, 0},
{"sfixed64 - exclusive gt & lt - valid (below)", &cases.SFixed64ExLTGT{Val: -1}, 0},
{"sfixed64 - exclusive gt & lt - invalid", &cases.SFixed64ExLTGT{Val: 5}, 1},
{"sfixed64 - exclusive gt & lt - invalid (max)", &cases.SFixed64ExLTGT{Val: 10}, 1},
{"sfixed64 - exclusive gt & lt - invalid (min)", &cases.SFixed64ExLTGT{Val: 0}, 1},
{"sfixed64 - gte & lte - valid", &cases.SFixed64GTELTE{Val: 200}, 0},
{"sfixed64 - gte & lte - valid (max)", &cases.SFixed64GTELTE{Val: 256}, 0},
{"sfixed64 - gte & lte - valid (min)", &cases.SFixed64GTELTE{Val: 128}, 0},
{"sfixed64 - gte & lte - invalid (above)", &cases.SFixed64GTELTE{Val: 300}, 1},
{"sfixed64 - gte & lte - invalid (below)", &cases.SFixed64GTELTE{Val: 100}, 1},
{"sfixed64 - exclusive gte & lte - valid (above)", &cases.SFixed64ExGTELTE{Val: 300}, 0},
{"sfixed64 - exclusive gte & lte - valid (below)", &cases.SFixed64ExGTELTE{Val: 100}, 0},
{"sfixed64 - exclusive gte & lte - valid (max)", &cases.SFixed64ExGTELTE{Val: 256}, 0},
{"sfixed64 - exclusive gte & lte - valid (min)", &cases.SFixed64ExGTELTE{Val: 128}, 0},
{"sfixed64 - exclusive gte & lte - invalid", &cases.SFixed64ExGTELTE{Val: 200}, 1},
// ignore_empty: the zero value is exempt from the range check.
{"sfixed64 - ignore_empty gte & lte - valid", &cases.SFixed64Ignore{Val: 0}, 0},
}
// boolCases covers the bool constraint rules: unconstrained, and const
// pinned to true or false. The trailing int is the expected number of
// validation failures (0 for valid input).
var boolCases = []TestCase{
{"bool - none - valid", &cases.BoolNone{Val: true}, 0},
{"bool - const (true) - valid", &cases.BoolConstTrue{Val: true}, 0},
{"bool - const (true) - invalid", &cases.BoolConstTrue{Val: false}, 1},
{"bool - const (false) - valid", &cases.BoolConstFalse{Val: false}, 0},
{"bool - const (false) - invalid", &cases.BoolConstFalse{Val: true}, 1},
}
// stringCases covers the string constraint rules: const, in/not_in,
// rune/byte length bounds, pattern, prefix/suffix/contains, and the
// well-known formats (email, hostname, address, ip/ipv4/ipv6, uri/uri_ref,
// uuid, HTTP header name/value). The trailing int is the expected number of
// validation failures (0 for valid input).
//
// Fix: the three StringURIRef entries were copy-pasted with "string - URI"
// labels, duplicating the StringURI case names above them; they are now
// labelled "string - URI Ref" so failures identify the right message.
var stringCases = []TestCase{
{"string - none - valid", &cases.StringNone{Val: "quux"}, 0},
{"string - const - valid", &cases.StringConst{Val: "foo"}, 0},
{"string - const - invalid", &cases.StringConst{Val: "bar"}, 1},
{"string - in - valid", &cases.StringIn{Val: "bar"}, 0},
{"string - in - invalid", &cases.StringIn{Val: "quux"}, 1},
{"string - not in - valid", &cases.StringNotIn{Val: "quux"}, 0},
{"string - not in - invalid", &cases.StringNotIn{Val: "fizz"}, 1},
// len/min_len/max_len count runes, not bytes (multibyte cases below).
{"string - len - valid", &cases.StringLen{Val: "baz"}, 0},
{"string - len - valid (multibyte)", &cases.StringLen{Val: "你好吖"}, 0},
{"string - len - invalid (lt)", &cases.StringLen{Val: "go"}, 1},
{"string - len - invalid (gt)", &cases.StringLen{Val: "fizz"}, 1},
{"string - len - invalid (multibyte)", &cases.StringLen{Val: "你好"}, 1},
{"string - min len - valid", &cases.StringMinLen{Val: "protoc"}, 0},
{"string - min len - valid (min)", &cases.StringMinLen{Val: "baz"}, 0},
{"string - min len - invalid", &cases.StringMinLen{Val: "go"}, 1},
{"string - min len - invalid (multibyte)", &cases.StringMinLen{Val: "你好"}, 1},
{"string - max len - valid", &cases.StringMaxLen{Val: "foo"}, 0},
{"string - max len - valid (max)", &cases.StringMaxLen{Val: "proto"}, 0},
{"string - max len - valid (multibyte)", &cases.StringMaxLen{Val: "你好你好"}, 0},
{"string - max len - invalid", &cases.StringMaxLen{Val: "1234567890"}, 1},
{"string - min/max len - valid", &cases.StringMinMaxLen{Val: "quux"}, 0},
{"string - min/max len - valid (min)", &cases.StringMinMaxLen{Val: "foo"}, 0},
{"string - min/max len - valid (max)", &cases.StringMinMaxLen{Val: "proto"}, 0},
{"string - min/max len - valid (multibyte)", &cases.StringMinMaxLen{Val: "你好你好"}, 0},
{"string - min/max len - invalid (below)", &cases.StringMinMaxLen{Val: "go"}, 1},
{"string - min/max len - invalid (above)", &cases.StringMinMaxLen{Val: "validate"}, 1},
{"string - equal min/max len - valid", &cases.StringEqualMinMaxLen{Val: "proto"}, 0},
{"string - equal min/max len - invalid", &cases.StringEqualMinMaxLen{Val: "validate"}, 1},
// *_bytes variants count raw bytes, so multibyte strings trip them sooner.
{"string - len bytes - valid", &cases.StringLenBytes{Val: "pace"}, 0},
{"string - len bytes - invalid (lt)", &cases.StringLenBytes{Val: "val"}, 1},
{"string - len bytes - invalid (gt)", &cases.StringLenBytes{Val: "world"}, 1},
{"string - len bytes - invalid (multibyte)", &cases.StringLenBytes{Val: "世界和平"}, 1},
{"string - min bytes - valid", &cases.StringMinBytes{Val: "proto"}, 0},
{"string - min bytes - valid (min)", &cases.StringMinBytes{Val: "quux"}, 0},
{"string - min bytes - valid (multibyte)", &cases.StringMinBytes{Val: "你好"}, 0},
{"string - min bytes - invalid", &cases.StringMinBytes{Val: ""}, 1},
{"string - max bytes - valid", &cases.StringMaxBytes{Val: "foo"}, 0},
{"string - max bytes - valid (max)", &cases.StringMaxBytes{Val: "12345678"}, 0},
{"string - max bytes - invalid", &cases.StringMaxBytes{Val: "123456789"}, 1},
{"string - max bytes - invalid (multibyte)", &cases.StringMaxBytes{Val: "你好你好你好"}, 1},
{"string - min/max bytes - valid", &cases.StringMinMaxBytes{Val: "protoc"}, 0},
{"string - min/max bytes - valid (min)", &cases.StringMinMaxBytes{Val: "quux"}, 0},
{"string - min/max bytes - valid (max)", &cases.StringMinMaxBytes{Val: "fizzbuzz"}, 0},
{"string - min/max bytes - valid (multibyte)", &cases.StringMinMaxBytes{Val: "你好"}, 0},
{"string - min/max bytes - invalid (below)", &cases.StringMinMaxBytes{Val: "foo"}, 1},
{"string - min/max bytes - invalid (above)", &cases.StringMinMaxBytes{Val: "你好你好你"}, 1},
{"string - equal min/max bytes - valid", &cases.StringEqualMinMaxBytes{Val: "protoc"}, 0},
{"string - equal min/max bytes - invalid", &cases.StringEqualMinMaxBytes{Val: "foo"}, 1},
{"string - pattern - valid", &cases.StringPattern{Val: "Foo123"}, 0},
{"string - pattern - invalid", &cases.StringPattern{Val: "!@#$%^&*()"}, 1},
{"string - pattern - invalid (empty)", &cases.StringPattern{Val: ""}, 1},
{"string - pattern - invalid (null)", &cases.StringPattern{Val: "a\000"}, 1},
{"string - pattern (escapes) - valid", &cases.StringPatternEscapes{Val: "* \\ x"}, 0},
{"string - pattern (escapes) - invalid", &cases.StringPatternEscapes{Val: "invalid"}, 1},
{"string - pattern (escapes) - invalid (empty)", &cases.StringPatternEscapes{Val: ""}, 1},
{"string - prefix - valid", &cases.StringPrefix{Val: "foobar"}, 0},
{"string - prefix - valid (only)", &cases.StringPrefix{Val: "foo"}, 0},
{"string - prefix - invalid", &cases.StringPrefix{Val: "bar"}, 1},
{"string - prefix - invalid (case-sensitive)", &cases.StringPrefix{Val: "Foobar"}, 1},
{"string - contains - valid", &cases.StringContains{Val: "candy bars"}, 0},
{"string - contains - valid (only)", &cases.StringContains{Val: "bar"}, 0},
{"string - contains - invalid", &cases.StringContains{Val: "candy bazs"}, 1},
{"string - contains - invalid (case-sensitive)", &cases.StringContains{Val: "Candy Bars"}, 1},
{"string - not contains - valid", &cases.StringNotContains{Val: "candy bazs"}, 0},
{"string - not contains - valid (case-sensitive)", &cases.StringNotContains{Val: "Candy Bars"}, 0},
{"string - not contains - invalid", &cases.StringNotContains{Val: "candy bars"}, 1},
{"string - not contains - invalid (equal)", &cases.StringNotContains{Val: "bar"}, 1},
{"string - suffix - valid", &cases.StringSuffix{Val: "foobaz"}, 0},
{"string - suffix - valid (only)", &cases.StringSuffix{Val: "baz"}, 0},
{"string - suffix - invalid", &cases.StringSuffix{Val: "foobar"}, 1},
{"string - suffix - invalid (case-sensitive)", &cases.StringSuffix{Val: "FooBaz"}, 1},
{"string - email - valid", &cases.StringEmail{Val: "foo@bar.com"}, 0},
{"string - email - valid (name)", &cases.StringEmail{Val: "John Smith <foo@bar.com>"}, 0},
{"string - email - invalid", &cases.StringEmail{Val: "foobar"}, 1},
{"string - email - invalid (local segment too long)", &cases.StringEmail{Val: "x0123456789012345678901234567890123456789012345678901234567890123456789@example.com"}, 1},
{"string - email - invalid (hostname too long)", &cases.StringEmail{Val: "foo@x0123456789012345678901234567890123456789012345678901234567890123456789.com"}, 1},
{"string - email - invalid (bad hostname)", &cases.StringEmail{Val: "foo@-bar.com"}, 1},
{"string - email - empty", &cases.StringEmail{Val: ""}, 1},
{"string - address - valid hostname", &cases.StringAddress{Val: "example.com"}, 0},
{"string - address - valid hostname (uppercase)", &cases.StringAddress{Val: "ASD.example.com"}, 0},
{"string - address - valid hostname (hyphens)", &cases.StringAddress{Val: "foo-bar.com"}, 0},
{"string - address - valid hostname (trailing dot)", &cases.StringAddress{Val: "example.com."}, 0},
{"string - address - invalid hostname", &cases.StringAddress{Val: "!@#$%^&"}, 1},
{"string - address - invalid hostname (underscore)", &cases.StringAddress{Val: "foo_bar.com"}, 1},
{"string - address - invalid hostname (too long)", &cases.StringAddress{Val: "x0123456789012345678901234567890123456789012345678901234567890123456789.com"}, 1},
{"string - address - invalid hostname (trailing hyphens)", &cases.StringAddress{Val: "foo-bar-.com"}, 1},
{"string - address - invalid hostname (leading hyphens)", &cases.StringAddress{Val: "foo-bar.-com"}, 1},
{"string - address - invalid hostname (empty)", &cases.StringAddress{Val: "asd..asd.com"}, 1},
{"string - address - invalid hostname (IDNs)", &cases.StringAddress{Val: "你好.com"}, 1},
{"string - address - valid ip (v4)", &cases.StringAddress{Val: "192.168.0.1"}, 0},
{"string - address - valid ip (v6)", &cases.StringAddress{Val: "3e::99"}, 0},
{"string - address - invalid ip", &cases.StringAddress{Val: "ff::fff::0b"}, 1},
{"string - hostname - valid", &cases.StringHostname{Val: "example.com"}, 0},
{"string - hostname - valid (uppercase)", &cases.StringHostname{Val: "ASD.example.com"}, 0},
{"string - hostname - valid (hyphens)", &cases.StringHostname{Val: "foo-bar.com"}, 0},
{"string - hostname - valid (trailing dot)", &cases.StringHostname{Val: "example.com."}, 0},
{"string - hostname - invalid", &cases.StringHostname{Val: "!@#$%^&"}, 1},
{"string - hostname - invalid (underscore)", &cases.StringHostname{Val: "foo_bar.com"}, 1},
{"string - hostname - invalid (too long)", &cases.StringHostname{Val: "x0123456789012345678901234567890123456789012345678901234567890123456789.com"}, 1},
{"string - hostname - invalid (trailing hyphens)", &cases.StringHostname{Val: "foo-bar-.com"}, 1},
{"string - hostname - invalid (leading hyphens)", &cases.StringHostname{Val: "foo-bar.-com"}, 1},
{"string - hostname - invalid (empty)", &cases.StringHostname{Val: "asd..asd.com"}, 1},
{"string - hostname - invalid (IDNs)", &cases.StringHostname{Val: "你好.com"}, 1},
{"string - IP - valid (v4)", &cases.StringIP{Val: "192.168.0.1"}, 0},
{"string - IP - valid (v6)", &cases.StringIP{Val: "3e::99"}, 0},
{"string - IP - invalid", &cases.StringIP{Val: "foobar"}, 1},
{"string - IPv4 - valid", &cases.StringIPv4{Val: "192.168.0.1"}, 0},
{"string - IPv4 - invalid", &cases.StringIPv4{Val: "foobar"}, 1},
{"string - IPv4 - invalid (erroneous)", &cases.StringIPv4{Val: "256.0.0.0"}, 1},
{"string - IPv4 - invalid (v6)", &cases.StringIPv4{Val: "3e::99"}, 1},
{"string - IPv6 - valid", &cases.StringIPv6{Val: "2001:0db8:85a3:0000:0000:8a2e:0370:7334"}, 0},
{"string - IPv6 - valid (collapsed)", &cases.StringIPv6{Val: "2001:db8:85a3::8a2e:370:7334"}, 0},
{"string - IPv6 - invalid", &cases.StringIPv6{Val: "foobar"}, 1},
{"string - IPv6 - invalid (v4)", &cases.StringIPv6{Val: "192.168.0.1"}, 1},
{"string - IPv6 - invalid (erroneous)", &cases.StringIPv6{Val: "ff::fff::0b"}, 1},
{"string - URI - valid", &cases.StringURI{Val: "http://example.com/foo/bar?baz=quux"}, 0},
{"string - URI - invalid", &cases.StringURI{Val: "!@#$%^&*%$#"}, 1},
{"string - URI - invalid (relative)", &cases.StringURI{Val: "/foo/bar?baz=quux"}, 1},
// uri_ref additionally accepts relative references (RFC 3986 §4.1).
{"string - URI Ref - valid", &cases.StringURIRef{Val: "http://example.com/foo/bar?baz=quux"}, 0},
{"string - URI Ref - valid (relative)", &cases.StringURIRef{Val: "/foo/bar?baz=quux"}, 0},
{"string - URI Ref - invalid", &cases.StringURIRef{Val: "!@#$%^&*%$#"}, 1},
{"string - UUID - valid (nil)", &cases.StringUUID{Val: "00000000-0000-0000-0000-000000000000"}, 0},
{"string - UUID - valid (v1)", &cases.StringUUID{Val: "b45c0c80-8880-11e9-a5b1-000000000000"}, 0},
{"string - UUID - valid (v1 - case-insensitive)", &cases.StringUUID{Val: "B45C0C80-8880-11E9-A5B1-000000000000"}, 0},
{"string - UUID - valid (v2)", &cases.StringUUID{Val: "b45c0c80-8880-21e9-a5b1-000000000000"}, 0},
{"string - UUID - valid (v2 - case-insensitive)", &cases.StringUUID{Val: "B45C0C80-8880-21E9-A5B1-000000000000"}, 0},
{"string - UUID - valid (v3)", &cases.StringUUID{Val: "a3bb189e-8bf9-3888-9912-ace4e6543002"}, 0},
{"string - UUID - valid (v3 - case-insensitive)", &cases.StringUUID{Val: "A3BB189E-8BF9-3888-9912-ACE4E6543002"}, 0},
{"string - UUID - valid (v4)", &cases.StringUUID{Val: "8b208305-00e8-4460-a440-5e0dcd83bb0a"}, 0},
{"string - UUID - valid (v4 - case-insensitive)", &cases.StringUUID{Val: "8B208305-00E8-4460-A440-5E0DCD83BB0A"}, 0},
{"string - UUID - valid (v5)", &cases.StringUUID{Val: "a6edc906-2f9f-5fb2-a373-efac406f0ef2"}, 0},
{"string - UUID - valid (v5 - case-insensitive)", &cases.StringUUID{Val: "A6EDC906-2F9F-5FB2-A373-EFAC406F0EF2"}, 0},
{"string - UUID - invalid", &cases.StringUUID{Val: "foobar"}, 1},
{"string - UUID - invalid (bad UUID)", &cases.StringUUID{Val: "ffffffff-ffff-ffff-ffff-fffffffffffff"}, 1},
{"string - UUID - valid (ignore_empty)", &cases.StringUUIDIgnore{Val: ""}, 0},
{"string - http header name - valid", &cases.StringHttpHeaderName{Val: "clustername"}, 0},
{"string - http header name - valid", &cases.StringHttpHeaderName{Val: ":path"}, 0},
{"string - http header name - valid (nums)", &cases.StringHttpHeaderName{Val: "cluster-123"}, 0},
{"string - http header name - valid (special token)", &cases.StringHttpHeaderName{Val: "!+#&.%"}, 0},
{"string - http header name - valid (period)", &cases.StringHttpHeaderName{Val: "CLUSTER.NAME"}, 0},
{"string - http header name - invalid", &cases.StringHttpHeaderName{Val: ":"}, 1},
{"string - http header name - invalid", &cases.StringHttpHeaderName{Val: ":path:"}, 1},
{"string - http header name - invalid (space)", &cases.StringHttpHeaderName{Val: "cluster name"}, 1},
{"string - http header name - invalid (return)", &cases.StringHttpHeaderName{Val: "example\r"}, 1},
{"string - http header name - invalid (tab)", &cases.StringHttpHeaderName{Val: "example\t"}, 1},
{"string - http header name - invalid (slash)", &cases.StringHttpHeaderName{Val: "/test/long/url"}, 1},
{"string - http header value - valid", &cases.StringHttpHeaderValue{Val: "cluster.name.123"}, 0},
{"string - http header value - valid (uppercase)", &cases.StringHttpHeaderValue{Val: "/TEST/LONG/URL"}, 0},
{"string - http header value - valid (spaces)", &cases.StringHttpHeaderValue{Val: "cluster name"}, 0},
{"string - http header value - valid (tab)", &cases.StringHttpHeaderValue{Val: "example\t"}, 0},
{"string - http header value - valid (special token)", &cases.StringHttpHeaderValue{Val: "!#%&./+"}, 0},
{"string - http header value - invalid (NUL)", &cases.StringHttpHeaderValue{Val: "foo\u0000bar"}, 1},
{"string - http header value - invalid (DEL)", &cases.StringHttpHeaderValue{Val: "\u007f"}, 1},
{"string - http header value - invalid", &cases.StringHttpHeaderValue{Val: "example\r"}, 1},
{"string - non-strict valid header - valid", &cases.StringValidHeader{Val: "cluster.name.123"}, 0},
{"string - non-strict valid header - valid (uppercase)", &cases.StringValidHeader{Val: "/TEST/LONG/URL"}, 0},
{"string - non-strict valid header - valid (spaces)", &cases.StringValidHeader{Val: "cluster name"}, 0},
{"string - non-strict valid header - valid (tab)", &cases.StringValidHeader{Val: "example\t"}, 0},
{"string - non-strict valid header - valid (DEL)", &cases.StringValidHeader{Val: "\u007f"}, 0},
{"string - non-strict valid header - invalid (NUL)", &cases.StringValidHeader{Val: "foo\u0000bar"}, 1},
{"string - non-strict valid header - invalid (CR)", &cases.StringValidHeader{Val: "example\r"}, 1},
{"string - non-strict valid header - invalid (NL)", &cases.StringValidHeader{Val: "exa\u000Ample"}, 1},
}
// bytesCases covers the bytes constraint rules: const, in/not_in, byte
// length bounds, pattern, prefix/suffix/contains, and the ip/ipv4/ipv6
// well-known formats (raw 4- or 16-byte addresses), plus ignore_empty. The
// trailing int is the expected number of validation failures (0 for valid
// input).
var bytesCases = []TestCase{
{"bytes - none - valid", &cases.BytesNone{Val: []byte("quux")}, 0},
{"bytes - const - valid", &cases.BytesConst{Val: []byte("foo")}, 0},
{"bytes - const - invalid", &cases.BytesConst{Val: []byte("bar")}, 1},
{"bytes - in - valid", &cases.BytesIn{Val: []byte("bar")}, 0},
{"bytes - in - invalid", &cases.BytesIn{Val: []byte("quux")}, 1},
{"bytes - not in - valid", &cases.BytesNotIn{Val: []byte("quux")}, 0},
{"bytes - not in - invalid", &cases.BytesNotIn{Val: []byte("fizz")}, 1},
{"bytes - len - valid", &cases.BytesLen{Val: []byte("baz")}, 0},
{"bytes - len - invalid (lt)", &cases.BytesLen{Val: []byte("go")}, 1},
{"bytes - len - invalid (gt)", &cases.BytesLen{Val: []byte("fizz")}, 1},
{"bytes - min len - valid", &cases.BytesMinLen{Val: []byte("fizz")}, 0},
{"bytes - min len - valid (min)", &cases.BytesMinLen{Val: []byte("baz")}, 0},
{"bytes - min len - invalid", &cases.BytesMinLen{Val: []byte("go")}, 1},
{"bytes - max len - valid", &cases.BytesMaxLen{Val: []byte("foo")}, 0},
{"bytes - max len - valid (max)", &cases.BytesMaxLen{Val: []byte("proto")}, 0},
{"bytes - max len - invalid", &cases.BytesMaxLen{Val: []byte("1234567890")}, 1},
{"bytes - min/max len - valid", &cases.BytesMinMaxLen{Val: []byte("quux")}, 0},
{"bytes - min/max len - valid (min)", &cases.BytesMinMaxLen{Val: []byte("foo")}, 0},
{"bytes - min/max len - valid (max)", &cases.BytesMinMaxLen{Val: []byte("proto")}, 0},
{"bytes - min/max len - invalid (below)", &cases.BytesMinMaxLen{Val: []byte("go")}, 1},
{"bytes - min/max len - invalid (above)", &cases.BytesMinMaxLen{Val: []byte("validate")}, 1},
{"bytes - equal min/max len - valid", &cases.BytesEqualMinMaxLen{Val: []byte("proto")}, 0},
{"bytes - equal min/max len - invalid", &cases.BytesEqualMinMaxLen{Val: []byte("validate")}, 1},
{"bytes - pattern - valid", &cases.BytesPattern{Val: []byte("Foo123")}, 0},
{"bytes - pattern - invalid", &cases.BytesPattern{Val: []byte("你好你好")}, 1},
{"bytes - pattern - invalid (empty)", &cases.BytesPattern{Val: []byte("")}, 1},
{"bytes - prefix - valid", &cases.BytesPrefix{Val: []byte{0x99, 0x9f, 0x08}}, 0},
{"bytes - prefix - valid (only)", &cases.BytesPrefix{Val: []byte{0x99}}, 0},
{"bytes - prefix - invalid", &cases.BytesPrefix{Val: []byte("bar")}, 1},
{"bytes - contains - valid", &cases.BytesContains{Val: []byte("candy bars")}, 0},
{"bytes - contains - valid (only)", &cases.BytesContains{Val: []byte("bar")}, 0},
{"bytes - contains - invalid", &cases.BytesContains{Val: []byte("candy bazs")}, 1},
{"bytes - suffix - valid", &cases.BytesSuffix{Val: []byte{0x62, 0x75, 0x7A, 0x7A}}, 0},
{"bytes - suffix - valid (only)", &cases.BytesSuffix{Val: []byte("\x62\x75\x7A\x7A")}, 0},
{"bytes - suffix - invalid", &cases.BytesSuffix{Val: []byte("foobar")}, 1},
{"bytes - suffix - invalid (case-sensitive)", &cases.BytesSuffix{Val: []byte("FooBaz")}, 1},
// ip rules operate on the raw address bytes: 4 bytes for v4, 16 for v6.
{"bytes - IP - valid (v4)", &cases.BytesIP{Val: []byte{0xC0, 0xA8, 0x00, 0x01}}, 0},
{"bytes - IP - valid (v6)", &cases.BytesIP{Val: []byte("\x20\x01\x0D\xB8\x85\xA3\x00\x00\x00\x00\x8A\x2E\x03\x70\x73\x34")}, 0},
{"bytes - IP - invalid", &cases.BytesIP{Val: []byte("foobar")}, 1},
{"bytes - IPv4 - valid", &cases.BytesIPv4{Val: []byte{0xC0, 0xA8, 0x00, 0x01}}, 0},
{"bytes - IPv4 - invalid", &cases.BytesIPv4{Val: []byte("foobar")}, 1},
{"bytes - IPv4 - invalid (v6)", &cases.BytesIPv4{Val: []byte("\x20\x01\x0D\xB8\x85\xA3\x00\x00\x00\x00\x8A\x2E\x03\x70\x73\x34")}, 1},
{"bytes - IPv6 - valid", &cases.BytesIPv6{Val: []byte("\x20\x01\x0D\xB8\x85\xA3\x00\x00\x00\x00\x8A\x2E\x03\x70\x73\x34")}, 0},
{"bytes - IPv6 - invalid", &cases.BytesIPv6{Val: []byte("fooar")}, 1},
{"bytes - IPv6 - invalid (v4)", &cases.BytesIPv6{Val: []byte{0xC0, 0xA8, 0x00, 0x01}}, 1},
// ignore_empty: a nil/empty value is exempt from the format check.
{"bytes - IPv6 - valid (ignore_empty)", &cases.BytesIPv6Ignore{Val: nil}, 0},
}
// enumCases exercises enum validation rules — const, defined_only, in and
// not_in — across plain enums, aliased enums, enums imported from other
// packages, and repeated/map fields of enum type.
// Each entry is {test name, message under test, expected failure count}.
var enumCases = []TestCase{
{"enum - none - valid", &cases.EnumNone{Val: cases.TestEnum_ONE}, 0},
{"enum - const - valid", &cases.EnumConst{Val: cases.TestEnum_TWO}, 0},
{"enum - const - invalid", &cases.EnumConst{Val: cases.TestEnum_ONE}, 1},
{"enum alias - const - valid", &cases.EnumAliasConst{Val: cases.TestEnumAlias_C}, 0},
{"enum alias - const - valid (alias)", &cases.EnumAliasConst{Val: cases.TestEnumAlias_GAMMA}, 0},
{"enum alias - const - invalid", &cases.EnumAliasConst{Val: cases.TestEnumAlias_ALPHA}, 1},
{"enum - defined_only - valid", &cases.EnumDefined{Val: 0}, 0},
// math.MaxInt32 is not a defined enum value, so defined_only rejects it.
{"enum - defined_only - invalid", &cases.EnumDefined{Val: math.MaxInt32}, 1},
{"enum alias - defined_only - valid", &cases.EnumAliasDefined{Val: 1}, 0},
{"enum alias - defined_only - invalid", &cases.EnumAliasDefined{Val: math.MaxInt32}, 1},
{"enum - in - valid", &cases.EnumIn{Val: cases.TestEnum_TWO}, 0},
{"enum - in - invalid", &cases.EnumIn{Val: cases.TestEnum_ONE}, 1},
{"enum alias - in - valid", &cases.EnumAliasIn{Val: cases.TestEnumAlias_A}, 0},
{"enum alias - in - valid (alias)", &cases.EnumAliasIn{Val: cases.TestEnumAlias_ALPHA}, 0},
{"enum alias - in - invalid", &cases.EnumAliasIn{Val: cases.TestEnumAlias_BETA}, 1},
{"enum - not in - valid", &cases.EnumNotIn{Val: cases.TestEnum_ZERO}, 0},
{"enum - not in - valid (undefined)", &cases.EnumNotIn{Val: math.MaxInt32}, 0},
{"enum - not in - invalid", &cases.EnumNotIn{Val: cases.TestEnum_ONE}, 1},
{"enum alias - not in - valid", &cases.EnumAliasNotIn{Val: cases.TestEnumAlias_ALPHA}, 0},
{"enum alias - not in - invalid", &cases.EnumAliasNotIn{Val: cases.TestEnumAlias_B}, 1},
{"enum alias - not in - invalid (alias)", &cases.EnumAliasNotIn{Val: cases.TestEnumAlias_BETA}, 1},
{"enum external - defined_only - valid", &cases.EnumExternal{Val: other_package.Embed_VALUE}, 0},
{"enum external - defined_only - invalid", &cases.EnumExternal{Val: math.MaxInt32}, 1},
{"enum external - in - valid", &cases.EnumExternal3{Foo: other_package.Embed_ZERO}, 0},
{"enum external - in - invalid", &cases.EnumExternal3{Foo: other_package.Embed_ONE}, 1},
{"enum external - not in - valid", &cases.EnumExternal3{Bar: yet_another_package.Embed_ZERO}, 0},
{"enum external - not in - invalid", &cases.EnumExternal3{Bar: yet_another_package.Embed_ONE}, 1},
{"enum external - const - valid", &cases.EnumExternal4{SortDirection: sort.Direction_ASC}, 0},
{"enum external - const - invalid", &cases.EnumExternal4{SortDirection: sort.Direction_DESC}, 1},
{"enum repeated - defined_only - valid", &cases.RepeatedEnumDefined{Val: []cases.TestEnum{cases.TestEnum_ONE, cases.TestEnum_TWO}}, 0},
{"enum repeated - defined_only - invalid", &cases.RepeatedEnumDefined{Val: []cases.TestEnum{cases.TestEnum_ONE, math.MaxInt32}}, 1},
{"enum repeated (external) - defined_only - valid", &cases.RepeatedExternalEnumDefined{Val: []other_package.Embed_Enumerated{other_package.Embed_VALUE}}, 0},
{"enum repeated (external) - defined_only - invalid", &cases.RepeatedExternalEnumDefined{Val: []other_package.Embed_Enumerated{math.MaxInt32}}, 1},
{"enum repeated (another external) - defined_only - valid", &cases.RepeatedYetAnotherExternalEnumDefined{Val: []yet_another_package.Embed_Enumerated{yet_another_package.Embed_VALUE}}, 0},
{"enum repeated (external) - in - valid", &cases.RepeatedEnumExternal{Foo: []other_package.Embed_FooNumber{other_package.Embed_ZERO, other_package.Embed_TWO}}, 0},
{"enum repeated (external) - in - invalid", &cases.RepeatedEnumExternal{Foo: []other_package.Embed_FooNumber{other_package.Embed_ONE}}, 1},
{"enum repeated (external) - not in - valid", &cases.RepeatedEnumExternal{Bar: []yet_another_package.Embed_BarNumber{yet_another_package.Embed_ZERO, yet_another_package.Embed_TWO}}, 0},
{"enum repeated (external) - not in - invalid", &cases.RepeatedEnumExternal{Bar: []yet_another_package.Embed_BarNumber{yet_another_package.Embed_ONE}}, 1},
{"enum map - defined_only - valid", &cases.MapEnumDefined{Val: map[string]cases.TestEnum{"foo": cases.TestEnum_TWO}}, 0},
{"enum map - defined_only - invalid", &cases.MapEnumDefined{Val: map[string]cases.TestEnum{"foo": math.MaxInt32}}, 1},
{"enum map (external) - defined_only - valid", &cases.MapExternalEnumDefined{Val: map[string]other_package.Embed_Enumerated{"foo": other_package.Embed_VALUE}}, 0},
{"enum map (external) - defined_only - invalid", &cases.MapExternalEnumDefined{Val: map[string]other_package.Embed_Enumerated{"foo": math.MaxInt32}}, 1},
}
// messageCases exercises message-field validation: disabled/ignored messages,
// transitive (nested) validation, skip, required (plain and inside a oneof),
// and cross-package embedded messages.
// Each entry is {test name, message under test, expected failure count}.
var messageCases = []TestCase{
	{"message - none - valid", &cases.MessageNone{Val: &cases.MessageNone_NoneMsg{}}, 0},
	{"message - none - valid (unset)", &cases.MessageNone{}, 0},
	{"message - disabled - valid", &cases.MessageDisabled{Val: 456}, 0},
	{"message - disabled - valid (invalid field)", &cases.MessageDisabled{Val: 0}, 0},
	{"message - ignored - valid", &cases.MessageIgnored{Val: 456}, 0},
	{"message - ignored - valid (invalid field)", &cases.MessageIgnored{Val: 0}, 0},
	{"message - field - valid", &cases.Message{Val: &cases.TestMsg{Const: "foo"}}, 0},
	{"message - field - valid (unset)", &cases.Message{}, 0},
	{"message - field - invalid", &cases.Message{Val: &cases.TestMsg{}}, 1},
	{"message - field - invalid (transitive)", &cases.Message{Val: &cases.TestMsg{Const: "foo", Nested: &cases.TestMsg{}}}, 1},
	{"message - skip - valid", &cases.MessageSkip{Val: &cases.TestMsg{}}, 0},
	{"message - required - valid", &cases.MessageRequired{Val: &cases.TestMsg{Const: "foo"}}, 0},
	{"message - required - valid (oneof)", &cases.MessageRequiredOneof{One: &cases.MessageRequiredOneof_Val{Val: &cases.TestMsg{Const: "foo"}}}, 0},
	{"message - required - invalid", &cases.MessageRequired{}, 1},
	{"message - required - invalid (oneof)", &cases.MessageRequiredOneof{}, 1},
	{"message - cross-package embed none - valid", &cases.MessageCrossPackage{Val: &other_package.Embed{Val: 1}}, 0},
	{"message - cross-package embed none - valid (nil)", &cases.MessageCrossPackage{}, 0},
	// An empty Embed violates its own field rules, so one failure is expected;
	// the name previously read "valid (empty)", contradicting the count.
	{"message - cross-package embed none - invalid (empty)", &cases.MessageCrossPackage{Val: &other_package.Embed{}}, 1},
	{"message - cross-package embed none - invalid", &cases.MessageCrossPackage{Val: &other_package.Embed{Val: -1}}, 1},
	// Renamed to avoid duplicating the MessageRequired test names above:
	// these cover the "required but optional" variant.
	{"message - required but optional - valid", &cases.MessageRequiredButOptional{Val: &cases.TestMsg{Const: "foo"}}, 0},
	{"message - required but optional - valid (unset)", &cases.MessageRequiredButOptional{}, 0},
}
// repeatedCases exercises repeated-field validation: element count bounds
// (min_items/max_items/exact), uniqueness, per-item rules (numeric, pattern,
// in/not_in, enum, Any), embedded-message elements (including cross-package
// and skip), and combinations of count and per-item length rules.
// Each entry is {test name, message under test, expected failure count}.
var repeatedCases = []TestCase{
{"repeated - none - valid", &cases.RepeatedNone{Val: []int64{1, 2, 3}}, 0},
{"repeated - embed none - valid", &cases.RepeatedEmbedNone{Val: []*cases.Embed{{Val: 1}}}, 0},
{"repeated - embed none - valid (nil)", &cases.RepeatedEmbedNone{}, 0},
{"repeated - embed none - valid (empty)", &cases.RepeatedEmbedNone{Val: []*cases.Embed{}}, 0},
{"repeated - embed none - invalid", &cases.RepeatedEmbedNone{Val: []*cases.Embed{{Val: -1}}}, 1},
{"repeated - cross-package embed none - valid", &cases.RepeatedEmbedCrossPackageNone{Val: []*other_package.Embed{{Val: 1}}}, 0},
{"repeated - cross-package embed none - valid (nil)", &cases.RepeatedEmbedCrossPackageNone{}, 0},
{"repeated - cross-package embed none - valid (empty)", &cases.RepeatedEmbedCrossPackageNone{Val: []*other_package.Embed{}}, 0},
{"repeated - cross-package embed none - invalid", &cases.RepeatedEmbedCrossPackageNone{Val: []*other_package.Embed{{Val: -1}}}, 1},
{"repeated - min - valid", &cases.RepeatedMin{Val: []*cases.Embed{{Val: 1}, {Val: 2}, {Val: 3}}}, 0},
{"repeated - min - valid (equal)", &cases.RepeatedMin{Val: []*cases.Embed{{Val: 1}, {Val: 2}}}, 0},
{"repeated - min - invalid", &cases.RepeatedMin{Val: []*cases.Embed{{Val: 1}}}, 1},
{"repeated - min - invalid (element)", &cases.RepeatedMin{Val: []*cases.Embed{{Val: 1}, {Val: -1}}}, 1},
{"repeated - max - valid", &cases.RepeatedMax{Val: []float64{1, 2}}, 0},
{"repeated - max - valid (equal)", &cases.RepeatedMax{Val: []float64{1, 2, 3}}, 0},
{"repeated - max - invalid", &cases.RepeatedMax{Val: []float64{1, 2, 3, 4}}, 1},
{"repeated - min/max - valid", &cases.RepeatedMinMax{Val: []int32{1, 2, 3}}, 0},
{"repeated - min/max - valid (min)", &cases.RepeatedMinMax{Val: []int32{1, 2}}, 0},
{"repeated - min/max - valid (max)", &cases.RepeatedMinMax{Val: []int32{1, 2, 3, 4}}, 0},
{"repeated - min/max - invalid (below)", &cases.RepeatedMinMax{Val: []int32{}}, 1},
{"repeated - min/max - invalid (above)", &cases.RepeatedMinMax{Val: []int32{1, 2, 3, 4, 5}}, 1},
{"repeated - exact - valid", &cases.RepeatedExact{Val: []uint32{1, 2, 3}}, 0},
{"repeated - exact - invalid (below)", &cases.RepeatedExact{Val: []uint32{1, 2}}, 1},
{"repeated - exact - invalid (above)", &cases.RepeatedExact{Val: []uint32{1, 2, 3, 4}}, 1},
{"repeated - unique - valid", &cases.RepeatedUnique{Val: []string{"foo", "bar", "baz"}}, 0},
{"repeated - unique - valid (empty)", &cases.RepeatedUnique{}, 0},
{"repeated - unique - valid (case sensitivity)", &cases.RepeatedUnique{Val: []string{"foo", "Foo"}}, 0},
{"repeated - unique - invalid", &cases.RepeatedUnique{Val: []string{"foo", "bar", "foo", "baz"}}, 1},
{"repeated - items - valid", &cases.RepeatedItemRule{Val: []float32{1, 2, 3}}, 0},
{"repeated - items - valid (empty)", &cases.RepeatedItemRule{Val: []float32{}}, 0},
{"repeated - items - valid (pattern)", &cases.RepeatedItemPattern{Val: []string{"Alpha", "Beta123"}}, 0},
{"repeated - items - invalid", &cases.RepeatedItemRule{Val: []float32{1, -2, 3}}, 1},
{"repeated - items - invalid (pattern)", &cases.RepeatedItemPattern{Val: []string{"Alpha", "!@#$%^&*()"}}, 1},
{"repeated - items - invalid (in)", &cases.RepeatedItemIn{Val: []string{"baz"}}, 1},
{"repeated - items - valid (in)", &cases.RepeatedItemIn{Val: []string{"foo"}}, 0},
{"repeated - items - invalid (not_in)", &cases.RepeatedItemNotIn{Val: []string{"foo"}}, 1},
{"repeated - items - valid (not_in)", &cases.RepeatedItemNotIn{Val: []string{"baz"}}, 0},
{"repeated - items - invalid (enum in)", &cases.RepeatedEnumIn{Val: []cases.AnEnum{1}}, 1},
{"repeated - items - valid (enum in)", &cases.RepeatedEnumIn{Val: []cases.AnEnum{0}}, 0},
{"repeated - items - invalid (enum not_in)", &cases.RepeatedEnumNotIn{Val: []cases.AnEnum{0}}, 1},
{"repeated - items - valid (enum not_in)", &cases.RepeatedEnumNotIn{Val: []cases.AnEnum{1}}, 0},
{"repeated - items - invalid (embedded enum in)", &cases.RepeatedEmbeddedEnumIn{Val: []cases.RepeatedEmbeddedEnumIn_AnotherInEnum{1}}, 1},
{"repeated - items - valid (embedded enum in)", &cases.RepeatedEmbeddedEnumIn{Val: []cases.RepeatedEmbeddedEnumIn_AnotherInEnum{0}}, 0},
{"repeated - items - invalid (embedded enum not_in)", &cases.RepeatedEmbeddedEnumNotIn{Val: []cases.RepeatedEmbeddedEnumNotIn_AnotherNotInEnum{0}}, 1},
{"repeated - items - valid (embedded enum not_in)", &cases.RepeatedEmbeddedEnumNotIn{Val: []cases.RepeatedEmbeddedEnumNotIn_AnotherNotInEnum{1}}, 0},
{"repeated - items - invalid (any in)", &cases.RepeatedAnyIn{Val: []*anypb.Any{{TypeUrl: "type.googleapis.com/google.protobuf.Timestamp"}}}, 1},
{"repeated - items - valid (any in)", &cases.RepeatedAnyIn{Val: []*anypb.Any{{TypeUrl: "type.googleapis.com/google.protobuf.Duration"}}}, 0},
{"repeated - items - invalid (any not_in)", &cases.RepeatedAnyNotIn{Val: []*anypb.Any{{TypeUrl: "type.googleapis.com/google.protobuf.Timestamp"}}}, 1},
{"repeated - items - valid (any not_in)", &cases.RepeatedAnyNotIn{Val: []*anypb.Any{{TypeUrl: "type.googleapis.com/google.protobuf.Duration"}}}, 0},
{"repeated - embed skip - valid", &cases.RepeatedEmbedSkip{Val: []*cases.Embed{{Val: 1}}}, 0},
{"repeated - embed skip - valid (invalid element)", &cases.RepeatedEmbedSkip{Val: []*cases.Embed{{Val: -1}}}, 0},
{"repeated - min and items len - valid", &cases.RepeatedMinAndItemLen{Val: []string{"aaa", "bbb"}}, 0},
{"repeated - min and items len - invalid (min)", &cases.RepeatedMinAndItemLen{Val: []string{}}, 1},
{"repeated - min and items len - invalid (len)", &cases.RepeatedMinAndItemLen{Val: []string{"x"}}, 1},
{"repeated - min and max items len - valid", &cases.RepeatedMinAndMaxItemLen{Val: []string{"aaa", "bbb"}}, 0},
{"repeated - min and max items len - invalid (min_len)", &cases.RepeatedMinAndMaxItemLen{}, 1},
{"repeated - min and max items len - invalid (max_len)", &cases.RepeatedMinAndMaxItemLen{Val: []string{"aaa", "bbb", "ccc", "ddd"}}, 1},
{"repeated - duration - gte - valid", &cases.RepeatedDuration{Val: []*durationpb.Duration{{Seconds: 3}}}, 0},
{"repeated - duration - gte - valid (empty)", &cases.RepeatedDuration{}, 0},
{"repeated - duration - gte - valid (equal)", &cases.RepeatedDuration{Val: []*durationpb.Duration{{Nanos: 1000000}}}, 0},
{"repeated - duration - gte - invalid", &cases.RepeatedDuration{Val: []*durationpb.Duration{{Seconds: -1}}}, 1},
{"repeated - exact - valid (ignore_empty)", &cases.RepeatedExactIgnore{Val: nil}, 0},
}
// mapCases exercises map-field validation: pair-count bounds
// (min_pairs/max_pairs/exact), no_sparse, per-key rules (numeric, pattern,
// in/not_in), per-value rules, recursive message values, ignore_empty, and
// multiple map fields on one message.
// Each entry is {test name, message under test, expected failure count}.
var mapCases = []TestCase{
{"map - none - valid", &cases.MapNone{Val: map[uint32]bool{123: true, 456: false}}, 0},
{"map - min pairs - valid", &cases.MapMin{Val: map[int32]float32{1: 2, 3: 4, 5: 6}}, 0},
{"map - min pairs - valid (equal)", &cases.MapMin{Val: map[int32]float32{1: 2, 3: 4}}, 0},
{"map - min pairs - invalid", &cases.MapMin{Val: map[int32]float32{1: 2}}, 1},
{"map - max pairs - valid", &cases.MapMax{Val: map[int64]float64{1: 2, 3: 4}}, 0},
{"map - max pairs - valid (equal)", &cases.MapMax{Val: map[int64]float64{1: 2, 3: 4, 5: 6}}, 0},
{"map - max pairs - invalid", &cases.MapMax{Val: map[int64]float64{1: 2, 3: 4, 5: 6, 7: 8}}, 1},
{"map - min/max - valid", &cases.MapMinMax{Val: map[string]bool{"a": true, "b": false, "c": true}}, 0},
{"map - min/max - valid (min)", &cases.MapMinMax{Val: map[string]bool{"a": true, "b": false}}, 0},
{"map - min/max - valid (max)", &cases.MapMinMax{Val: map[string]bool{"a": true, "b": false, "c": true, "d": false}}, 0},
{"map - min/max - invalid (below)", &cases.MapMinMax{Val: map[string]bool{}}, 1},
{"map - min/max - invalid (above)", &cases.MapMinMax{Val: map[string]bool{"a": true, "b": false, "c": true, "d": false, "e": true}}, 1},
{"map - exact - valid", &cases.MapExact{Val: map[uint64]string{1: "a", 2: "b", 3: "c"}}, 0},
{"map - exact - invalid (below)", &cases.MapExact{Val: map[uint64]string{1: "a", 2: "b"}}, 1},
{"map - exact - invalid (above)", &cases.MapExact{Val: map[uint64]string{1: "a", 2: "b", 3: "c", 4: "d"}}, 1},
{"map - no sparse - valid", &cases.MapNoSparse{Val: map[uint32]*cases.MapNoSparse_Msg{1: {}, 2: {}}}, 0},
{"map - no sparse - valid (empty)", &cases.MapNoSparse{Val: map[uint32]*cases.MapNoSparse_Msg{}}, 0},
// sparse maps are no longer supported, so this case is no longer possible
//{"map - no sparse - invalid", &cases.MapNoSparse{Val: map[uint32]*cases.MapNoSparse_Msg{1: {}, 2: nil}}, 1},
{"map - keys - valid", &cases.MapKeys{Val: map[int64]string{-1: "a", -2: "b"}}, 0},
{"map - keys - valid (empty)", &cases.MapKeys{Val: map[int64]string{}}, 0},
{"map - keys - valid (pattern)", &cases.MapKeysPattern{Val: map[string]string{"A": "a"}}, 0},
{"map - keys - valid (in)", &cases.MapKeysIn{Val: map[string]string{"foo": "value"}}, 0},
{"map - keys - valid (not_in)", &cases.MapKeysNotIn{Val: map[string]string{"baz": "value"}}, 0},
{"map - keys - invalid", &cases.MapKeys{Val: map[int64]string{1: "a"}}, 1},
{"map - keys - invalid (pattern)", &cases.MapKeysPattern{Val: map[string]string{"A": "a", "!@#$%^&*()": "b"}}, 1},
{"map - keys - invalid (in)", &cases.MapKeysIn{Val: map[string]string{"baz": "value"}}, 1},
{"map - keys - invalid (not_in)", &cases.MapKeysNotIn{Val: map[string]string{"foo": "value"}}, 1},
{"map - values - valid", &cases.MapValues{Val: map[string]string{"a": "Alpha", "b": "Beta"}}, 0},
{"map - values - valid (empty)", &cases.MapValues{Val: map[string]string{}}, 0},
{"map - values - valid (pattern)", &cases.MapValuesPattern{Val: map[string]string{"a": "A"}}, 0},
// Both values fail the rule, hence the expected count of 2.
{"map - values - invalid", &cases.MapValues{Val: map[string]string{"a": "A", "b": "B"}}, 2},
{"map - values - invalid (pattern)", &cases.MapValuesPattern{Val: map[string]string{"a": "A", "b": "!@#$%^&*()"}}, 1},
{"map - recursive - valid", &cases.MapRecursive{Val: map[uint32]*cases.MapRecursive_Msg{1: {Val: "abc"}}}, 0},
{"map - recursive - invalid", &cases.MapRecursive{Val: map[uint32]*cases.MapRecursive_Msg{1: {}}}, 1},
{"map - exact - valid (ignore_empty)", &cases.MapExactIgnore{Val: nil}, 0},
{"map - multiple - valid", &cases.MultipleMaps{First: map[uint32]string{1: "a", 2: "b"}, Second: map[int32]bool{-1: true, -2: false}}, 0},
}
// oneofCases exercises oneof validation: per-member field rules, the
// required flag on the oneof itself, and ignore_empty on members.
// Each entry is {test name, message under test, expected failure count}.
var oneofCases = []TestCase{
	{"oneof - none - valid", &cases.OneOfNone{O: &cases.OneOfNone_X{X: "foo"}}, 0},
	{"oneof - none - valid (empty)", &cases.OneOfNone{}, 0},
	{"oneof - field - valid (X)", &cases.OneOf{O: &cases.OneOf_X{X: "foobar"}}, 0},
	{"oneof - field - valid (Y)", &cases.OneOf{O: &cases.OneOf_Y{Y: 123}}, 0},
	{"oneof - field - valid (Z)", &cases.OneOf{O: &cases.OneOf_Z{Z: &cases.TestOneOfMsg{Val: true}}}, 0},
	{"oneof - field - valid (empty)", &cases.OneOf{}, 0},
	{"oneof - field - invalid (X)", &cases.OneOf{O: &cases.OneOf_X{X: "fizzbuzz"}}, 1},
	{"oneof - field - invalid (Y)", &cases.OneOf{O: &cases.OneOf_Y{Y: -1}}, 1},
	// Typo fix: was "oneof - filed - invalid (Z)".
	{"oneof - field - invalid (Z)", &cases.OneOf{O: &cases.OneOf_Z{Z: &cases.TestOneOfMsg{}}}, 1},
	{"oneof - required - valid", &cases.OneOfRequired{O: &cases.OneOfRequired_X{X: ""}}, 0},
	// Typo fix: was "oneof - require - invalid".
	{"oneof - required - invalid", &cases.OneOfRequired{}, 1},
	{"oneof - ignore_empty - valid (X)", &cases.OneOfIgnoreEmpty{O: &cases.OneOfIgnoreEmpty_X{X: ""}}, 0},
	{"oneof - ignore_empty - valid (Y)", &cases.OneOfIgnoreEmpty{O: &cases.OneOfIgnoreEmpty_Y{Y: []byte("")}}, 0},
	{"oneof - ignore_empty - valid (Z)", &cases.OneOfIgnoreEmpty{O: &cases.OneOfIgnoreEmpty_Z{Z: 0}}, 0},
}
// wrapperCases exercises validation of well-known wrapper types
// (google.protobuf.*Value): rules applied to the wrapped scalar, nil wrappers
// treated as unset (valid unless required), and the required flag.
// Each entry is {test name, message under test, expected failure count}.
var wrapperCases = []TestCase{
{"wrapper - none - valid", &cases.WrapperNone{Val: &wrapperspb.Int32Value{Value: 123}}, 0},
{"wrapper - none - valid (empty)", &cases.WrapperNone{Val: nil}, 0},
{"wrapper - float - valid", &cases.WrapperFloat{Val: &wrapperspb.FloatValue{Value: 1}}, 0},
{"wrapper - float - valid (empty)", &cases.WrapperFloat{Val: nil}, 0},
{"wrapper - float - invalid", &cases.WrapperFloat{Val: &wrapperspb.FloatValue{Value: 0}}, 1},
{"wrapper - double - valid", &cases.WrapperDouble{Val: &wrapperspb.DoubleValue{Value: 1}}, 0},
{"wrapper - double - valid (empty)", &cases.WrapperDouble{Val: nil}, 0},
{"wrapper - double - invalid", &cases.WrapperDouble{Val: &wrapperspb.DoubleValue{Value: 0}}, 1},
{"wrapper - int64 - valid", &cases.WrapperInt64{Val: &wrapperspb.Int64Value{Value: 1}}, 0},
{"wrapper - int64 - valid (empty)", &cases.WrapperInt64{Val: nil}, 0},
{"wrapper - int64 - invalid", &cases.WrapperInt64{Val: &wrapperspb.Int64Value{Value: 0}}, 1},
{"wrapper - int32 - valid", &cases.WrapperInt32{Val: &wrapperspb.Int32Value{Value: 1}}, 0},
{"wrapper - int32 - valid (empty)", &cases.WrapperInt32{Val: nil}, 0},
{"wrapper - int32 - invalid", &cases.WrapperInt32{Val: &wrapperspb.Int32Value{Value: 0}}, 1},
{"wrapper - uint64 - valid", &cases.WrapperUInt64{Val: &wrapperspb.UInt64Value{Value: 1}}, 0},
{"wrapper - uint64 - valid (empty)", &cases.WrapperUInt64{Val: nil}, 0},
{"wrapper - uint64 - invalid", &cases.WrapperUInt64{Val: &wrapperspb.UInt64Value{Value: 0}}, 1},
{"wrapper - uint32 - valid", &cases.WrapperUInt32{Val: &wrapperspb.UInt32Value{Value: 1}}, 0},
{"wrapper - uint32 - valid (empty)", &cases.WrapperUInt32{Val: nil}, 0},
{"wrapper - uint32 - invalid", &cases.WrapperUInt32{Val: &wrapperspb.UInt32Value{Value: 0}}, 1},
{"wrapper - bool - valid", &cases.WrapperBool{Val: &wrapperspb.BoolValue{Value: true}}, 0},
{"wrapper - bool - valid (empty)", &cases.WrapperBool{Val: nil}, 0},
{"wrapper - bool - invalid", &cases.WrapperBool{Val: &wrapperspb.BoolValue{Value: false}}, 1},
{"wrapper - string - valid", &cases.WrapperString{Val: &wrapperspb.StringValue{Value: "foobar"}}, 0},
{"wrapper - string - valid (empty)", &cases.WrapperString{Val: nil}, 0},
{"wrapper - string - invalid", &cases.WrapperString{Val: &wrapperspb.StringValue{Value: "fizzbuzz"}}, 1},
{"wrapper - bytes - valid", &cases.WrapperBytes{Val: &wrapperspb.BytesValue{Value: []byte("foo")}}, 0},
{"wrapper - bytes - valid (empty)", &cases.WrapperBytes{Val: nil}, 0},
{"wrapper - bytes - invalid", &cases.WrapperBytes{Val: &wrapperspb.BytesValue{Value: []byte("x")}}, 1},
{"wrapper - required - string - valid", &cases.WrapperRequiredString{Val: &wrapperspb.StringValue{Value: "bar"}}, 0},
{"wrapper - required - string - invalid", &cases.WrapperRequiredString{Val: &wrapperspb.StringValue{Value: "foo"}}, 1},
{"wrapper - required - string - invalid (empty)", &cases.WrapperRequiredString{}, 1},
{"wrapper - required - string (empty) - valid", &cases.WrapperRequiredEmptyString{Val: &wrapperspb.StringValue{Value: ""}}, 0},
{"wrapper - required - string (empty) - invalid", &cases.WrapperRequiredEmptyString{Val: &wrapperspb.StringValue{Value: "foo"}}, 1},
{"wrapper - required - string (empty) - invalid (empty)", &cases.WrapperRequiredEmptyString{}, 1},
{"wrapper - optional - string (uuid) - valid", &cases.WrapperOptionalUuidString{Val: &wrapperspb.StringValue{Value: "8b72987b-024a-43b3-b4cf-647a1f925c5d"}}, 0},
{"wrapper - optional - string (uuid) - valid (empty)", &cases.WrapperOptionalUuidString{}, 0},
{"wrapper - optional - string (uuid) - invalid", &cases.WrapperOptionalUuidString{Val: &wrapperspb.StringValue{Value: "foo"}}, 1},
{"wrapper - required - float - valid", &cases.WrapperRequiredFloat{Val: &wrapperspb.FloatValue{Value: 1}}, 0},
{"wrapper - required - float - invalid", &cases.WrapperRequiredFloat{Val: &wrapperspb.FloatValue{Value: -5}}, 1},
{"wrapper - required - float - invalid (empty)", &cases.WrapperRequiredFloat{}, 1},
}
// durationCases exercises google.protobuf.Duration validation: required,
// const, in/not_in, and the full range of inclusive/exclusive comparison
// rules (lt/lte/gt/gte and their combinations).
// Each entry is {test name, message under test, expected failure count}.
var durationCases = []TestCase{
	{"duration - none - valid", &cases.DurationNone{Val: &durationpb.Duration{Seconds: 123}}, 0},
	{"duration - required - valid", &cases.DurationRequired{Val: &durationpb.Duration{}}, 0},
	{"duration - required - invalid", &cases.DurationRequired{Val: nil}, 1},
	{"duration - const - valid", &cases.DurationConst{Val: &durationpb.Duration{Seconds: 3}}, 0},
	{"duration - const - valid (empty)", &cases.DurationConst{}, 0},
	{"duration - const - invalid", &cases.DurationConst{Val: &durationpb.Duration{Nanos: 3}}, 1},
	{"duration - in - valid", &cases.DurationIn{Val: &durationpb.Duration{Seconds: 1}}, 0},
	{"duration - in - valid (empty)", &cases.DurationIn{}, 0},
	{"duration - in - invalid", &cases.DurationIn{Val: &durationpb.Duration{}}, 1},
	{"duration - not in - valid", &cases.DurationNotIn{Val: &durationpb.Duration{Nanos: 1}}, 0},
	{"duration - not in - valid (empty)", &cases.DurationNotIn{}, 0},
	{"duration - not in - invalid", &cases.DurationNotIn{Val: &durationpb.Duration{}}, 1},
	{"duration - lt - valid", &cases.DurationLT{Val: &durationpb.Duration{Nanos: -1}}, 0},
	{"duration - lt - valid (empty)", &cases.DurationLT{}, 0},
	{"duration - lt - invalid (equal)", &cases.DurationLT{Val: &durationpb.Duration{}}, 1},
	{"duration - lt - invalid", &cases.DurationLT{Val: &durationpb.Duration{Seconds: 1}}, 1},
	{"duration - lte - valid", &cases.DurationLTE{Val: &durationpb.Duration{}}, 0},
	{"duration - lte - valid (empty)", &cases.DurationLTE{}, 0},
	{"duration - lte - valid (equal)", &cases.DurationLTE{Val: &durationpb.Duration{Seconds: 1}}, 0},
	{"duration - lte - invalid", &cases.DurationLTE{Val: &durationpb.Duration{Seconds: 1, Nanos: 1}}, 1},
	{"duration - gt - valid", &cases.DurationGT{Val: &durationpb.Duration{Seconds: 1}}, 0},
	{"duration - gt - valid (empty)", &cases.DurationGT{}, 0},
	{"duration - gt - invalid (equal)", &cases.DurationGT{Val: &durationpb.Duration{Nanos: 1000}}, 1},
	{"duration - gt - invalid", &cases.DurationGT{Val: &durationpb.Duration{}}, 1},
	{"duration - gte - valid", &cases.DurationGTE{Val: &durationpb.Duration{Seconds: 3}}, 0},
	{"duration - gte - valid (empty)", &cases.DurationGTE{}, 0},
	{"duration - gte - valid (equal)", &cases.DurationGTE{Val: &durationpb.Duration{Nanos: 1000000}}, 0},
	{"duration - gte - invalid", &cases.DurationGTE{Val: &durationpb.Duration{Seconds: -1}}, 1},
	{"duration - gt & lt - valid", &cases.DurationGTLT{Val: &durationpb.Duration{Nanos: 1000}}, 0},
	{"duration - gt & lt - valid (empty)", &cases.DurationGTLT{}, 0},
	{"duration - gt & lt - invalid (above)", &cases.DurationGTLT{Val: &durationpb.Duration{Seconds: 1000}}, 1},
	{"duration - gt & lt - invalid (below)", &cases.DurationGTLT{Val: &durationpb.Duration{Nanos: -1000}}, 1},
	{"duration - gt & lt - invalid (max)", &cases.DurationGTLT{Val: &durationpb.Duration{Seconds: 1}}, 1},
	{"duration - gt & lt - invalid (min)", &cases.DurationGTLT{Val: &durationpb.Duration{}}, 1},
	{"duration - exclusive gt & lt - valid (empty)", &cases.DurationExLTGT{}, 0},
	{"duration - exclusive gt & lt - valid (above)", &cases.DurationExLTGT{Val: &durationpb.Duration{Seconds: 2}}, 0},
	{"duration - exclusive gt & lt - valid (below)", &cases.DurationExLTGT{Val: &durationpb.Duration{Nanos: -1}}, 0},
	{"duration - exclusive gt & lt - invalid", &cases.DurationExLTGT{Val: &durationpb.Duration{Nanos: 1000}}, 1},
	{"duration - exclusive gt & lt - invalid (max)", &cases.DurationExLTGT{Val: &durationpb.Duration{Seconds: 1}}, 1},
	{"duration - exclusive gt & lt - invalid (min)", &cases.DurationExLTGT{Val: &durationpb.Duration{}}, 1},
	{"duration - gte & lte - valid", &cases.DurationGTELTE{Val: &durationpb.Duration{Seconds: 60, Nanos: 1}}, 0},
	{"duration - gte & lte - valid (empty)", &cases.DurationGTELTE{}, 0},
	{"duration - gte & lte - valid (max)", &cases.DurationGTELTE{Val: &durationpb.Duration{Seconds: 3600}}, 0},
	{"duration - gte & lte - valid (min)", &cases.DurationGTELTE{Val: &durationpb.Duration{Seconds: 60}}, 0},
	{"duration - gte & lte - invalid (above)", &cases.DurationGTELTE{Val: &durationpb.Duration{Seconds: 3600, Nanos: 1}}, 1},
	{"duration - gte & lte - invalid (below)", &cases.DurationGTELTE{Val: &durationpb.Duration{Seconds: 59}}, 1},
	// Renamed to include "exclusive": this entry targets DurationExGTELTE and
	// previously duplicated the plain "gte & lte - valid (empty)" name above.
	{"duration - exclusive gte & lte - valid (empty)", &cases.DurationExGTELTE{}, 0},
	{"duration - exclusive gte & lte - valid (above)", &cases.DurationExGTELTE{Val: &durationpb.Duration{Seconds: 3601}}, 0},
	{"duration - exclusive gte & lte - valid (below)", &cases.DurationExGTELTE{Val: &durationpb.Duration{}}, 0},
	{"duration - exclusive gte & lte - valid (max)", &cases.DurationExGTELTE{Val: &durationpb.Duration{Seconds: 3600}}, 0},
	{"duration - exclusive gte & lte - valid (min)", &cases.DurationExGTELTE{Val: &durationpb.Duration{Seconds: 60}}, 0},
	{"duration - exclusive gte & lte - invalid", &cases.DurationExGTELTE{Val: &durationpb.Duration{Seconds: 61}}, 1},
	{"duration - fields with other fields - invalid other field", &cases.DurationFieldWithOtherFields{DurationVal: nil, IntVal: 12}, 1},
}
var timestampCases = []TestCase{
{"timestamp - none - valid", &cases.TimestampNone{Val: ×tamppb.Timestamp{Seconds: 123}}, 0},
{"timestamp - required - valid", &cases.TimestampRequired{Val: ×tamppb.Timestamp{}}, 0},
{"timestamp - required - invalid", &cases.TimestampRequired{Val: nil}, 1},
{"timestamp - const - valid", &cases.TimestampConst{Val: ×tamppb.Timestamp{Seconds: 3}}, 0},
{"timestamp - const - valid (empty)", &cases.TimestampConst{}, 0},
{"timestamp - const - invalid", &cases.TimestampConst{Val: ×tamppb.Timestamp{Nanos: 3}}, 1},
{"timestamp - lt - valid", &cases.TimestampLT{Val: ×tamppb.Timestamp{Seconds: -1}}, 0},
{"timestamp - lt - valid (empty)", &cases.TimestampLT{}, 0},
{"timestamp - lt - invalid (equal)", &cases.TimestampLT{Val: ×tamppb.Timestamp{}}, 1},
{"timestamp - lt - invalid", &cases.TimestampLT{Val: ×tamppb.Timestamp{Seconds: 1}}, 1},
{"timestamp - lte - valid", &cases.TimestampLTE{Val: ×tamppb.Timestamp{}}, 0},
{"timestamp - lte - valid (empty)", &cases.TimestampLTE{}, 0},
{"timestamp - lte - valid (equal)", &cases.TimestampLTE{Val: ×tamppb.Timestamp{Seconds: 1}}, 0},
{"timestamp - lte - invalid", &cases.TimestampLTE{Val: ×tamppb.Timestamp{Seconds: 1, Nanos: 1}}, 1},
{"timestamp - gt - valid", &cases.TimestampGT{Val: ×tamppb.Timestamp{Seconds: 1}}, 0},
{"timestamp - gt - valid (empty)", &cases.TimestampGT{}, 0},
{"timestamp - gt - invalid (equal)", &cases.TimestampGT{Val: ×tamppb.Timestamp{Nanos: 1000}}, 1},
{"timestamp - gt - invalid", &cases.TimestampGT{Val: ×tamppb.Timestamp{}}, 1},
{"timestamp - gte - valid", &cases.TimestampGTE{Val: ×tamppb.Timestamp{Seconds: 3}}, 0},
{"timestamp - gte - valid (empty)", &cases.TimestampGTE{}, 0},
{"timestamp - gte - valid (equal)", &cases.TimestampGTE{Val: ×tamppb.Timestamp{Nanos: 1000000}}, 0},
{"timestamp - gte - invalid", &cases.TimestampGTE{Val: ×tamppb.Timestamp{Seconds: -1}}, 1},
{"timestamp - gt & lt - valid", &cases.TimestampGTLT{Val: ×tamppb.Timestamp{Nanos: 1000}}, 0},
{"timestamp - gt & lt - valid (empty)", &cases.TimestampGTLT{}, 0},
{"timestamp - gt & lt - invalid (above)", &cases.TimestampGTLT{Val: ×tamppb.Timestamp{Seconds: 1000}}, 1},
{"timestamp - gt & lt - invalid (below)", &cases.TimestampGTLT{Val: ×tamppb.Timestamp{Seconds: -1000}}, 1},
{"timestamp - gt & lt - invalid (max)", &cases.TimestampGTLT{Val: ×tamppb.Timestamp{Seconds: 1}}, 1},
{"timestamp - gt & lt - invalid (min)", &cases.TimestampGTLT{Val: ×tamppb.Timestamp{}}, 1},
{"timestamp - exclusive gt & lt - valid (empty)", &cases.TimestampExLTGT{}, 0},
{"timestamp - exclusive gt & lt - valid (above)", &cases.TimestampExLTGT{Val: ×tamppb.Timestamp{Seconds: 2}}, 0},
{"timestamp - exclusive gt & lt - valid (below)", &cases.TimestampExLTGT{Val: ×tamppb.Timestamp{Seconds: -1}}, 0},
{"timestamp - exclusive gt & lt - invalid", &cases.TimestampExLTGT{Val: ×tamppb.Timestamp{Nanos: 1000}}, 1},
{"timestamp - exclusive gt & lt - invalid (max)", &cases.TimestampExLTGT{Val: ×tamppb.Timestamp{Seconds: 1}}, 1},
{"timestamp - exclusive gt & lt - invalid (min)", &cases.TimestampExLTGT{Val: ×tamppb.Timestamp{}}, 1},
{"timestamp - gte & lte - valid", &cases.TimestampGTELTE{Val: ×tamppb.Timestamp{Seconds: 60, Nanos: 1}}, 0},
{"timestamp - gte & lte - valid (empty)", &cases.TimestampGTELTE{}, 0},
{"timestamp - gte & lte - valid (max)", &cases.TimestampGTELTE{Val: ×tamppb.Timestamp{Seconds: 3600}}, 0},
{"timestamp - gte & lte - valid (min)", &cases.TimestampGTELTE{Val: ×tamppb.Timestamp{Seconds: 60}}, 0},
{"timestamp - gte & lte - invalid (above)", &cases.TimestampGTELTE{Val: ×tamppb.Timestamp{Seconds: 3600, Nanos: 1}}, 1},
{"timestamp - gte & lte - invalid (below)", &cases.TimestampGTELTE{Val: ×tamppb.Timestamp{Seconds: 59}}, 1},
{"timestamp - gte & lte - valid (empty)", &cases.TimestampExGTELTE{}, 0},
{"timestamp - exclusive gte & lte - valid (above)", &cases.TimestampExGTELTE{Val: ×tamppb.Timestamp{Seconds: 3601}}, 0},
{"timestamp - exclusive gte & lte - valid (below)", &cases.TimestampExGTELTE{Val: ×tamppb.Timestamp{}}, 0},
{"timestamp - exclusive gte & lte - valid (max)", &cases.TimestampExGTELTE{Val: ×tamppb.Timestamp{Seconds: 3600}}, 0},
{"timestamp - exclusive gte & lte - valid (min)", &cases.TimestampExGTELTE{Val: ×tamppb.Timestamp{Seconds: 60}}, 0},
{"timestamp - exclusive gte & lte - invalid", &cases.TimestampExGTELTE{Val: ×tamppb.Timestamp{Seconds: 61}}, 1},
{"timestamp - lt now - valid", &cases.TimestampLTNow{Val: ×tamppb.Timestamp{}}, 0},
{"timestamp - lt now - valid (empty)", &cases.TimestampLTNow{}, 0},
{"timestamp - lt now - invalid", &cases.TimestampLTNow{Val: ×tamppb.Timestamp{Seconds: time.Now().Unix() + 7200}}, 1},
{"timestamp - gt now - valid", &cases.TimestampGTNow{Val: ×tamppb.Timestamp{Seconds: time.Now().Unix() + 7200}}, 0},
{"timestamp - gt now - valid (empty)", &cases.TimestampGTNow{}, 0},
{"timestamp - gt now - invalid", &cases.TimestampGTNow{Val: ×tamppb.Timestamp{}}, 1},
{"timestamp - within - valid", &cases.TimestampWithin{Val: timestamppb.Now()}, 0},
{"timestamp - within - valid (empty)", &cases.TimestampWithin{}, 0},
{"timestamp - within - invalid (below)", &cases.TimestampWithin{Val: ×tamppb.Timestamp{}}, 1},
{"timestamp - within - invalid (above)", &cases.TimestampWithin{Val: ×tamppb.Timestamp{Seconds: time.Now().Unix() + 7200}}, 1},
{"timestamp - lt now within - valid", &cases.TimestampLTNowWithin{Val: ×tamppb.Timestamp{Seconds: time.Now().Unix() - 1800}}, 0},
{"timestamp - lt now within - valid (empty)", &cases.TimestampLTNowWithin{}, 0},
{"timestamp - lt now within - invalid (lt)", &cases.TimestampLTNowWithin{Val: ×tamppb.Timestamp{Seconds: time.Now().Unix() + 1800}}, 1},
{"timestamp - lt now within - invalid (within)", &cases.TimestampLTNowWithin{Val: ×tamppb.Timestamp{Seconds: time.Now().Unix() - 7200}}, 1},
{"timestamp - gt now within - valid", &cases.TimestampGTNowWithin{Val: ×tamppb.Timestamp{Seconds: time.Now().Unix() + 1800}}, 0},
{"timestamp - gt now within - valid (empty)", &cases.TimestampGTNowWithin{}, 0},
{"timestamp - gt now within - invalid (gt)", &cases.TimestampGTNowWithin{Val: ×tamppb.Timestamp{Seconds: time.Now().Unix() - 1800}}, 1},
{"timestamp - gt now within - invalid (within)", &cases.TimestampGTNowWithin{Val: ×tamppb.Timestamp{Seconds: time.Now().Unix() + 7200}}, 1},
}
// anyCases exercises validation of google.protobuf.Any fields: presence
// (required) and TypeUrl membership (in / not_in). The last element of each
// TestCase is the expected number of validation failures.
var anyCases = []TestCase{
	{"any - none - valid", &cases.AnyNone{Val: &anypb.Any{}}, 0},
	{"any - required - valid", &cases.AnyRequired{Val: &anypb.Any{}}, 0},
	{"any - required - invalid", &cases.AnyRequired{Val: nil}, 1},
	{"any - in - valid", &cases.AnyIn{Val: &anypb.Any{TypeUrl: "type.googleapis.com/google.protobuf.Duration"}}, 0},
	{"any - in - valid (empty)", &cases.AnyIn{}, 0},
	{"any - in - invalid", &cases.AnyIn{Val: &anypb.Any{TypeUrl: "type.googleapis.com/google.protobuf.Timestamp"}}, 1},
	{"any - not in - valid", &cases.AnyNotIn{Val: &anypb.Any{TypeUrl: "type.googleapis.com/google.protobuf.Duration"}}, 0},
	{"any - not in - valid (empty)", &cases.AnyNotIn{}, 0},
	{"any - not in - invalid", &cases.AnyNotIn{Val: &anypb.Any{TypeUrl: "type.googleapis.com/google.protobuf.Timestamp"}}, 1},
}
var kitchenSink = []TestCase{
{"kitchensink - field - valid", &cases.KitchenSinkMessage{Val: &cases.ComplexTestMsg{Const: "abcd", IntConst: 5, BoolConst: false, FloatVal: &wrapperspb.FloatValue{Value: 1}, DurVal: &durationpb.Duration{Seconds: 3}, TsVal: ×tamppb.Timestamp{Seconds: 17}, FloatConst: 7, DoubleIn: 123, EnumConst: cases.ComplexTestEnum_ComplexTWO, AnyVal: &anypb.Any{TypeUrl: "type.googleapis.com/google.protobuf.Duration"}, RepTsVal: []*timestamppb.Timestamp{{Seconds: 3}}, MapVal: map[int32]string{-1: "a", -2: "b"}, BytesVal: []byte("\x00\x99"), O: &cases.ComplexTestMsg_X{X: "foobar"}}}, 0},
{"kitchensink - valid (unset)", &cases.KitchenSinkMessage{}, 0},
{"kitchensink - field - invalid", &cases.KitchenSinkMessage{Val: &cases.ComplexTestMsg{}}, 7},
{"kitchensink - field - embedded - invalid", &cases.KitchenSinkMessage{Val: &cases.ComplexTestMsg{Another: &cases.ComplexTestMsg{}}}, 14},
{"kitchensink - field - invalid (transitive)", &cases.KitchenSinkMessage{Val: &cases.ComplexTestMsg{Const: "abcd", BoolConst: true, Nested: &cases.ComplexTestMsg{}}}, 14},
{"kitchensink - many - all non-message fields invalid", &cases.KitchenSinkMessage{Val: &cases.ComplexTestMsg{BoolConst: true, FloatVal: &wrapperspb.FloatValue{}, TsVal: ×tamppb.Timestamp{}, FloatConst: 8, AnyVal: &anypb.Any{TypeUrl: "asdf"}, RepTsVal: []*timestamppb.Timestamp{{Nanos: 1}}}}, 13},
}
// nestedCases checks that well-known-type rules (here: string.uuid) are
// validated through multiple levels of message nesting.
var nestedCases = []TestCase{
	{"nested wkt uuid - field - valid", &cases.WktLevelOne{Two: &cases.WktLevelOne_WktLevelTwo{Three: &cases.WktLevelOne_WktLevelTwo_WktLevelThree{Uuid: "f81d16ef-40e2-40c6-bebc-89aaf5292f9a"}}}, 0},
	{"nested wkt uuid - field - invalid", &cases.WktLevelOne{Two: &cases.WktLevelOne_WktLevelTwo{Three: &cases.WktLevelOne_WktLevelTwo_WktLevelThree{Uuid: "not-a-valid-uuid"}}}, 1},
}
|
package main
import (
"errors"
"net/http"
"os"
"regexp"
"strconv"
"github.com/naelyn/go-docker-registry/Godeps/_workspace/src/github.com/golang/glog"
"github.com/naelyn/go-docker-registry/Godeps/_workspace/src/github.com/gorilla/mux"
"github.com/naelyn/go-docker-registry/auth"
"github.com/naelyn/go-docker-registry/storage"
)
// ReqHandler is the signature shared by all registry endpoint handlers; a
// non-nil return value is translated into an HTTP status by errHandler.
type ReqHandler func(http.ResponseWriter, *http.Request) error

// ErrNotFound is the sentinel a handler returns to produce a 404 response.
var ErrNotFound = errors.New("not found")

// requestError wraps an error that should be reported as 400 Bad Request.
type requestError struct {
	Err error
}

// Error implements the error interface by delegating to the wrapped error.
func (e *requestError) Error() string {
	return e.Err.Error()
}
// from https://github.com/docker/docker/blob/master/registry/registry.go
var (
	// Anchored patterns used to validate ids, namespaces and repo names.
	validHex       = regexp.MustCompile(`^([a-f0-9]{64})$`)
	validNamespace = regexp.MustCompile(`^([a-z0-9_]{4,30})$`)
	validRepo      = regexp.MustCompile(`^([a-z0-9-_.]+)$`)
	// Unanchored variants embedded into mux route patterns in NewHandler.
	validHexForRoute       = `[a-f0-9]{64}`
	validNamespaceForRoute = `[a-z0-9_]{4,30}`
	validRepoForRoute      = `[a-z0-9-_.]+`
	// TODO: add patterns for tags
)

// Handler is the registry's HTTP front-end: a mux router plus the storage
// driver and the authorization policy applied to protected routes.
type Handler struct {
	*mux.Router
	Driver storage.Driver
	ra     auth.RequestAllower // request authorizer; never nil after NewHandler
}
// NewHandler builds the registry's HTTP handler: it wires every v1 API route
// to its implementation, wraps protected routes with the authentication
// middleware and backs the handler with a local-filesystem storage driver
// rooted at dataDir. A nil RequestAllower falls back to allow-all.
func NewHandler(dataDir string, ra auth.RequestAllower) *Handler {
	router := mux.NewRouter()
	r := router.PathPrefix("/v1/").Subrouter()
	if ra == nil {
		ra = auth.AllowAll()
	}
	handler := &Handler{
		Router: router,
		Driver: storage.NewLocalDriver(dataDir),
		ra:     ra,
	}
	// Route-pattern fragments; the names (id, ns, repo, ...) become mux vars
	// read by the individual handlers.
	var (
		ID     = "{id:" + validHexForRoute + "}"
		NSREPO = "{ns:" + validNamespaceForRoute + "}/{repo:" + validRepoForRoute + "}"
		USER   = "{username:[^/]+}"
		TAG    = "{tag:[^/]+}"
	)
	mappings := []mapping{
		// index.py
		{"GET", "/users/", auth.Unprotected, handler.FakeGetUsers},
		{"GET", "/users", auth.Unprotected, handler.FakeGetUsers},
		{"POST", "/users/", auth.Unprotected, handler.FakePostUsers},
		{"POST", "/users", auth.Unprotected, handler.FakePostUsers},
		{"PUT", "/users/" + USER + "/", auth.Unprotected, handler.FakePutUsername},
		{"PUT", "/users/" + USER, auth.Unprotected, handler.FakePutUsername},
		// dummies
		{"GET", "/_ping/", auth.Unprotected, handler.GetPing},
		{"GET", "/_ping", auth.Unprotected, handler.GetPing},
		// search
		{"GET", "/search", auth.ReadAccess, handler.SearchIndex},
		// images
		{"GET", "/images/" + ID + "/layer", auth.ReadAccess, handler.GetImageLayer},
		{"PUT", "/images/" + ID + "/layer", auth.WriteAccess, handler.PutImageLayer},
		{"GET", "/images/" + ID + "/json", auth.ReadAccess, handler.GetImageJson},
		{"PUT", "/images/" + ID + "/json", auth.WriteAccess, handler.PutImageJson},
		{"GET", "/images/" + ID + "/ancestry", auth.ReadAccess, handler.GetImageAncestry},
		{"PUT", "/images/" + ID + "/checksum", auth.WriteAccess, handler.PutImageChecksum},
		// repositories
		{"DELETE", "/repositories/" + NSREPO + "/", auth.WriteAccess, handler.DeleteRepository},
		{"DELETE", "/repositories/" + NSREPO, auth.WriteAccess, handler.DeleteRepository},
		{"DELETE", "/repositories/" + NSREPO + "/tags/", auth.WriteAccess, handler.DeleteRepository},
		{"DELETE", "/repositories/" + NSREPO + "/tags", auth.WriteAccess, handler.DeleteRepository},
		{"GET", "/repositories/" + NSREPO + "/images", auth.ReadAccess, handler.GetRepositoryImages},
		{"PUT", "/repositories/" + NSREPO + "/images", auth.WriteAccess, handler.PutRepositoryImages},
		{"PUT", "/repositories/" + NSREPO + "/", auth.WriteAccess, handler.PutRepository},
		{"PUT", "/repositories/" + NSREPO, auth.WriteAccess, handler.PutRepository},
		{"DELETE", "/repositories/" + NSREPO + "/tags/" + TAG, auth.WriteAccess, handler.DeleteRepositoryTags},
		{"GET", "/repositories/" + NSREPO + "/tags/" + TAG, auth.ReadAccess, handler.GetRepositoryTag},
		{"PUT", "/repositories/" + NSREPO + "/tags/" + TAG, auth.WriteAccess, handler.PutRepositoryTags},
		{"GET", "/repositories/" + NSREPO + "/tags", auth.ReadAccess, handler.GetRepositoryTags},
	}
	for _, m := range mappings {
		rh := m.rh
		if m.access != auth.Unprotected {
			// Protected routes first pass through the auth middleware.
			rh = requiresAuth(ra, m.access, rh)
		}
		r.HandleFunc(m.path, errHandler(rh)).Methods(m.method)
	}
	// Catch-all so unmatched paths are logged before returning 404.
	router.HandleFunc("/{foo:.*}", logNotFound)
	return handler
}
// mapping describes one route: HTTP method, path pattern, required access
// level and the handler to invoke.
type mapping struct {
	method string
	path   string
	access auth.Access
	rh     ReqHandler
}
// errHandler adapts a ReqHandler to a plain http.HandlerFunc, mapping the
// returned error to an HTTP status: ErrPleaseAuthenticate -> 401, not-found
// conditions -> 404, *requestError -> 400, anything else non-nil -> 500.
func errHandler(f ReqHandler) func(w http.ResponseWriter, r *http.Request) {
	return func(w http.ResponseWriter, r *http.Request) {
		if glog.V(2) {
			glog.Infof("REQ: %s %s", r.Method, r.URL)
		}
		switch err := f(w, r); {
		case err == nil:
			// handler already produced the response
		case err == ErrPleaseAuthenticate:
			w.Header().Add("WWW-Authenticate", "Token")
			w.WriteHeader(http.StatusUnauthorized)
		case err == ErrNotFound, err == storage.ErrNotFound:
			http.NotFound(w, r)
		case os.IsNotExist(err):
			http.NotFound(w, r)
		default:
			glog.Error(err.Error())
			if _, ok := err.(*requestError); ok {
				w.WriteHeader(http.StatusBadRequest)
			} else {
				w.WriteHeader(http.StatusInternalServerError)
			}
		}
	}
}
// logNotFound is the catch-all route: log the miss and answer 404.
func logNotFound(w http.ResponseWriter, r *http.Request) {
	glog.Infof("REQ(404): %s %s", r.Method, r.URL)
	http.NotFound(w, r)
}

// writeLengthHeader sets the Content-Length header to v bytes.
func (h *Handler) writeLengthHeader(w http.ResponseWriter, v int) {
	w.Header().Add("Content-Length", strconv.Itoa(v))
}

// WriteJsonHeader marks the response body as JSON.
func (h *Handler) WriteJsonHeader(w http.ResponseWriter) {
	w.Header().Add("Content-Type", "application/json")
}

// WriteEndpointsHeader advertises this host as the docker endpoint the
// client should keep talking to.
func (h *Handler) WriteEndpointsHeader(w http.ResponseWriter, r *http.Request) {
	w.Header().Add("X-Docker-Endpoints", r.Host)
}
// https://github.com/docker/docker/blob/master/docs/sources/reference/api/registry_api.md
// GetPing implements the registry ping endpoint: it advertises the registry
// version and standalone mode via X-Docker-* headers and answers with an
// empty JSON object.
func (h *Handler) GetPing(w http.ResponseWriter, r *http.Request) error {
	h.WriteJsonHeader(w)
	hdr := w.Header()
	hdr.Add("X-Docker-Registry-Version", "0.0.1")
	hdr.Add("X-Docker-Registry-Standalone", "true")
	w.WriteHeader(http.StatusOK)
	_, err := w.Write([]byte("{}")) // pong
	return err
}
// ------------------------- index -------------------------
// FakeGetUsers pretends an index login check succeeded; there is no real
// user database behind this registry.
func (h *Handler) FakeGetUsers(w http.ResponseWriter, r *http.Request) error {
	w.WriteHeader(http.StatusOK)
	return nil
}

// FakePostUsers pretends user creation succeeded.
func (h *Handler) FakePostUsers(w http.ResponseWriter, r *http.Request) error {
	// todo: decode json, if err return StatusBadRequest
	w.WriteHeader(http.StatusCreated)
	return nil
}
// FakePutUsername pretends to update a user account; the username path
// variable is parsed but otherwise ignored.
func (h *Handler) FakePutUsername(w http.ResponseWriter, r *http.Request) error {
	_ = mux.Vars(r)["username"]
	w.WriteHeader(http.StatusNoContent)
	return nil
}
// server side:
// https://github.com/docker/docker/blob/master/registry/endpoint.go
|
package stackdriver
import (
"context"
"encoding/json"
"fmt"
"reflect"
"strings"
"time"
"google.golang.org/api/option"
"cloud.google.com/go/logging"
"cloud.google.com/go/logging/logadmin"
"github.com/egnyte/ax/pkg/backend/common"
"google.golang.org/api/iterator"
)
// QueryLogTimeout bounds a single Entries() iteration; see readLogBatch for
// why this is needed (the iterator can block forever on empty result sets).
const QueryLogTimeout = 20 * time.Second

// StackdriverClient is the ax backend for Google Stackdriver logging; it
// reads entries of a single log within a single project.
type StackdriverClient struct {
	stackdriverClient *logadmin.Client // logadmin handle used for Entries()/Logs()
	projectName       string
	logName           string
}

// This is some crazy-ass structure in which the stackdriver APIs
// return its JSON values that we have to decode
type payloadValue struct {
	Fields map[string]payloadEntry
}

// payloadEntry mirrors one protobuf Value; exactly one of the pointers in
// Kind is expected to be non-nil.
type payloadEntry struct {
	Kind struct {
		StringValue *string
		NumberValue *int64
		BoolValue   *bool
		ListValue   *struct {
			Values []payloadEntry
		}
		StructValue *payloadValue
	}
}
// payloadValueToJSONValue converts a decoded Stackdriver struct payload into
// a plain map of JSON-compatible Go values.
func payloadValueToJSONValue(plVal payloadValue) map[string]interface{} {
	result := make(map[string]interface{}, len(plVal.Fields))
	for name, entry := range plVal.Fields {
		result[name] = payloadEntryToJSONValue(entry)
	}
	return result
}
// payloadEntryToJSONValue converts one Stackdriver payload entry into the
// corresponding Go value (string, int64, bool, []interface{} or map), or nil
// when the entry carries no recognized kind.
func payloadEntryToJSONValue(plEntry payloadEntry) interface{} {
	kind := plEntry.Kind
	switch {
	case kind.StringValue != nil:
		return *kind.StringValue
	case kind.NumberValue != nil:
		return *kind.NumberValue
	case kind.BoolValue != nil:
		return *kind.BoolValue
	case kind.ListValue != nil:
		values := kind.ListValue.Values
		list := make([]interface{}, len(values))
		for i, v := range values {
			list[i] = payloadEntryToJSONValue(v) // recurse into list elements
		}
		return list
	case kind.StructValue != nil:
		return payloadValueToJSONValue(*kind.StructValue) // recurse into objects
	default:
		return nil
	}
}
// payloadToAttributes decodes the raw JSON form of a Stackdriver proto
// payload into a generic attribute map; it returns nil (and logs the raw
// payload) when the bytes do not parse.
func payloadToAttributes(buf []byte) map[string]interface{} {
	var plValue payloadValue
	if err := json.Unmarshal(buf, &plValue); err != nil {
		// Fixed: "unmarshall" typo and the missing trailing newline in the
		// diagnostic output.
		fmt.Printf("Could not unmarshal value: %s\n", string(buf))
		return nil
	}
	return payloadValueToJSONValue(plValue)
}
// entryToLogMessage converts a Stackdriver log entry into ax's common
// LogMessage, flattening the payload into the Attributes map: a plain string
// becomes the "message" attribute, a JSON object is used as-is, and any
// other payload is round-tripped through JSON.
func entryToLogMessage(entry *logging.Entry) common.LogMessage {
	message := common.NewLogMessage()
	message.Timestamp = entry.Timestamp
	message.ID = entry.InsertID
	switch v := entry.Payload.(type) {
	case string:
		message.Attributes["message"] = v
	case map[string]interface{}:
		message.Attributes = v
	default:
		buf, err := json.Marshal(entry.Payload)
		if err != nil {
			// BUG FIX: report the type of the *payload*, not of the entry —
			// reflect.TypeOf(entry) always printed "*logging.Entry". Also
			// fixed the "marshall" typo and added a trailing newline.
			fmt.Printf("Could not marshal value: %v of type %v\n", entry.Payload, reflect.TypeOf(entry.Payload))
			break
		}
		message.Attributes = payloadToAttributes(buf)
	}
	return message
}
// queryToFilter translates a generic ax query into a Stackdriver advanced
// filter expression, always scoped to the given project/log pair.
func queryToFilter(query common.Query, projectName string, logName string) string {
	clauses := make([]string, 0, 4+len(query.EqualityFilters))
	clauses = append(clauses, fmt.Sprintf(`logName = "projects/%s/logs/%s"`, projectName, logName))
	if query.QueryString != "" {
		// Free-text search term, quoted per the filter syntax.
		clauses = append(clauses, fmt.Sprintf(`"%s"`, query.QueryString))
	}
	for _, ef := range query.EqualityFilters {
		clauses = append(clauses, fmt.Sprintf(`jsonPayload.%s %s "%s"`, ef.FieldName, ef.Operator, ef.Value))
	}
	if query.After != nil {
		clauses = append(clauses, fmt.Sprintf(`timestamp > "%s"`, query.After.Format(time.RFC3339)))
	}
	if query.Before != nil {
		clauses = append(clauses, fmt.Sprintf(`timestamp < "%s"`, query.Before.Format(time.RFC3339)))
	}
	return strings.Join(clauses, " AND ")
}
// ImplementsAdvancedFilters reports that this backend does not support ax's
// advanced (non-equality) filters.
func (client *StackdriverClient) ImplementsAdvancedFilters() bool {
	return false
}
// readLogBatch fetches up to query.MaxResults log entries matching the query.
// The call is bounded by QueryLogTimeout because the underlying iterator can
// block forever when there are no results; on deadline it returns whatever
// was collected together with context.DeadlineExceeded.
//
// BUG FIX: the original loop only inspected the error of the *first*
// it.Next() call; a real (non-iterator.Done) error inside the loop was
// silently ignored and a nil entry was then passed to entryToLogMessage.
// Errors and the deadline are now checked on every iteration.
func (client *StackdriverClient) readLogBatch(ctx context.Context, query common.Query) ([]common.LogMessage, error) {
	ctx, cancel := context.WithTimeout(ctx, QueryLogTimeout)
	defer cancel()
	it := client.stackdriverClient.Entries(ctx, logadmin.Filter(queryToFilter(query, client.projectName, client.logName)))
	messages := make([]common.LogMessage, 0, 20)
	for len(messages) < query.MaxResults {
		entry, err := it.Next()
		if ctx.Err() == context.DeadlineExceeded {
			// Return the partial batch so a slow query still yields output.
			return messages, ctx.Err()
		}
		if err == iterator.Done {
			break
		}
		if err != nil {
			return nil, err
		}
		msg := entryToLogMessage(entry)
		msg.Attributes = common.Project(msg.Attributes, query.SelectFields)
		messages = append(messages, msg)
	}
	return messages, nil
}
// Query streams matching log messages over a channel. In follow mode it
// delegates to common.ReQueryFollow; otherwise it fetches one batch in a
// goroutine and closes the channel when done.
//
// BUG FIX: the producer goroutine used a plain channel send, so it leaked
// forever if the consumer stopped reading; sends now also select on
// ctx.Done().
func (client *StackdriverClient) Query(ctx context.Context, query common.Query) <-chan common.LogMessage {
	if query.Follow {
		return common.ReQueryFollow(ctx, func() ([]common.LogMessage, error) {
			return client.readLogBatch(ctx, query)
		})
	}
	resultChan := make(chan common.LogMessage)
	go func() {
		defer close(resultChan)
		messages, err := client.readLogBatch(ctx, query)
		if err != nil {
			fmt.Printf("Error while fetching logs: %s\n", err)
			return
		}
		for _, message := range messages {
			select {
			case resultChan <- message:
			case <-ctx.Done():
				return
			}
		}
	}()
	return resultChan
}
// New builds a StackdriverClient for the given project/log using the service
// account credentials in credentialsFile.
// NOTE(review): on failure it only logs and returns nil — callers must
// nil-check the result or they will panic on first use.
func New(credentialsFile, projectName, logName string) *StackdriverClient {
	client, err := logadmin.NewClient(context.Background(), projectName, option.WithCredentialsFile(credentialsFile))
	if err != nil {
		fmt.Printf("Error creating stack driver client: %v\n", err)
		return nil
	}
	return &StackdriverClient{
		stackdriverClient: client,
		projectName:       projectName,
		logName:           logName,
	}
}
// ListLogs returns the names of all logs available in the configured
// project.
func (client *StackdriverClient) ListLogs() ([]string, error) {
	logNames := make([]string, 0, 10)
	it := client.stackdriverClient.Logs(context.Background())
	for {
		name, err := it.Next()
		if err == iterator.Done {
			return logNames, nil
		}
		if err != nil {
			return nil, err
		}
		logNames = append(logNames, name)
	}
}
// Compile-time check that StackdriverClient satisfies common.Client.
var _ common.Client = &StackdriverClient{}
|
package flowcontrol
import (
"time"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"gx/ipfs/QmU44KWVkSHno7sNDTeUcL4FBgxgoidkFuTUyTXWJPXXFJ/quic-go/internal/congestion"
"gx/ipfs/QmU44KWVkSHno7sNDTeUcL4FBgxgoidkFuTUyTXWJPXXFJ/quic-go/internal/protocol"
"gx/ipfs/QmU44KWVkSHno7sNDTeUcL4FBgxgoidkFuTUyTXWJPXXFJ/quic-go/internal/utils"
"gx/ipfs/QmU44KWVkSHno7sNDTeUcL4FBgxgoidkFuTUyTXWJPXXFJ/quic-go/qerr"
)
// Ginkgo suite for streamFlowController. It exercises construction,
// receive-side offset accounting (highest-received offsets, final offsets,
// flow-control violations), window-update queueing and autotuning, and
// send-side window bookkeeping — including how a stream that "contributes to
// the connection" feeds the connection-level flow controller.
var _ = Describe("Stream Flow controller", func() {
	var (
		controller             *streamFlowController
		queuedWindowUpdate     bool
		queuedConnWindowUpdate bool
	)
	// Fresh controller (stream 10, 1000-byte connection windows) per spec;
	// the queue callbacks just record that they fired.
	BeforeEach(func() {
		queuedWindowUpdate = false
		queuedConnWindowUpdate = false
		rttStats := &congestion.RTTStats{}
		controller = &streamFlowController{
			streamID:   10,
			connection: NewConnectionFlowController(1000, 1000, func() { queuedConnWindowUpdate = true }, rttStats, utils.DefaultLogger).(*connectionFlowController),
		}
		controller.maxReceiveWindowSize = 10000
		controller.rttStats = rttStats
		controller.logger = utils.DefaultLogger
		controller.queueWindowUpdate = func() { queuedWindowUpdate = true }
	})
	Context("Constructor", func() {
		rttStats := &congestion.RTTStats{}
		receiveWindow := protocol.ByteCount(2000)
		maxReceiveWindow := protocol.ByteCount(3000)
		sendWindow := protocol.ByteCount(4000)
		It("sets the send and receive windows", func() {
			cc := NewConnectionFlowController(0, 0, nil, nil, utils.DefaultLogger)
			fc := NewStreamFlowController(5, true, cc, receiveWindow, maxReceiveWindow, sendWindow, nil, rttStats, utils.DefaultLogger).(*streamFlowController)
			Expect(fc.streamID).To(Equal(protocol.StreamID(5)))
			Expect(fc.receiveWindow).To(Equal(receiveWindow))
			Expect(fc.maxReceiveWindowSize).To(Equal(maxReceiveWindow))
			Expect(fc.sendWindow).To(Equal(sendWindow))
			Expect(fc.contributesToConnection).To(BeTrue())
		})
		It("queues window updates with the correction stream ID", func() {
			var queued bool
			queueWindowUpdate := func(id protocol.StreamID) {
				Expect(id).To(Equal(protocol.StreamID(5)))
				queued = true
			}
			cc := NewConnectionFlowController(0, 0, nil, nil, utils.DefaultLogger)
			fc := NewStreamFlowController(5, true, cc, receiveWindow, maxReceiveWindow, sendWindow, queueWindowUpdate, rttStats, utils.DefaultLogger).(*streamFlowController)
			fc.AddBytesRead(receiveWindow)
			fc.MaybeQueueWindowUpdate()
			Expect(queued).To(BeTrue())
		})
	})
	// Receive path: UpdateHighestReceived / AddBytesRead bookkeeping.
	Context("receiving data", func() {
		Context("registering received offsets", func() {
			var receiveWindow protocol.ByteCount = 10000
			var receiveWindowSize protocol.ByteCount = 600
			BeforeEach(func() {
				controller.receiveWindow = receiveWindow
				controller.receiveWindowSize = receiveWindowSize
			})
			It("updates the highestReceived", func() {
				controller.highestReceived = 1337
				err := controller.UpdateHighestReceived(1338, false)
				Expect(err).ToNot(HaveOccurred())
				Expect(controller.highestReceived).To(Equal(protocol.ByteCount(1338)))
			})
			It("informs the connection flow controller about received data", func() {
				controller.highestReceived = 10
				controller.contributesToConnection = true
				controller.connection.(*connectionFlowController).highestReceived = 100
				err := controller.UpdateHighestReceived(20, false)
				Expect(err).ToNot(HaveOccurred())
				Expect(controller.connection.(*connectionFlowController).highestReceived).To(Equal(protocol.ByteCount(100 + 10)))
			})
			It("doesn't informs the connection flow controller about received data if it doesn't contribute", func() {
				controller.highestReceived = 10
				controller.connection.(*connectionFlowController).highestReceived = 100
				err := controller.UpdateHighestReceived(20, false)
				Expect(err).ToNot(HaveOccurred())
				Expect(controller.connection.(*connectionFlowController).highestReceived).To(Equal(protocol.ByteCount(100)))
			})
			It("does not decrease the highestReceived", func() {
				controller.highestReceived = 1337
				err := controller.UpdateHighestReceived(1000, false)
				Expect(err).ToNot(HaveOccurred())
				Expect(controller.highestReceived).To(Equal(protocol.ByteCount(1337)))
			})
			It("does nothing when setting the same byte offset", func() {
				controller.highestReceived = 1337
				err := controller.UpdateHighestReceived(1337, false)
				Expect(err).ToNot(HaveOccurred())
			})
			It("does not give a flow control violation when using the window completely", func() {
				err := controller.UpdateHighestReceived(receiveWindow, false)
				Expect(err).ToNot(HaveOccurred())
			})
			It("detects a flow control violation", func() {
				err := controller.UpdateHighestReceived(receiveWindow+1, false)
				Expect(err).To(MatchError("FlowControlReceivedTooMuchData: Received 10001 bytes on stream 10, allowed 10000 bytes"))
			})
			It("accepts a final offset higher than the highest received", func() {
				controller.highestReceived = 100
				err := controller.UpdateHighestReceived(101, true)
				Expect(err).ToNot(HaveOccurred())
				Expect(controller.highestReceived).To(Equal(protocol.ByteCount(101)))
			})
			It("errors when receiving a final offset smaller than the highest offset received so far", func() {
				controller.highestReceived = 100
				err := controller.UpdateHighestReceived(99, true)
				Expect(err).To(MatchError(qerr.StreamDataAfterTermination))
			})
			It("accepts delayed data after receiving a final offset", func() {
				err := controller.UpdateHighestReceived(300, true)
				Expect(err).ToNot(HaveOccurred())
				err = controller.UpdateHighestReceived(250, false)
				Expect(err).ToNot(HaveOccurred())
			})
			It("errors when receiving a higher offset after receiving a final offset", func() {
				err := controller.UpdateHighestReceived(200, true)
				Expect(err).ToNot(HaveOccurred())
				err = controller.UpdateHighestReceived(250, false)
				Expect(err).To(MatchError(qerr.StreamDataAfterTermination))
			})
			It("accepts duplicate final offsets", func() {
				err := controller.UpdateHighestReceived(200, true)
				Expect(err).ToNot(HaveOccurred())
				err = controller.UpdateHighestReceived(200, true)
				Expect(err).ToNot(HaveOccurred())
				Expect(controller.highestReceived).To(Equal(protocol.ByteCount(200)))
			})
			It("errors when receiving inconsistent final offsets", func() {
				err := controller.UpdateHighestReceived(200, true)
				Expect(err).ToNot(HaveOccurred())
				err = controller.UpdateHighestReceived(201, true)
				Expect(err).To(MatchError("StreamDataAfterTermination: Received inconsistent final offset for stream 10 (old: 200, new: 201 bytes)"))
			})
		})
		Context("registering data read", func() {
			It("saves when data is read, on a stream not contributing to the connection", func() {
				controller.AddBytesRead(100)
				Expect(controller.bytesRead).To(Equal(protocol.ByteCount(100)))
				Expect(controller.connection.(*connectionFlowController).bytesRead).To(BeZero())
			})
			It("saves when data is read, on a stream not contributing to the connection", func() {
				controller.contributesToConnection = true
				controller.AddBytesRead(200)
				Expect(controller.bytesRead).To(Equal(protocol.ByteCount(200)))
				Expect(controller.connection.(*connectionFlowController).bytesRead).To(Equal(protocol.ByteCount(200)))
			})
		})
		Context("generating window updates", func() {
			var oldWindowSize protocol.ByteCount
			// update the congestion such that it returns a given value for the smoothed RTT
			setRtt := func(t time.Duration) {
				controller.rttStats.UpdateRTT(t, 0, time.Now())
				Expect(controller.rttStats.SmoothedRTT()).To(Equal(t)) // make sure it worked
			}
			BeforeEach(func() {
				controller.receiveWindow = 100
				controller.receiveWindowSize = 60
				controller.bytesRead = 100 - 60
				controller.connection.(*connectionFlowController).receiveWindow = 100
				controller.connection.(*connectionFlowController).receiveWindowSize = 120
				oldWindowSize = controller.receiveWindowSize
			})
			It("queues window updates", func() {
				controller.MaybeQueueWindowUpdate()
				Expect(queuedWindowUpdate).To(BeFalse())
				controller.AddBytesRead(30)
				controller.MaybeQueueWindowUpdate()
				Expect(queuedWindowUpdate).To(BeTrue())
				Expect(controller.GetWindowUpdate()).ToNot(BeZero())
				queuedWindowUpdate = false
				controller.MaybeQueueWindowUpdate()
				Expect(queuedWindowUpdate).To(BeFalse())
			})
			It("queues connection-level window updates", func() {
				controller.contributesToConnection = true
				controller.MaybeQueueWindowUpdate()
				Expect(queuedConnWindowUpdate).To(BeFalse())
				controller.AddBytesRead(60)
				controller.MaybeQueueWindowUpdate()
				Expect(queuedConnWindowUpdate).To(BeTrue())
			})
			It("tells the connection flow controller when the window was autotuned", func() {
				oldOffset := controller.bytesRead
				controller.contributesToConnection = true
				setRtt(scaleDuration(20 * time.Millisecond))
				controller.epochStartOffset = oldOffset
				controller.epochStartTime = time.Now().Add(-time.Millisecond)
				controller.AddBytesRead(55)
				offset := controller.GetWindowUpdate()
				Expect(offset).To(Equal(protocol.ByteCount(oldOffset + 55 + 2*oldWindowSize)))
				Expect(controller.receiveWindowSize).To(Equal(2 * oldWindowSize))
				Expect(controller.connection.(*connectionFlowController).receiveWindowSize).To(Equal(protocol.ByteCount(float64(controller.receiveWindowSize) * protocol.ConnectionFlowControlMultiplier)))
			})
			It("doesn't tell the connection flow controller if it doesn't contribute", func() {
				oldOffset := controller.bytesRead
				controller.contributesToConnection = false
				setRtt(scaleDuration(20 * time.Millisecond))
				controller.epochStartOffset = oldOffset
				controller.epochStartTime = time.Now().Add(-time.Millisecond)
				controller.AddBytesRead(55)
				offset := controller.GetWindowUpdate()
				Expect(offset).ToNot(BeZero())
				Expect(controller.receiveWindowSize).To(Equal(2 * oldWindowSize))
				Expect(controller.connection.(*connectionFlowController).receiveWindowSize).To(Equal(protocol.ByteCount(2 * oldWindowSize))) // unchanged
			})
			It("doesn't increase the window after a final offset was already received", func() {
				controller.AddBytesRead(30)
				err := controller.UpdateHighestReceived(90, true)
				Expect(err).ToNot(HaveOccurred())
				controller.MaybeQueueWindowUpdate()
				Expect(queuedWindowUpdate).To(BeFalse())
				offset := controller.GetWindowUpdate()
				Expect(offset).To(BeZero())
			})
		})
	})
	// Send path: send-window size and blocked detection, stream vs connection.
	Context("sending data", func() {
		It("gets the size of the send window", func() {
			controller.UpdateSendWindow(15)
			controller.AddBytesSent(5)
			Expect(controller.SendWindowSize()).To(Equal(protocol.ByteCount(10)))
		})
		It("doesn't care about the connection-level window, if it doesn't contribute", func() {
			controller.UpdateSendWindow(15)
			controller.connection.UpdateSendWindow(1)
			controller.AddBytesSent(5)
			Expect(controller.SendWindowSize()).To(Equal(protocol.ByteCount(10)))
		})
		It("makes sure that it doesn't overflow the connection-level window", func() {
			controller.contributesToConnection = true
			controller.connection.UpdateSendWindow(12)
			controller.UpdateSendWindow(20)
			controller.AddBytesSent(10)
			Expect(controller.SendWindowSize()).To(Equal(protocol.ByteCount(2)))
		})
		It("doesn't say that it's blocked, if only the connection is blocked", func() {
			controller.contributesToConnection = true
			controller.connection.UpdateSendWindow(50)
			controller.UpdateSendWindow(100)
			controller.AddBytesSent(50)
			blocked, _ := controller.connection.IsNewlyBlocked()
			Expect(blocked).To(BeTrue())
			Expect(controller.IsNewlyBlocked()).To(BeFalse())
		})
	})
})
|
package main
import(
"log"
"net"
"context"
"app/utils"
"app/proto"
"app/models"
"google.golang.org/grpc"
)
// server implements the BookProfiles gRPC service.
type server struct{
	proto.UnimplementedBookProfilesServer
}
// Create inserts the book carried by the request into the "books" table and
// returns the main information of the freshly stored record (including the
// id generated by the database).
//
// BUG FIX: every failure used log.Fatal, which terminates the whole gRPC
// server because of a single bad request; errors are now returned to the
// caller. The local `new` (which shadowed the builtin) was also renamed.
func (*server) Create(c context.Context, req *proto.CreateRequest) (*proto.MainResponse, error) {
	db, err := utils.DBConnection()
	if err != nil {
		log.Printf("could not open database: %v", err)
		return nil, err
	}
	log.Println(req)
	created := db.Table("books").
		Create(&req.Book)
	if utils.IsNotFound(created) {
		log.Printf("create failed: %v", created.Error)
		return nil, created.Error
	}
	// Re-read the row by name to obtain the generated book_id.
	var newBook models.GetBook
	row := db.Table("books").
		Where("name = ?", req.Book.Name).
		Find(&newBook)
	if utils.IsNotFound(row) {
		log.Printf("lookup after create failed: %v", row.Error)
		return nil, row.Error
	}
	log.Println(newBook.BookId)
	return &proto.MainResponse{
		Book: &proto.BookMainInformation{
			Id:     int32(newBook.BookId),
			Author: newBook.Author,
			Name:   newBook.Name,
			Price:  newBook.Price,
		},
	}, nil
}
// Get returns main information for every book in the table named by
// req.Key.
//
// BUG FIX: log.Fatal on DB errors killed the whole server; errors are now
// returned to the caller.
// SECURITY NOTE(review): req.Key is used verbatim as a table name — that is
// client-controlled SQL-identifier injection; it should be validated against
// a whitelist of table names.
func (*server) Get(c context.Context, req *proto.GetBooksRequest) (*proto.ManyResponse, error) {
	db, err := utils.DBConnection()
	if err != nil {
		log.Printf("could not open database: %v", err)
		return nil, err
	}
	var books []models.GetBook
	rows := db.Table(req.Key).
		Find(&books)
	log.Println(books)
	if utils.IsNotFound(rows) {
		log.Printf("select failed: %v", rows.Error)
		return nil, rows.Error
	}
	result := make([]*proto.BookMainInformation, 0, len(books))
	for _, b := range books {
		result = append(result, &proto.BookMainInformation{
			Id:     int32(b.BookId),
			Author: b.Author,
			Name:   b.Name,
			Price:  b.Price,
		})
	}
	return &proto.ManyResponse{
		Books: result,
	}, nil
}
// GetById returns the full profile of the book identified by req.Id.
//
// BUG FIX: log.Fatal on DB errors killed the whole server; errors are now
// returned to the caller.
func (*server) GetById(c context.Context, req *proto.WorkOnlyIdRequest) (*proto.GetByIdResponse, error) {
	db, err := utils.DBConnection()
	if err != nil {
		log.Printf("could not open database: %v", err)
		return nil, err
	}
	var book models.GetBook
	row := db.Table("books").
		Where("book_id = ?", req.GetId()).
		Find(&book)
	if utils.IsNotFound(row) {
		log.Printf("lookup failed: %v", row.Error)
		return nil, row.Error
	}
	log.Println(book.BookId)
	return &proto.GetByIdResponse{
		Book: &proto.Book{
			Author: book.Author,
			Name:   book.Name,
			Price:  book.Price,
			Genre:  book.Genre,
			Cover:  book.Cover,
			Page:   book.Page,
		},
	}, nil
}
// Discount lowers the price of book req.Id by req.Percent percent and
// returns the updated main information.
//
// BUG FIX: log.Fatal on DB errors killed the whole server; errors are now
// returned to the caller.
func (*server) Discount(c context.Context, req *proto.DiscountRequest) (*proto.MainResponse, error) {
	db, err := utils.DBConnection()
	if err != nil {
		log.Printf("could not open database: %v", err)
		return nil, err
	}
	var oldPrice float32
	getPrice := db.Table("books").
		Select("price").
		Where("book_id = ?", req.Id).
		Find(&oldPrice)
	if utils.IsNotFound(getPrice) {
		log.Printf("price lookup failed: %v", getPrice.Error)
		return nil, getPrice.Error
	}
	// New price: old price reduced by req.Percent percent.
	newPrice := (1 - float32(req.Percent)/100) * oldPrice
	update := db.Table("books").
		Where("book_id = ?", req.Id).
		Update("price", newPrice)
	if utils.IsNotFound(update) {
		log.Printf("price update failed: %v", update.Error)
		return nil, update.Error
	}
	var updated models.BookMainInformation
	row := db.Table("books").
		Select("book_id, author, name, price").
		Where("book_id = ?", req.Id).
		Find(&updated)
	if utils.IsNotFound(row) {
		log.Printf("reload after update failed: %v", row.Error)
		return nil, row.Error
	}
	return &proto.MainResponse{
		Book: &proto.BookMainInformation{
			Id:     int32(updated.BookId),
			Author: updated.Author,
			Name:   updated.Name,
			Price:  updated.Price,
		},
	}, nil
}
// Delete removes book req.Id from the "books" table and returns the main
// information of the record as it was before deletion.
//
// BUG FIX: log.Fatal on DB errors killed the whole server; errors are now
// returned to the caller.
func (*server) Delete(c context.Context, req *proto.WorkOnlyIdRequest) (*proto.MainResponse, error) {
	db, err := utils.DBConnection()
	if err != nil {
		log.Printf("could not open database: %v", err)
		return nil, err
	}
	var deleted models.BookMainInformation
	row := db.Table("books").
		Select("book_id, author, name, price").
		Where("book_id = ?", req.Id).
		Find(&deleted).
		Delete(&models.GetBook{})
	if utils.IsNotFound(row) {
		log.Printf("delete failed: %v", row.Error)
		return nil, row.Error
	}
	return &proto.MainResponse{
		Book: &proto.BookMainInformation{
			Id:     int32(deleted.BookId),
			Author: deleted.Author,
			Name:   deleted.Name,
			Price:  deleted.Price,
		},
	}, nil
}
// Search returns books whose author or name matches req.Key
// (case-insensitive; the author is matched anywhere in the string, the name
// only as a prefix — this mirrors the original ILIKE patterns).
//
// BUG FIX: log.Fatal on DB errors killed the whole server; errors are now
// returned to the caller.
func (*server) Search(c context.Context, req *proto.SearchRequest) (*proto.ManyResponse, error) {
	db, err := utils.DBConnection()
	if err != nil {
		log.Printf("could not open database: %v", err)
		return nil, err
	}
	var books []models.BookMainInformation
	rows := db.Table("books").
		Select("book_id, author, name, price").
		Where("author ILIKE ? OR name ILIKE ?", "%"+req.GetKey()+"%", req.GetKey()+"%").
		Find(&books)
	if utils.IsNotFound(rows) {
		log.Printf("search failed: %v", rows.Error)
		return nil, rows.Error
	}
	result := make([]*proto.BookMainInformation, 0, len(books))
	for _, b := range books {
		result = append(result, &proto.BookMainInformation{
			Id:     int32(b.BookId),
			Author: b.Author,
			Name:   b.Name,
			Price:  b.Price,
		})
	}
	return &proto.ManyResponse{
		Books: result,
	}, nil
}
func main() {
log.Println("Server is ready ...")
lis, err := net.Listen("tcp", ":9000")
if err != nil {
log.Fatal(err)
}
s := grpc.NewServer()
proto.RegisterBookProfilesServer(s, &server{})
err = s.Serve(lis)
if err != nil {
log.Fatal(err)
}
} |
// Manual
// https://developer.github.com/apps/building-oauth-apps/authorizing-oauth-apps/
package oauth_github
import (
"fmt"
"net/http"
"strings"
"github.com/a1div0/oauth"
"net/url"
"io/ioutil"
"encoding/json"
"time"
)
// OAuthGitHub implements the oauth provider interface for GitHub's
// web-application OAuth flow.
type OAuthGitHub struct {
	ClientId       string    // OAuth application client id
	ClientSecret   string    // OAuth application client secret
	token          string    // access token obtained from the code exchange
	token_dt_start time.Time // when the token was obtained
	redirect_uri   string    // callback URL recorded by LoginURL
}
// ServiceName returns the provider identifier used by the oauth framework.
func (s *OAuthGitHub) ServiceName() (string) {
	return "github"
}
// LoginURL builds the GitHub authorization URL the user should be redirected
// to; the callback URL is remembered for the later code/token exchange.
func (s *OAuthGitHub) LoginURL(verification_code_callback_url string, state string) (string) {
	s.redirect_uri = verification_code_callback_url
	q := url.Values{
		"client_id":    {s.ClientId},
		"redirect_uri": {verification_code_callback_url},
		"scope":        {"read:user user:email"},
		"state":        {state},
	}
	return "https://github.com/login/oauth/authorize?" + q.Encode()
}
// OnRecieveVerificationCode exchanges the verification code for an access
// token and then loads the user's profile data into u.
func (s *OAuthGitHub) OnRecieveVerificationCode(code string, u *oauth.UserData) (error) {
	// Request the access token using the received verification code.
	if err := s.code_to_token(code); err != nil {
		return err
	}
	return s.token_to_userdata(u)
}
// code_to_token exchanges the OAuth verification code for an access token at
// GitHub's token endpoint and stores the token (plus its start time) on s.
func (s *OAuthGitHub) code_to_token(code string) (error) {
	formData := url.Values{
		"code":          {code},
		"client_id":     {s.ClientId},
		"client_secret": {s.ClientSecret},
		"redirect_uri":  {s.redirect_uri},
	}
	resp, err := http.PostForm("https://github.com/login/oauth/access_token", formData)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return err
	}
	// GitHub answers with a url-encoded form: either error=... or access_token=...
	params, err := url.ParseQuery(string(body))
	if err != nil {
		return err
	}
	if error_text, error_exist := params["error"]; error_exist {
		return fmt.Errorf("Error: %s", strings.Join(error_text, ""))
	}
	tokens, token_exist := params["access_token"]
	if !token_exist {
		return fmt.Errorf("Error: token not exist!")
	}
	s.token = strings.Join(tokens, "")
	s.token_dt_start = time.Now()
	return nil
}
// token_to_data performs an authenticated GET against the given GitHub API
// url and returns the raw response body.
func (s *OAuthGitHub) token_to_data(url string) ([]byte, error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, err
	}
	// Authenticate the request with the previously obtained access token.
	req.Header.Set("Authorization", "token "+s.token)
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		// BUG FIX: the error must be checked BEFORE touching resp — when Do
		// fails, resp is nil, and the original `defer resp.Body.Close()`
		// placed above this check dereferenced a nil pointer.
		return nil, err
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	return body, nil
}
// token_to_userdata fetches the authenticated user's profile and e-mail
// addresses from the GitHub API and fills u with them.
func (s *OAuthGitHub) token_to_userdata(u *oauth.UserData) error {
	json_bytes, err := s.token_to_data("https://api.github.com/user")
	if err != nil {
		return err
	}
	type GithubUserAnswerStruct struct {
		Id        int64  `json:"id"`
		Login     string `json:"login"`
		NodeId    string `json:"node_id"`
		AvatarUrl string `json:"avatar_url"`
		// BUG FIX: this tag used to be "avatar_url" (copy-paste), which
		// made Email receive the avatar URL instead of the address.
		Email string `json:"email"`
	}
	var UserAnswer GithubUserAnswerStruct
	err = json.Unmarshal(json_bytes, &UserAnswer)
	if err != nil {
		return err
	}
	u.ExtId = fmt.Sprintf("%d", UserAnswer.Id)
	u.Name = UserAnswer.Login
	json_bytes, err = s.token_to_data("https://api.github.com/user/emails")
	if err != nil {
		return err
	}
	type GithubEmail struct {
		Email    string `json:"email"`
		Primary  bool   `json:"primary"`
		Verified bool   `json:"verified"`
		// GitHub sends visibility as a string ("public"/"private"/null);
		// the field was previously a bool with the misspelled tag
		// "visiblity". It is currently unused.
		Visibility string `json:"visibility"`
	}
	var UserEmails []GithubEmail
	err = json.Unmarshal(json_bytes, &UserEmails)
	if err != nil {
		return err
	}
	if len(UserEmails) < 1 {
		return fmt.Errorf("Error: Email not found!")
	}
	// Prefer the primary address; fall back to the first one listed.
	u.Email = UserEmails[0].Email
	for _, e := range UserEmails {
		if e.Primary {
			u.Email = e.Email
			break
		}
	}
	return nil
}
|
// Copyright (c) 2020 Siemens AG
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
// Author(s): Jonas Plum
package forensicstore
import (
"encoding/json"
"fmt"
"path"
"github.com/qri-io/jsonschema"
"github.com/forensicanalysis/stixgo"
)
// Schemas maps each STIX schema's title to its parsed root schema. It is
// populated once by init below.
var Schemas map[string]*jsonschema.RootSchema // nolint:gochecknoglobals

// init loads every embedded STIX JSON schema into Schemas, rewrites
// cross-file $ref pointers to "elementary:<Title>" URIs registered in
// jsonschema.DefaultSchemaPool, and resolves all remote references so the
// schemas are ready for validation. Any failure panics: the embedded
// schemas must be loadable for the package to function.
func init() { // nolint:gochecknoinits
	Schemas = make(map[string]*jsonschema.RootSchema)
	// Maps a schema file's base name to its title, so $refs (which point
	// at file names) can be rewritten to title-based URIs below.
	nameTitle := map[string]string{}
	// unmarshal schemas
	for name, content := range stixgo.FS {
		schema := &jsonschema.RootSchema{}
		if err := json.Unmarshal(content, schema); err != nil {
			panic(err)
		}
		nameTitle[path.Base(name)] = schema.Title
		Schemas[schema.Title] = schema
	}
	// replace refs
	for _, schema := range Schemas {
		err := walkJSON(schema, func(elem jsonschema.JSONPather) error {
			if sch, ok := elem.(*jsonschema.Schema); ok {
				// Only rewrite external (non-fragment) references.
				if sch.Ref != "" && sch.Ref[0] != '#' {
					sch.Ref = "elementary:" + nameTitle[path.Base(sch.Ref)]
				}
			}
			return nil
		})
		if err != nil {
			panic(err)
		}
		// Register the schema so "elementary:" refs resolve from the pool.
		jsonschema.DefaultSchemaPool["elementary:"+schema.Title] = &schema.Schema
	}
	// fetch references
	for _, schema := range Schemas {
		err := schema.FetchRemoteReferences()
		if err != nil {
			panic(fmt.Sprint("could not FetchRemoteReferences:", err))
		}
	}
}
// walkJSON applies fn to elem and then, depth-first, to every JSON child
// reachable from it, stopping at the first error.
func walkJSON(elem jsonschema.JSONPather, fn func(elem jsonschema.JSONPather) error) error {
	if err := fn(elem); err != nil {
		return err
	}
	container, isContainer := elem.(jsonschema.JSONContainer)
	if !isContainer {
		return nil
	}
	for _, child := range container.JSONChildren() {
		if err := walkJSON(child, fn); err != nil {
			return err
		}
	}
	return nil
}
|
package main
import (
"bufio"
"encoding/gob"
"fmt"
"io/ioutil"
"net"
"os"
"os/exec"
"time"
hyperclient "github.com/Cloud-Foundations/Dominator/hypervisor/client"
imgclient "github.com/Cloud-Foundations/Dominator/imageserver/client"
"github.com/Cloud-Foundations/Dominator/lib/constants"
"github.com/Cloud-Foundations/Dominator/lib/errors"
"github.com/Cloud-Foundations/Dominator/lib/filesystem"
"github.com/Cloud-Foundations/Dominator/lib/filter"
"github.com/Cloud-Foundations/Dominator/lib/image"
"github.com/Cloud-Foundations/Dominator/lib/log"
"github.com/Cloud-Foundations/Dominator/lib/srpc"
fm_proto "github.com/Cloud-Foundations/Dominator/proto/fleetmanager"
"github.com/Cloud-Foundations/Dominator/proto/sub"
subclient "github.com/Cloud-Foundations/Dominator/sub/client"
)
// diffSubcommand diffs two typed images with an external diff tool.
// args must hold: tool, left image, right image.
func diffSubcommand(args []string, logger log.DebugLogger) error {
	// Guard against short argument lists instead of panicking on the index.
	if len(args) < 3 {
		return errors.New("usage: diff tool left-image right-image")
	}
	return diffTypedImages(args[0], args[1], args[2])
}
// diffTypedImages loads both typed images, applies whichever delete filter
// exactly one side defines to both sides, and runs the external diff tool.
func diffTypedImages(tool string, lName string, rName string) error {
	leftFS, leftFilter, err := getTypedImage(lName)
	if err != nil {
		return fmt.Errorf("error getting left image: %s", err)
	}
	rightFS, rightFilter, err := getTypedImage(rName)
	if err != nil {
		return fmt.Errorf("error getting right image: %s", err)
	}
	// Pick a filter only when exactly one side provides one, so both
	// sides are reduced the same way before diffing.
	var filt *filter.Filter
	switch {
	case leftFilter != nil && rightFilter == nil:
		filt = leftFilter
	case leftFilter == nil && rightFilter != nil:
		filt = rightFilter
	}
	if leftFS, err = applyDeleteFilter(leftFS, filt); err != nil {
		return fmt.Errorf("error filtering left image: %s", err)
	}
	if rightFS, err = applyDeleteFilter(rightFS, filt); err != nil {
		return fmt.Errorf("error filtering right image: %s", err)
	}
	if err := diffImages(tool, leftFS, rightFS); err != nil {
		return fmt.Errorf("error diffing images: %s", err)
	}
	return nil
}
// getTypedImage resolves a "type:name" image specifier into a file system
// and an optional filter. Supported types: d (directory scan), f (gob
// FileSystem file), i (image server), I (latest image in a directory),
// l (gob Image file), s (poll a sub), v (scan a VM root).
func getTypedImage(typedName string) (
	*filesystem.FileSystem, *filter.Filter, error) {
	// Bare names default to the image-server type.
	if len(typedName) < 3 || typedName[1] != ':' {
		typedName = "i:" + typedName
	}
	name := typedName[2:]
	switch typedName[0] {
	case 'd':
		fs, err := scanDirectory(name)
		return fs, nil, err
	case 'f':
		fs, err := readFileSystem(name)
		return fs, nil, err
	case 'i':
		imageSClient, _ := getClients()
		img, err := getImage(imageSClient, name)
		if err != nil {
			return nil, nil, err
		}
		return img.FileSystem, img.Filter, nil
	case 'I':
		imageSClient, _ := getClients()
		img, err := getLatestImage(imageSClient, name)
		if err != nil {
			return nil, nil, err
		}
		return img.FileSystem, img.Filter, nil
	case 'l':
		img, err := readImage(name)
		if err != nil {
			return nil, nil, err
		}
		return img.FileSystem, img.Filter, nil
	case 's':
		fs, err := pollImage(name)
		return fs, nil, err
	case 'v':
		fs, err := scanVm(name)
		return fs, nil, err
	default:
		return nil, nil, errors.New("unknown image type: " + typedName[:1])
	}
}
// scanDirectory builds a FileSystem by scanning the named local directory.
func scanDirectory(name string) (*filesystem.FileSystem, error) {
	scanned, err := buildImageWithHasher(nil, nil, name, nil)
	if err != nil {
		return nil, err
	}
	return scanned, nil
}
// readFileSystem decodes a gob-encoded FileSystem from the named file and
// rebuilds its inode pointers.
func readFileSystem(name string) (*filesystem.FileSystem, error) {
	file, err := os.Open(name)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	var fileSystem filesystem.FileSystem
	if err := gob.NewDecoder(file).Decode(&fileSystem); err != nil {
		return nil, err
	}
	// BUG FIX: propagate the RebuildInodePointers error (it was silently
	// dropped), matching how getImage handles it.
	if err := fileSystem.RebuildInodePointers(); err != nil {
		return nil, err
	}
	return &fileSystem, nil
}
// getImage fetches the named image from the image server, verifies it
// exists, and rebuilds its inode pointers before returning it.
func getImage(client *srpc.Client, name string) (*image.Image, error) {
	img, err := imgclient.GetImageWithTimeout(client, name, *timeout)
	if err != nil {
		return nil, err
	}
	if img == nil {
		return nil, errors.New(name + ": not found")
	}
	err = img.FileSystem.RebuildInodePointers()
	if err != nil {
		return nil, err
	}
	return img, nil
}
// getLatestImage resolves the newest image under the named directory on
// the image server and fetches it.
func getLatestImage(client *srpc.Client, name string) (*image.Image, error) {
	latestName, err := imgclient.FindLatestImage(client, name, *ignoreExpiring)
	if err != nil {
		return nil, err
	}
	return getImage(client, latestName)
}
// getFsOfImage fetches the named image and returns just its file system.
func getFsOfImage(client *srpc.Client, name string) (
	*filesystem.FileSystem, error) {
	// The local was renamed from "image" to "img": it shadowed the image
	// package. The else-after-return is also flattened per Go style.
	img, err := getImage(client, name)
	if err != nil {
		return nil, err
	}
	return img.FileSystem, nil
}
// readImage decodes a gob-encoded Image from the named file and rebuilds
// its file system's inode pointers.
func readImage(name string) (*image.Image, error) {
	file, err := os.Open(name)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	var img image.Image
	if err := gob.NewDecoder(file).Decode(&img); err != nil {
		return nil, err
	}
	// BUG FIX: propagate the RebuildInodePointers error (it was silently
	// dropped), matching how getImage handles it.
	if err := img.FileSystem.RebuildInodePointers(); err != nil {
		return nil, err
	}
	return &img, nil
}
// pollImage polls the sub daemon on the named host and returns the file
// system from its poll response.
func pollImage(name string) (*filesystem.FileSystem, error) {
	clientName := fmt.Sprintf("%s:%d", name, constants.SubPortNumber)
	srpcClient, err := srpc.DialHTTP("tcp", clientName, 0)
	if err != nil {
		return nil, fmt.Errorf("error dialing %s", err)
	}
	defer srpcClient.Close()
	var request sub.PollRequest
	var reply sub.PollResponse
	if err = subclient.CallPoll(srpcClient, request, &reply); err != nil {
		return nil, err
	}
	if reply.FileSystem == nil {
		return nil, errors.New("no poll data")
	}
	// BUG FIX: propagate the RebuildInodePointers error (it was silently
	// dropped), matching how getImage handles it.
	if err := reply.FileSystem.RebuildInodePointers(); err != nil {
		return nil, err
	}
	return reply.FileSystem, nil
}
// scanVm scans the root file system of the named VM via its hypervisor.
func scanVm(name string) (*filesystem.FileSystem, error) {
	vmIpAddr, srpcClient, err := getVmIpAndHypervisor(name)
	if err != nil {
		return nil, err
	}
	defer srpcClient.Close()
	fs, err := hyperclient.ScanVmRoot(srpcClient, vmIpAddr, nil)
	if err != nil {
		return nil, err
	}
	// BUG FIX: propagate the RebuildInodePointers error (it was silently
	// dropped), matching how getImage handles it.
	if err := fs.RebuildInodePointers(); err != nil {
		return nil, err
	}
	return fs, nil
}
// getVmIpAndHypervisor resolves the VM's IP address and opens a connection
// to the hypervisor hosting it. The caller must Close the returned client.
func getVmIpAndHypervisor(vmHostname string) (net.IP, *srpc.Client, error) {
	ip, err := lookupIP(vmHostname)
	if err != nil {
		return nil, nil, err
	}
	address, err := findHypervisor(ip)
	if err != nil {
		return nil, nil, err
	}
	conn, err := srpc.DialHTTP("tcp", address, time.Second*10)
	if err != nil {
		return nil, nil, err
	}
	return ip, conn, nil
}
// findHypervisor determines the hypervisor address for a VM: an explicitly
// configured hypervisor wins, then the fleet manager is consulted, and
// finally localhost is assumed.
func findHypervisor(vmIpAddr net.IP) (string, error) {
	switch {
	case *hypervisorHostname != "":
		return fmt.Sprintf("%s:%d", *hypervisorHostname, *hypervisorPortNum),
			nil
	case *fleetManagerHostname != "":
		fm := fmt.Sprintf("%s:%d", *fleetManagerHostname, *fleetManagerPortNum)
		client, err := srpc.DialHTTP("tcp", fm, time.Second*10)
		if err != nil {
			return "", err
		}
		defer client.Close()
		return findHypervisorClient(client, vmIpAddr)
	default:
		return fmt.Sprintf("localhost:%d", *hypervisorPortNum), nil
	}
}
// findHypervisorClient asks the fleet manager which hypervisor hosts the
// VM with the given IP address and returns that hypervisor's address.
func findHypervisorClient(client *srpc.Client,
	vmIpAddr net.IP) (string, error) {
	// NOTE(review): unkeyed struct literal — go vet flags this; confirm
	// the field name in fm_proto.GetHypervisorForVMRequest and key it.
	request := fm_proto.GetHypervisorForVMRequest{vmIpAddr}
	var reply fm_proto.GetHypervisorForVMResponse
	err := client.RequestReply("FleetManager.GetHypervisorForVM", request,
		&reply)
	if err != nil {
		return "", err
	}
	// Presumably lib/errors.New maps an empty server-side error string to
	// nil, so this only fails when an error was reported — TODO confirm.
	if err := errors.New(reply.Error); err != nil {
		return "", err
	}
	return reply.HypervisorAddress, nil
}
// lookupIP resolves a hostname that must map to exactly one IP address.
func lookupIP(vmHostname string) (net.IP, error) {
	ips, err := net.LookupIP(vmHostname)
	if err != nil {
		return nil, err
	}
	if len(ips) != 1 {
		return nil, fmt.Errorf("num IPs: %d != 1", len(ips))
	}
	return ips[0], nil
}
// diffImages writes both file-system listings to temporary files and runs
// the external diff tool on them, streaming the tool's stdout through.
func diffImages(tool string, lfs, rfs *filesystem.FileSystem) error {
	lname, err := writeImage(lfs)
	// Remove is deferred before the error check on purpose: writeImage
	// returns the file name even when the listing fails, so a partially
	// written file still gets cleaned up. (On TempFile failure the name is
	// empty and Remove fails harmlessly.)
	defer os.Remove(lname)
	if err != nil {
		return err
	}
	rname, err := writeImage(rfs)
	defer os.Remove(rname)
	if err != nil {
		return err
	}
	cmd := exec.Command(tool, lname, rname)
	cmd.Stdout = os.Stdout
	return cmd.Run()
}
// writeImage dumps the file-system listing to a fresh temporary file and
// returns the file's name. Note the name is returned even when Listf
// fails, so the caller can still remove the partially written file.
func writeImage(fs *filesystem.FileSystem) (string, error) {
	file, err := ioutil.TempFile("", "imagetool")
	if err != nil {
		return "", err
	}
	// Deferred calls run LIFO: the writer is flushed before the file is
	// closed.
	defer file.Close()
	writer := bufio.NewWriter(file)
	defer writer.Flush()
	return file.Name(), fs.Listf(writer, listSelector, listFilter)
}
|
package main
import "strings"
// wordsTyping reports how many times the sentence can be fully typed on a
// screen of rows x cols, where words may not be split across lines and
// consecutive words on a line are separated by one space.
func wordsTyping(sentence []string, rows, cols int) int {
	// BUG FIX: guard degenerate screens. With rows == 0 computeParagraph
	// reports zero rows used and the division below would panic.
	if rows <= 0 || cols <= 0 {
		return 0
	}
	joinedSentence := strings.Join(sentence, " ")
	sentenceNo, rowsUsed := computeParagraph(joinedSentence, rows, cols)
	// One "paragraph" is the shortest row span after which the layout
	// repeats; multiply it out, then lay out the leftover rows.
	completeParagraphSentences := (rows / rowsUsed) * sentenceNo
	remainingSentences, _ := computeParagraph(joinedSentence, rows%rowsUsed, cols)
	return completeParagraphSentences + remainingSentences
}

// computeParagraph lays the joined sentence (repeated cyclically, with one
// space between repetitions) onto at most maxRows rows of width cols. It
// returns how many complete copies of the sentence were typed and how many
// rows it took to reach a position where the layout repeats (a row
// boundary coinciding with a sentence boundary). If no such boundary is
// hit, rowsUsed is maxRows.
func computeParagraph(sentence string, maxRows, cols int) (sentenceNo, rowsUsed int) {
	cells := 0 // cursor into the infinite repetition of "sentence "
	length := len(sentence)
	for i := 1; i <= maxRows; i++ {
		cells += cols
		remainingCells := cells % (length + 1)
		if remainingCells == 0 || remainingCells == length {
			// The row ends exactly at a sentence boundary: the layout
			// repeats from here on.
			rowsUsed = i
			sentenceNo = cells / (length + 1)
			if remainingCells == length {
				sentenceNo++
			}
			return
		}
		if sentence[remainingCells] == ' ' {
			// The row ends just before a separating space: skip it.
			cells++
		} else {
			// The row ends mid-word: back the cursor up to the start of
			// that word. (The inner index was renamed from i, which
			// shadowed the row counter.)
			for j := remainingCells - 1; j >= 0; j-- {
				if sentence[j] == ' ' {
					break
				}
				cells--
			}
		}
	}
	rowsUsed = maxRows
	sentenceNo = cells / (length + 1)
	return
}
|
package main
// Deck and hand-ranking tables for the poker evaluator.
const cardsInDeck = 52

var (
	// Suits of a standard deck, and card values ordered strongest first.
	cardSuits  []string = []string{"spades", "diamonds", "clubs", "hearts"}
	cardValues []string = []string{"ace", "king", "queen", "jack", "10", "9", "8", "7", "6", "5", "4", "3", "2"}
	// Poker hand identifiers ordered strongest first, with their
	// human-readable display names in the same order.
	hands          []string = []string{"royalFlush", "straightFlush", "fourOfAkind", "fullHouse", "flush", "straight", "threeOfAkind", "twoPairs", "pair", "highCard"}
	handsFullNames []string = []string{"royal flush", "straight flush", "four of a kind", "full house", "flush", "straight", "three of a kind", "two pairs", "pair", "high card"}
)

// findCardValuesIndex returns the rank index of cardValue in cardValues,
// or 0 (the index of "ace") when the value is unknown.
func findCardValuesIndex(cardValue string) int {
	for idx, candidate := range cardValues {
		if candidate == cardValue {
			return idx
		}
	}
	return 0
}

// findHandIndex returns the strength index of hand in hands, or 0 (the
// index of "royalFlush") when the hand name is unknown.
func findHandIndex(hand string) int {
	for idx, candidate := range hands {
		if candidate == hand {
			return idx
		}
	}
	return 0
}
// get3CardCombination finds all possible combinations of the given number
// of community cards dealt at the table.
func get3CardCombination(numberOfCards int, cards []*card) [][]*card {
	allCombinations := make([][]*card, 0)
	scratch := make([]*card, numberOfCards)
	cardCombinations(numberOfCards, cards, 0, scratch, &allCombinations, 0)
	return allCombinations
}

// cardCombinations recursively appends every numberOfCards-sized
// combination of cards[i:] (staged in tmp) to *allCombinations.
func cardCombinations(numberOfCards int, cards []*card, index int, tmp []*card, allCombinations *[][]*card, i int) {
	switch {
	case index == numberOfCards:
		// tmp holds a complete combination; append a copy of it.
		chosen := make([]*card, len(tmp))
		copy(chosen, tmp)
		*allCombinations = append(*allCombinations, chosen)
	case i >= len(cards):
		// Ran out of cards before filling the combination.
	default:
		// Branch 1: take cards[i] at this position.
		tmp[index] = cards[i]
		cardCombinations(numberOfCards, cards, index+1, tmp, allCombinations, i+1)
		// Branch 2: skip cards[i].
		cardCombinations(numberOfCards, cards, index, tmp, allCombinations, i+1)
	}
}
|
package models
import (
"github.com/alehano/gobootstrap/sys/cmd"
"github.com/alehano/gobootstrap/sys/db"
"github.com/spf13/cobra"
)
// init registers the "init_db" subcommand on the application's root
// command; running it initializes all DB tables registered in sys/db.
func init() {
	cmd.RootCmd.AddCommand(&cobra.Command{
		Use:   "init_db",
		Short: "Init all DB",
		Long:  "Init all DB tables with DBInitter interface being registered in sys/db",
		RunE: func(cmd *cobra.Command, args []string) error {
			return db.InitAllDBs()
		},
	})
}
|
package store
import (
"context"
"time"
"github.com/ankurs/Feed/Feed/service/store/cassandra"
"github.com/ankurs/Feed/Feed/service/store/db"
"github.com/ankurs/Feed/Feed/service/store/redis"
)
// RegisterRequest carries the fields needed to create a new user account.
type RegisterRequest interface {
	GetLastName() string
	GetFirstName() string
	GetUserName() string
	GetPassword() string
	GetEmail() string
}

// LoginRequest carries the credentials for an authentication attempt.
type LoginRequest interface {
	GetUserName() string
	GetPassword() string
}

// LoginResponse exposes the session token and user info produced by a
// successful registration or login.
type LoginResponse interface {
	GetToken() string
	GetUserInfo() UserInfo
}

// We alias the db-layer types so that we can separate them out in future.
type UserInfo = db.UserInfo

// FeedInfo aliases the db-layer feed item type (same rationale as UserInfo).
type FeedInfo = db.FeedInfo

// Storage is the persistence facade used by the feed service: user
// accounts, follow relationships and feed items.
type Storage interface {
	Register(context.Context, RegisterRequest) (LoginResponse, error)
	Login(context.Context, LoginRequest) (LoginResponse, error)
	GetUser(ctx context.Context, userID string) (UserInfo, error)
	AddFollow(ctx context.Context, userId, followingId string) error
	RemoveFollow(ctx context.Context, userId, followingId string) error
	CreateFeedItem(ctx context.Context, fi FeedInfo, ts time.Time) (string, error)
	AddUserFeedItem(ctx context.Context, userId, itemId string, ts time.Time) error
	AddFollowingFeedItem(ctx context.Context, userId, itemId string, ts time.Time) error
	GetFollowers(ctx context.Context, userId string) <-chan db.Data
	FetchFeed(ctx context.Context, userId string, before time.Time, ftype int32, limit int) ([]FeedInfo, error)
	Close()
}

// Config aggregates the configuration of every backing store.
type Config struct {
	Cassandra cassandra.Config
	Redis     redis.Config
}
|
package confformat
import "fmt"
// Sample documents used by UnmarshalAll, one per supported format.
const (
	exampleTOML = `name="Example1"
age=99
`
	exampleJSON = `{"name":"Example2","age":98}`
	exampleYAML = `name: Example3
age: 97
`
)
// UnmarshalAll takes data in various formats (TOML, JSON, YAML) and
// converts each sample document into its struct, printing the results.
func UnmarshalAll() error {
	var (
		tomlData TOMLData
		jsonData JSONData
		yamlData YAMLData
	)
	if _, err := tomlData.Decode([]byte(exampleTOML)); err != nil {
		return err
	}
	fmt.Println("TOML Unmarshal =", tomlData)
	if err := jsonData.Decode([]byte(exampleJSON)); err != nil {
		return err
	}
	fmt.Println("JSON Unmarshal =", jsonData)
	if err := yamlData.Decode([]byte(exampleYAML)); err != nil {
		return err
	}
	fmt.Println("Yaml Unmarshal =", yamlData)
	return nil
}
|
package admin
import (
"blog/app/models"
"blog/app/web/responses"
)
// SystemConfigResponse renders SysConfig models into admin API results.
type SystemConfigResponse struct {
}
// List converts a slice of SysConfig models into a list of result maps.
func (r SystemConfigResponse) List(configs []*models.SysConfig) (list responses.Results) {
	// The parameter was renamed from "models": it shadowed the imported
	// models package inside the function body.
	for _, model := range configs {
		list = append(list, r.Item(model))
	}
	return list
}
// Item converts one SysConfig model into a result map for API output.
// Timestamps are rendered as "YYYY-MM-DD hh:mm:ss".
func (r SystemConfigResponse) Item(model *models.SysConfig) responses.Result {
	const timeLayout = "2006-01-02 15:04:05"
	result := responses.Result{
		"id":          model.ID,
		"key":         model.Key,
		"value":       model.Value,
		"name":        model.Name,
		"description": model.Description,
	}
	result["created_at"] = model.CreatedAt.Format(timeLayout)
	result["updated_at"] = model.UpdatedAt.Format(timeLayout)
	return result
}
|
package models
import (
"fmt"
"github.com/astaxie/beego/orm"
"strings"
"time"
"tokensky_bg_admin/common"
"tokensky_bg_admin/conf"
"tokensky_bg_admin/utils"
)
// BorrowLimitingQueryParam describes the filters for querying forced
// liquidation records.
type BorrowLimitingQueryParam struct {
	BaseQueryParam
	StartTime int64 `json:"startTime"` // start of the time range
	EndTime   int64 `json:"endTime"`   // end of the time range
	OrderId   string `json:"orderId"`  // order number
	Name      string `json:"name"`     // user nickname to filter by
}
// TableName returns the database table name for BorrowLimiting
// (beego ORM naming hook).
func (a *BorrowLimiting) TableName() string {
	return BorrowLimitingTBName()
}
// BorrowLimiting is the forced-liquidation record table: one row per
// liquidated borrow order. Fields tagged orm:"-" are display-only and are
// filled from the related Order/User rows after querying.
type BorrowLimiting struct {
	Order *BorrowOrder `orm:"pk;rel(one);column(order_id)"json:"-"form:"-"`
	// user uid
	User *TokenskyUser `orm:"rel(fk);column(user_id)"json:"-"form:"-"`
	// currency type
	Symbol string `orm:"column(symbol)"json:"symbol"form:"symbol"`
	// collateral (pledge) ratio at liquidation time
	Pledge float64 `orm:"column(pledge)"json:"pledge"form:"pledge"`
	// currency price at liquidation time
	SymbolPrice float64 `orm:"column(symbol_price)"json:"symbolPrice"form:"symbolPrice"`
	// sale price
	TotalPrice float64 `orm:"column(total_price)"json:"totalPrice"form:"totalPrice"`
	// repayment amount due
	PayBackPrice float64 `orm:"column(pay_back_price)"json:"payBackPrice"form:"payBackPrice"`
	// amount actually repaid
	PracticalPrice float64 `orm:"column(practical_price)"json:"practicalPrice"form:"practicalPrice"`
	// admin who executed the liquidation
	AdminId int `orm:"column(admin_id)"json:"-"form:"-"`
	// overdue time
	ExceedTime time.Time `orm:"type(datetime);column(exceed_time)"json:"exceedTime"form:"exceedTime"`
	// sale time
	SellTime time.Time `orm:"type(datetime);column(sell_time)"json:"sellTime"form:"sellTime"`
	// creation time
	CreateTime time.Time `orm:"auto_now_add;type(datetime);column(create_time)"json:"createTime"form:"createTime"`
	// associated user nickname (not stored; copied from User)
	Name string `orm:"-"json:"name"form:"-"`
	// pledge method: 1 = current wallet, 2 = wealth-management package
	PledgeWay int `orm:"-"json:"pledgeWay"form:"-"`
	// pledged currency amount (copied from Order)
	PledgeAmount float64 `orm:"-"json:"pledgeAmount"form:"-"`
	// loan amount (copied from Order)
	Amount float64 `orm:"-"json:"amount"form:"-"`
	// loan currency type, e.g. USDT (copied from Order)
	LoanSymbol string `orm:"-"json:"loanSymbol"form:"-"`
}
// BorrowLimitingPageList returns one page of forced-liquidation records
// matching params, plus the total (unpaginated) match count. Related User
// and Order rows are loaded and a few display fields are copied from them.
func BorrowLimitingPageList(params *BorrowLimitingQueryParam) ([]*BorrowLimiting, int64) {
	o := orm.NewOrm()
	query := o.QueryTable(BorrowLimitingTBName())
	data := make([]*BorrowLimiting, 0)
	// default sort order
	sortorder := "createTime"
	switch params.Sort {
	case "create_time":
		// NOTE(review): "create_time" maps to "id" while the default is
		// "createTime" — looks inverted; confirm the intended columns.
		sortorder = "id"
	}
	if params.Order == "desc" {
		sortorder = "-" + sortorder
	}
	if params.Name != "" {
		query = query.Filter("User__nick_name__exact", params.Name)
	}
	if params.OrderId != "" {
		query = query.Filter("Order__order_id__exact", params.OrderId)
	}
	total, _ := query.Count()
	// Offset is treated as a 1-based page number. Query errors are
	// ignored and an empty page is returned instead.
	query.OrderBy(sortorder).Limit(params.Limit, (params.Offset-1)*params.Limit).RelatedSel().All(&data)
	for _, v := range data {
		if v.User != nil {
			v.Name = v.User.NickName
		}
		if v.Order != nil {
			v.PledgeWay = v.Order.PledgeWay
			v.PledgeAmount = v.Order.PledgeAmount
			v.Amount = v.Order.Amount
			v.LoanSymbol = v.Order.LoanSymbol
		}
	}
	return data, total
}
// BorrowLimitingsSell sells the collateral of a liquidated borrow order:
// it updates the order and liquidation records, marks pledged
// wealth-management orders as sold, writes sell/repay transaction records,
// and credits any surplus back to the user — all inside one ORM
// transaction. orderId identifies the order, totalPrice is the realized
// sale price, adminId is the operator executing the sale.
func BorrowLimitingsSell(orderId string, totalPrice float64, adminId int) error {
	var err error
	obj := &BorrowLimiting{}
	o := orm.NewOrm()
	query := o.QueryTable(BorrowLimitingTBName())
	err = query.Filter("order_id__exact", orderId).RelatedSel().One(obj)
	if err != nil {
		return err
	}
	// Only orders in status 5 or 6 may be sold.
	if obj.Order.Status != 5 && obj.Order.Status != 6 {
		return fmt.Errorf("状态异常")
	}
	// Begin the transaction covering all writes below.
	err = o.Begin()
	if err != nil {
		return fmt.Errorf("开启事务失败")
	}
	// Collateral is a wealth-management package (RelevStatus == 1).
	if obj.Order.RelevStatus == 1 {
		order := obj.Order
		order.RepayTime = time.Now()
		//order.ForcedPrice = obj.SymbolPrice
		order.SellTotalPrice = totalPrice
		_, err := o.Update(order)
		if err != nil {
			o.Rollback()
			return fmt.Errorf("保存订单表失败")
		}
		// Mark every pledged wealth-management order as sold (status 3).
		ids := strings.Split(obj.Order.RelevanceId, ",")
		if len(ids) > 0 {
			params := map[string]interface{}{
				"status": 3,
			}
			financialQuery := o.QueryTable(FinancialOrderTBName())
			num, err := financialQuery.Filter("id__in", ids).Update(params)
			if err != nil {
				o.Rollback()
				return fmt.Errorf("质押物状态修改异常 err:" + err.Error())
			}
			if num != int64(len(ids)) {
				o.Rollback()
				return fmt.Errorf("质押物数量不一致")
			}
		}
		// Delete the pledge-usage records for this order.
		err = BorrowUseFinancialOrderDelete(o, obj.Order.OrderId)
		if err != nil {
			// NOTE(review): this path returns without o.Rollback(),
			// unlike every other error path — confirm and add it.
			return fmt.Errorf("删除抵押记录异常 err:" + err.Error())
		}
	}
	// Record the sale price on the liquidation row.
	obj.TotalPrice = totalPrice
	now := time.Now()
	// Transaction record for the forced sale.
	newRecord1 := &TokenskyTransactionRecord{
		CoinType:          obj.Symbol,
		TranType:          "强平卖出",
		PushTime:          now,
		Category:          1,
		Money:             totalPrice,
		Status:            1,
		RelevanceCategory: "borrowOrder",
		RelevanceId:       obj.Order.OrderId,
		User:              &TokenskyUser{UserId: obj.User.UserId},
	}
	_, err = o.Insert(newRecord1)
	if err != nil {
		o.Rollback()
		return fmt.Errorf("新增强平售卖记录表失败")
	}
	// If the sale price is below the repayment due, only the sale price
	// can actually be repaid.
	obj.PracticalPrice = obj.PayBackPrice
	if obj.PracticalPrice > totalPrice {
		obj.PracticalPrice = totalPrice
	}
	// Transaction record for the forced repayment.
	// NOTE(review): Money is PayBackPrice even when PracticalPrice is
	// lower — confirm the record should not use PracticalPrice.
	newRecord2 := &TokenskyTransactionRecord{
		CoinType:          obj.Symbol,
		TranType:          "强平还款",
		PushTime:          now,
		Category:          2,
		Money:             obj.PayBackPrice,
		Status:            1,
		RelevanceCategory: "borrowOrder",
		RelevanceId:       obj.Order.OrderId,
		User:              &TokenskyUser{UserId: obj.User.UserId},
	}
	_, err = o.Insert(newRecord2)
	if err != nil {
		o.Rollback()
		return fmt.Errorf("新增强平还款记录表失败")
	}
	// Surplus remaining after repayment.
	num := utils.Float64Sub(totalPrice, obj.PracticalPrice)
	obj.AdminId = adminId
	_, err = o.Update(obj)
	if err != nil {
		o.Rollback()
		return fmt.Errorf("更新强平表异常")
	}
	// Credit the surplus back to the user's balance.
	if num > conf.FLOAT_PRECISE_8 {
		balanceChange := common.NewTokenskyUserBalanceChange(3, "borrowLimitingsSell", "强平售卖")
		balanceChange.Add(obj.User.UserId, obj.Symbol, obj.Order.OrderId, conf.CHANGE_ADD, num, "", 0)
		ok, _, tx := balanceChange.Send()
		if !ok {
			o.Rollback()
			return fmt.Errorf("用户资产更变失败")
		}
		ok = TokenskyUserBalanceHashSetStatus(o, tx)
		if !ok {
			// NOTE(review): missing o.Rollback() before returning —
			// confirm and add it.
			return fmt.Errorf("设置哈希表异常")
		}
	}
	err = o.Commit()
	if err != nil {
		o.Rollback()
		return fmt.Errorf("事务执行失败")
	}
	return nil
}
|
package recv
import (
"github.com/scottshotgg/proximity/pkg/listener"
)
type (
	// Recv is the interface for a message receiver: it can be opened,
	// closed, and have listener.Listener instances attached to it.
	Recv interface {
		Open() error
		Close() error
		Attach(lis listener.Listener) error
	}
)
|
package usecases
import (
"fmt"
"time"
"github.com/michaldziurowski/tech-challenge-time/server/timetracking/domain"
)
// Service is the time-tracking use-case API: starting, stopping, resuming
// and renaming work sessions, and aggregating their durations over a
// time range.
type Service interface {
	StartSession(userId string, name string, startedAt time.Time) (int64, error)
	StopSession(userId string, sessionId int64, stoppedAt time.Time) error
	ResumeSession(userId string, sessionId int64, resumedAt time.Time) error
	SetSessionName(userId string, sessionId int64, name string) error
	GetSessionsByRange(userId string, from time.Time, to time.Time) ([]domain.SessionAggregate, error)
}
// service implements Service on top of an event store (session start/stop
// events), a repository (session records) and a clock abstraction.
type service struct {
	eventStore   EventStore
	repository   Repository
	dateProvider DateProvider
}

// NewService wires the dependencies into a Service implementation.
func NewService(e EventStore, r Repository, d DateProvider) Service {
	return service{e, r, d}
}
// getSessionForUser loads the session and verifies it belongs to userId.
func (s service) getSessionForUser(userId string, sessionId int64) (domain.Session, error) {
	session, err := s.repository.GetSession(sessionId)
	if err != nil {
		return domain.Session{}, err
	}
	if session.UserId != userId {
		// Error string fixed per Go convention (staticcheck ST1005):
		// lower-case, no trailing period, "doesnt" typo corrected.
		return domain.Session{}, fmt.Errorf("session does not belong to user")
	}
	return session, nil
}
// StartSession creates a new open session for the user and records the
// corresponding start event. It returns the new session's id, or -1 on
// failure.
func (s service) StartSession(userId string, name string, startedAt time.Time) (int64, error) {
	newSession := domain.Session{
		UserId: userId,
		Name:   name,
		IsOpen: true,
	}
	sessionId, err := s.repository.AddSession(newSession)
	if err != nil {
		return -1, err
	}
	event := domain.SessionEvent{
		Type:      domain.STARTSESSION,
		UserId:    userId,
		SessionId: sessionId,
		Time:      startedAt,
	}
	if err := s.eventStore.AddEvent(event); err != nil {
		return -1, err
	}
	return sessionId, nil
}
// StopSession records a stop event for the user's session and toggles its
// open state.
func (s service) StopSession(userId string, sessionId int64, stoppedAt time.Time) error {
	if _, err := s.getSessionForUser(userId, sessionId); err != nil {
		return err
	}
	if err := s.repository.ToggleSessionState(sessionId); err != nil {
		return err
	}
	event := domain.SessionEvent{
		Type:      domain.STOPSESSION,
		UserId:    userId,
		SessionId: sessionId,
		Time:      stoppedAt,
	}
	return s.eventStore.AddEvent(event)
}
// ResumeSession records a new start event for the user's session and
// toggles its open state back on.
func (s service) ResumeSession(userId string, sessionId int64, resumedAt time.Time) error {
	if _, err := s.getSessionForUser(userId, sessionId); err != nil {
		return err
	}
	if err := s.repository.ToggleSessionState(sessionId); err != nil {
		return err
	}
	event := domain.SessionEvent{
		Type:      domain.STARTSESSION,
		UserId:    userId,
		SessionId: sessionId,
		Time:      resumedAt,
	}
	return s.eventStore.AddEvent(event)
}
// SetSessionName renames the user's session.
func (s service) SetSessionName(userId string, sessionId int64, name string) error {
	if _, err := s.getSessionForUser(userId, sessionId); err != nil {
		return err
	}
	return s.repository.SetSessionName(sessionId, name)
}
// sessionDuration accumulates elapsed time for one session while folding
// over its start/stop events: startTime is the most recent unmatched
// start, duration the total of all closed intervals so far.
type sessionDuration struct {
	startTime time.Time
	duration  time.Duration
}
// GetSessionsByRange folds the user's session events in [from, to] into
// per-session durations and returns one SessionAggregate per session
// seen. A session whose last event in range is a start (i.e. still
// running) is extended up to dateProvider's current time.
func (s service) GetSessionsByRange(userId string, from time.Time, to time.Time) ([]domain.SessionAggregate, error) {
	events, err := s.eventStore.GetEventsByRange(userId, from, to)
	if err != nil {
		return nil, err
	}
	// Per-session accumulator, keyed by session id. Assumes events arrive
	// in chronological order — TODO confirm the store guarantees this.
	durations := make(map[int64]*sessionDuration)
	for idx, event := range events {
		if sDuration, exist := durations[event.SessionId]; exist {
			switch event.Type {
			case domain.STOPSESSION:
				// Close the currently open interval.
				sDuration.duration += event.Time.Sub(sDuration.startTime)
			case domain.STARTSESSION:
				if idx == len(events)-1 {
					// Last event overall is a start: count up to "now".
					sDuration.duration += s.dateProvider.GetCurrent().Sub(event.Time)
				} else {
					sDuration.startTime = event.Time
				}
			}
		} else if event.Type == domain.STARTSESSION {
			durations[event.SessionId] = &sessionDuration{startTime: event.Time}
			if idx == len(events)-1 {
				durations[event.SessionId].duration += s.dateProvider.GetCurrent().Sub(event.Time)
			}
		}
	}
	aggregates := make([]domain.SessionAggregate, 0, len(durations))
	for sessionId, sDuration := range durations {
		// NOTE(review): the GetSession error is ignored; a missing session
		// yields a zero-value Session in the aggregate — confirm intended.
		session, _ := s.repository.GetSession(sessionId)
		aggregate := domain.SessionAggregate{
			Session:  session,
			Duration: sDuration.duration,
		}
		aggregates = append(aggregates, aggregate)
	}
	return aggregates, nil
}
|
/*
# -*- coding: utf-8 -*-
# @Author : joker
# @Time : 2021/10/25 9:07 上午
# @File : lt_146_lru.go
# @Description :
# @Attention :
*/
package offer
// node is one entry of the LRU's doubly linked list. It remembers its key
// so that evicting the node can also delete the map entry.
type node struct {
	prev  *node
	next  *node
	key   int
	value int
}

// DoubleLinkedList keeps entries ordered from most recently used (just
// after head) to least recently used (just before tail). head and tail
// are sentinel nodes and never hold data.
type DoubleLinkedList struct {
	head *node
	tail *node
}

// newDoubleLinkedList returns an empty list with its sentinels wired up.
func newDoubleLinkedList() *DoubleLinkedList {
	l := &DoubleLinkedList{
		head: &node{},
		tail: &node{},
	}
	l.head.next = l.tail
	l.tail.prev = l.head
	return l
}

// LRUCache is a fixed-capacity key/value cache with least-recently-used
// eviction. Get and Put both run in O(1).
type LRUCache struct {
	dataM map[int]*node     // key -> list node
	list  *DoubleLinkedList // recency order
	limit int               // maximum number of entries
}

// Constructor creates an LRU cache holding at most capacity entries.
//
// BUG FIX: the previous version built the list into a local variable and
// never stored it or the capacity on the cache, so list was nil (Get/Put
// panicked) and limit was always 0.
func Constructor(capacity int) LRUCache {
	return LRUCache{
		dataM: make(map[int]*node),
		list:  newDoubleLinkedList(),
		limit: capacity,
	}
}

// Get returns the value stored under key, marking it most recently used,
// or -1 when the key is absent.
func (c *LRUCache) Get(key int) int {
	entry, ok := c.dataM[key]
	if !ok {
		return -1
	}
	c.list.moveToHead(entry)
	return entry.value
}

// moveToHead unlinks the node and reinserts it as most recently used.
func (l *DoubleLinkedList) moveToHead(n *node) {
	removeNode(n)
	l.addToHead(n)
}

// removeNode unlinks n from its list.
func removeNode(n *node) {
	n.prev.next = n.next
	n.next.prev = n.prev
	n.next, n.prev = nil, nil
}

// addToHead inserts n right after the head sentinel (most recently used).
//
// BUG FIX: the previous version overwrote the head sentinel with the
// inserted node, corrupting the list.
func (l *DoubleLinkedList) addToHead(n *node) {
	n.prev = l.head
	n.next = l.head.next
	l.head.next.prev = n
	l.head.next = n
}

// removeTail unlinks and returns the least recently used node, or nil
// when the list is empty.
//
// BUG FIX: the previous version removed the tail sentinel itself.
func (l *DoubleLinkedList) removeTail() *node {
	lru := l.tail.prev
	if lru == l.head {
		return nil
	}
	removeNode(lru)
	return lru
}

// Put stores value under key, marking it most recently used. When the key
// already exists its value is updated (the old version forgot to). When
// inserting over capacity, the least recently used entry is evicted from
// both the list and the map (the old version left stale map entries).
func (c *LRUCache) Put(key int, value int) {
	if existing, ok := c.dataM[key]; ok {
		existing.value = value
		c.list.moveToHead(existing)
		return
	}
	entry := &node{key: key, value: value}
	c.list.addToHead(entry)
	c.dataM[key] = entry
	if len(c.dataM) > c.limit {
		if evicted := c.list.removeTail(); evicted != nil {
			delete(c.dataM, evicted.key)
		}
	}
}
|
package main
import (
"fmt"
)
// main demonstrates that Go strings are UTF-8: it prints the string and
// its byte slice, then each rune and each raw byte with several verbs.
func main() {
	texto := "Olá, mundo! 坔"
	textoBytes := []byte(texto)
	fmt.Printf("%v\n%T\n", texto, texto)
	fmt.Printf("%v\n%T\n", textoBytes, textoBytes)
	// per character (rune)
	for _, r := range texto {
		fmt.Printf("%b - %v - %T - %#U - %#x\n", r, r, r, r, r)
	}
	fmt.Println("")
	// per byte
	for i := 0; i < len(texto); i++ {
		b := texto[i]
		fmt.Printf("%b - %v - %T - %#U - %#x\n", b, b, b, b, b)
	}
}
|
package main
import (
"fmt"
"log"
"os"
"github.com/urfave/cli/v2"
)
const version = "0.1.0"
var revision = "HEAD"
// main runs the CLI app and, on failure, exits with the exit code the
// command reported (via cli.ExitCoder), defaulting to 1.
func main() {
	if err := newApp().Run(os.Args); err != nil {
		exitCode := 1
		if excoder, ok := err.(cli.ExitCoder); ok {
			exitCode = excoder.ExitCode()
		}
		// BUG FIX: log.Fatal exits with status 1 immediately, so the
		// computed exitCode was never used. Log without exiting, then
		// exit with the intended code.
		log.Print("error ", err.Error())
		os.Exit(exitCode)
	}
}
// newApp configures the urfave/cli application: metadata, version string
// and the command set.
func newApp() *cli.App {
	return &cli.App{
		Name:     "github-toy",
		Usage:    "Toy to operate github",
		Version:  fmt.Sprintf("%s (rev:%s)", version, revision),
		Commands: commands,
	}
}
|
package omg
import (
"net/http"
"github.com/gorilla/mux"
)
// router builds the HTTP route table for the private-storage API: CRUD on
// /file plus an upload-URL endpoint, dispatched by HTTP method.
func router() *mux.Router {
	router := mux.NewRouter().StrictSlash(true)
	router.HandleFunc("/api/v1/privatestorage/file", uploadLocalFile).Methods(http.MethodPut)
	router.HandleFunc("/api/v1/privatestorage/file", getFileInfo).Methods(http.MethodGet)
	// NOTE(review): handler name "updataFileInfo" looks like a typo for
	// "updateFileInfo" (the handler is defined elsewhere in the package).
	router.HandleFunc("/api/v1/privatestorage/file", updataFileInfo).Methods(http.MethodPost)
	router.HandleFunc("/api/v1/privatestorage/file", deleteFile).Methods(http.MethodDelete)
	router.HandleFunc("/api/v1/privatestorage/uploadinfo", getURL).Methods(http.MethodGet)
	return router
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.