text
stringlengths 11
4.05M
|
|---|
package transdsl
import (
"errors"
)
var (
	// ErrSucc is the sentinel returned by steps that completed successfully.
	ErrSucc = errors.New("trans succ")
	// ErrUnexpectedEvent reports an event the transaction did not expect.
	ErrUnexpectedEvent = errors.New("unexpected event")
	// ErrTimeout reports that a transaction step timed out.
	ErrTimeout = errors.New("timeout")
)
func isEqual(leftErr, rightErr error) bool {
return leftErr.Error() == rightErr.Error()
}
// Succ is a transaction step that always succeeds.
type Succ struct {
}

// Exec reports success by returning the ErrSucc sentinel.
// Receiver renamed from the non-idiomatic "this" to "s".
func (s *Succ) Exec(transInfo *TransInfo) error {
	return ErrSucc
}

// Rollback is a no-op: a successful step has nothing to undo.
func (s *Succ) Rollback(transInfo *TransInfo) {
}
// Fail is a transaction step that always fails with the configured error.
type Fail struct {
	// ErrCode is the error returned by Exec.
	ErrCode error
}

// Exec fails with the configured ErrCode.
// Receiver renamed from the non-idiomatic "this" to "f".
func (f *Fail) Exec(transInfo *TransInfo) error {
	return f.ErrCode
}

// Rollback is a no-op: a failed step made no changes to undo.
func (f *Fail) Rollback(transInfo *TransInfo) {
}
|
package main
import (
"bufio"
"flag"
"fmt"
"math/rand"
"os"
"regexp"
"strings"
"time"
)
//chosing a random word from the list
func chooserand(list []string) string {
rand.Seed(time.Now().Unix()) //needed so randomization works correctly
randomindexes := rand.Intn(len(list))
randword := list[randomindexes]
return randword
}
// game runs one round of hangman for word; text is the full word list,
// kept so a replay can pick a fresh word. Guesses are read from stdin.
// On win/loss the player may replay (recursive call) or quit via exit.
func game(word string, text []string) {
	word = strings.ToLower(word) // avoid capitalization mismatches with guesses
	length := len(word)
	// Build the masked display: one underscore per letter of the word.
	var underscores []string
	for i := 0; i < length; i++ {
		underscores = append(underscores, "_")
	}
	underscoresJustString := strings.Join(underscores, "") // need underscores to not be a slice
	var letters, allletters []string
	lives := 0
	win := true
	for win {
		hangman(lives)
		// Six wrong guesses completes the gallows: game over.
		if lives == 6 {
			fmt.Println("YOU LOSE!")
			fmt.Println("The word was:", word)
			var PlayAgain string
			fmt.Print("\nWould you like to play again? (Y/N) ")
			fmt.Scanf("%s \n", &PlayAgain)
			PlayAgain = strings.ToLower(PlayAgain)
			if PlayAgain == "y" {
				randword := chooserand(text)
				game(randword, text)
			} else {
				exit("YOU LOSE!")
			}
		}
		fmt.Println(underscoresJustString)
		if lives > 0 {
			fmt.Println("Letters you have already checked for that aren't in the word:", letters)
		}
		var letter string
		temptrue := true
		// Input loop: repeats until a single, new, alphabetic letter arrives.
		for temptrue {
			fmt.Print("\nGuess a letter: ")
			fmt.Scanf("%s \n", &letter)
			// checking if more than 1 letter was entered
			if len(letter) == 1 {
				temptrue = false
			} else {
				fmt.Println("You need to enter only 1 letter")
			}
			letter = strings.TrimSpace(letter)
			letter = strings.ToLower(letter) // words are stored in lowercase
			// checks if the letter was entered before
			for _, let := range allletters {
				if letter == let {
					fmt.Println("You have already entered this letter")
					temptrue = true
				}
			}
			// checks if letter is in fact a letter
			alphabetcheck := false
			// BUG FIX: the original loop used r < 'z', which excluded 'z'
			// itself and rejected a perfectly valid guess of "z".
			for r := 'a'; r <= 'z'; r++ {
				if letter == string(r) {
					alphabetcheck = true
				}
			}
			if !alphabetcheck {
				fmt.Println("Enter only letters, words don't contain numbers or special symbols")
				temptrue = true
			}
		}
		allletters = append(allletters, letter) // keeping list of all entered letters
		if strings.Contains(word, letter) {
			// letter is a validated single character a-z, so it is safe
			// to use directly as a regular expression.
			m := regexp.MustCompile(letter)
			indexes := m.FindAllStringIndex(word, -1) // 2D [start,end] pairs
			// Flatten to the start positions only.
			var index []int
			var row = 0
			for _, column := range indexes {
				index = append(index, column[row])
			}
			// Reveal the guessed letter at every position it occurs.
			for i := range index {
				indexnumber := index[i]
				underscoresJustString = underscoresJustString[:indexnumber] + letter + underscoresJustString[indexnumber+1:]
			}
		} else {
			fmt.Println("Seems like this letter doesn't exist in this word!")
			letters = append(letters, letter) // wrong guesses shown to the player
			lives++
		}
		if strings.Contains(underscoresJustString, "_") {
			// word not fully revealed yet; keep playing
		} else {
			word = strings.ToUpper(word)
			fmt.Println("\nCONGRATULATIONS, YOU WIN! THE WORD WAS: ", word)
			var PlayAgain string
			fmt.Print("\nWould you like to play again? (Y/N) ")
			fmt.Scanf("%s \n", &PlayAgain)
			PlayAgain = strings.ToLower(PlayAgain)
			if PlayAgain == "y" {
				randword := chooserand(text)
				game(randword, text)
			} else {
				exit("YOU WIN!")
			}
		}
	}
}
// hangman prints the gallows drawing for the given number of wrong
// guesses (0 through 6). Values outside that range print nothing,
// matching the original switch with no default case.
func hangman(counter int) {
	stages := []string{
		"+---+\n| |\n|\n|\n|\n|\n=========",             //0
		"+---+\n| |\n| O\n|\n|\n|\n=========",           //1
		"+---+\n| |\n| O\n| |\n|\n|\n=========",         //2
		"+---+\n| |\n| O\n| /|\n|\n|\n=========",        //3
		"+---+\n| |\n| O\n| /|\\\n|\n|\n=========",      //4
		"+---+\n| |\n| O\n| /|\\\n| /\n|\n=========",    //5
		"+---+\n| |\n| O\n| /|\\\n| / \\\n|\n=========", //6
	}
	if counter >= 0 && counter < len(stages) {
		fmt.Println(stages[counter])
	}
}
// main loads the word list named by the -txt flag (one word per line)
// and starts a game with a randomly chosen word.
func main() {
	txtFilename := flag.String("txt", "words.txt", "a txt file with 1 word per line")
	flag.Parse()
	file, err := os.Open(*txtFilename)
	if err != nil {
		exit("Failed to open txt file.")
	}
	// defer replaces the original unconditional Close at the end, so the
	// file is also closed if a future early return is added.
	defer file.Close()
	scanner := bufio.NewScanner(file)
	var text []string
	for scanner.Scan() {
		text = append(text, scanner.Text())
	}
	// BUG FIX: the original never checked scanner.Err(), silently playing
	// with a truncated list on read errors.
	if err := scanner.Err(); err != nil {
		exit("Failed to read txt file.")
	}
	// BUG FIX: an empty file made chooserand panic on rand.Intn(0).
	if len(text) == 0 {
		exit("The txt file contains no words.")
	}
	randword := chooserand(text)
	game(randword, text)
}
// exit prints msg and terminates the process.
// Note: the exit status is always 1, even when msg announces a win;
// callers use this single helper for both win and loss endings.
func exit(msg string) {
	fmt.Println(msg)
	os.Exit(1)
}
|
/**
* Copyright (C) 2019, Xiongfa Li.
* All right reserved.
* @author xiongfa.li
* @date 2019/2/22
* @time 10:42
* @version V1.0
* Description:
*/
package test
import (
"container/list"
"fmt"
commonPool2 "github.com/xfali/gomem/commonPool2"
"testing"
"time"
)
// b is a minimal pool factory used by these tests; the underlying int
// value is never read — the type exists only to carry the factory
// methods. Each lifecycle hook logs its invocation to stdout.
type b int

// ActivateObject logs that a pooled object is being activated.
func (p *b) ActivateObject(i interface{}) {
	fmt.Printf("ActivateObject %v\n", i)
}

// DestroyObject logs that a pooled object is being destroyed.
func (p *b) DestroyObject(i interface{}) {
	fmt.Printf("DestroyObject %v\n", i)
}

// MakeObject logs creation and returns the fixed object "test".
func (p *b) MakeObject() interface{} {
	fmt.Printf("MakeObject\n")
	return "test"
}

// PassivateObject logs that a pooled object is being passivated.
func (p *b) PassivateObject(i interface{}) {
	fmt.Printf("PassivateObject %v\n", i)
}

// ValidateObject logs the check and always reports the object as valid.
func (p *b) ValidateObject(i interface{}) bool {
	fmt.Printf("ValidateObject %v\n", i)
	return true
}
// TestCommonPool2 exercises borrowing under exhaustion: 30 Gets against a
// pool capped at MaxSize 20, while a background goroutine slowly returns
// borrowed objects so the blocked Gets can proceed.
func TestCommonPool2(t *testing.T) {
	//f := commonPool2.DummyFactory(func() interface{} {
	//	fmt.Println("create!")
	//	return "test"
	//})
	f := b(1)
	pb := commonPool2.CommonPool{
		MinIdle:                       10,
		MaxSize:                       20,
		BlockWhenExhausted:            true,
		TimeBetweenEvictionRunsMillis: 2 * time.Second,
		Factory:                       &f,
		MaxWaitMillis:                 time.Second * 10,
		TestOnReturn:                  true,
		TestWhileIdle:                 true,
		TestOnBorrow:                  true,
		TestOnCreate:                  true,
	}
	pb.Init()
	defer pb.Close()
	l := list.New()
	// Returner goroutine: after an initial delay, put up to 6 borrowed
	// objects back, one per second, unblocking the Get loop below.
	// NOTE(review): l is mutated here while the main loop appends to it
	// without synchronization — likely a data race under -race.
	go func() {
		time.Sleep(time.Second)
		i := 0
		e := l.Front()
		for e != nil {
			pb.Put(e.Value)
			r := e
			e = e.Next()
			l.Remove(r)
			time.Sleep(time.Second)
			if i < 5 {
				i++
			} else {
				break
			}
		}
	}()
	for i := 0; i < 30; i++ {
		now := time.Now()
		buf := pb.Get()
		fmt.Printf("value %v\n", buf)
		fmt.Printf("use time :%d ms\n", time.Since(now)/time.Millisecond)
		l.PushBack(buf)
	}
}
// TestCommonPool2_2 exercises idle eviction: borrow 10 objects, return
// them all, then sleep long enough for the 1-second eviction runs to
// shrink the idle set (MinEvictableIdleTimeMillis is 1s, MinIdle is 5).
func TestCommonPool2_2(t *testing.T) {
	//f := commonPool2.DummyFactory(func() interface{} {
	//	fmt.Println("create!")
	//	return "test"
	//})
	f := b(1)
	pb := commonPool2.CommonPool{
		MinIdle:                       5,
		MaxSize:                       20,
		BlockWhenExhausted:            true,
		TimeBetweenEvictionRunsMillis: 1 * time.Second,
		MinEvictableIdleTimeMillis:    1 * time.Second,
		Factory:                       &f,
		MaxWaitMillis:                 time.Second * 10,
		//TestOnReturn: true,
		//TestWhileIdle: true,
		//TestOnBorrow: true,
		//TestOnCreate: true,
	}
	pb.Init()
	defer pb.Close()
	l := list.New()
	for i := 0; i < 10; i++ {
		buf := pb.Get()
		l.PushBack(buf)
		fmt.Printf("value %v\n", buf)
	}
	// Return everything so the objects become idle and evictable.
	for e := l.Front(); e != nil; e = e.Next() {
		pb.Put(e.Value)
	}
	time.Sleep(time.Second)
	now := time.Now()
	// Give the evictor several runs to destroy idle objects.
	time.Sleep(10 * time.Second)
	fmt.Printf("%d ms\n", time.Since(now)/time.Millisecond)
}
// TestCommonPool2_subloopTimeout uses a tiny pool (MaxSize 2) so that Get
// blocks quickly; a goroutine returns one borrowed object after 3 seconds.
// With MaxWaitMillis at 10s this probes the blocked-borrower wait path.
func TestCommonPool2_subloopTimeout(t *testing.T) {
	//f := commonPool2.DummyFactory(func() interface{} {
	//	fmt.Println("create!")
	//	return "test"
	//})
	f := b(1)
	pb := commonPool2.CommonPool{
		MinIdle:                       1,
		MaxSize:                       2,
		BlockWhenExhausted:            true,
		TimeBetweenEvictionRunsMillis: 1 * time.Second,
		MinEvictableIdleTimeMillis:    1 * time.Second,
		Factory:                       &f,
		MaxWaitMillis:                 time.Second * 10,
		TestOnReturn:                  true,
		TestWhileIdle:                 true,
		TestOnBorrow:                  true,
		TestOnCreate:                  true,
	}
	pb.Init()
	defer pb.Close()
	l := list.New()
	// NOTE(review): l.Front() is nil if nothing was borrowed within 3s;
	// the goroutine would then panic on the nil element. It also races
	// with the unsynchronized PushBack calls below.
	go func() {
		time.Sleep(3 * time.Second)
		pb.Put(l.Front().Value)
	}()
	for i := 0; i < 10; i++ {
		buf := pb.Get()
		l.PushBack(buf)
		fmt.Printf("value %v\n", buf)
	}
	for e := l.Front(); e != nil; e = e.Next() {
		pb.Put(e.Value)
	}
	time.Sleep(time.Second)
	now := time.Now()
	time.Sleep(10 * time.Second)
	fmt.Printf("%d ms\n", time.Since(now)/time.Millisecond)
}
|
package main
import (
"bufio"
"fmt"
"log"
"os"
"regexp"
"strconv"
)
// pattern describes one fabric claim: its numeric index and the
// rectangle it covers (left/top offsets plus width and height).
type pattern struct {
	index int
	leftEdge int
	topEdge int
	width int
	height int
}
var (
	// patterns holds every claim parsed from the input file.
	patterns []pattern
	// grid counts, per square inch, how many claims cover that cell.
	grid [1000][1000]int
)
// main parses every claim from ../input.txt, stamps them onto the grid,
// then concurrently finds (a) the index of the one claim whose rectangle
// never overlaps another and (b) the number of cells claimed more than
// once, printing both results.
func main() {
	f, err := os.Open("../input.txt")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	s := bufio.NewScanner(f)
	for s.Scan() {
		patterns = append(patterns, parseLine(s.Text()))
	}
	// Stamp each claim onto the grid, counting coverage per cell.
	for _, p := range patterns {
		for y := p.topEdge; y < (p.topEdge + p.height); y++ {
			for x := p.leftEdge; x < (p.leftEdge + p.width); x++ {
				grid[x][y]++
			}
		}
	}
	// Create channels for goroutines
	nonOverlappingIndex := make(chan int)
	overlaps := make(chan int)
	// NOTE(review): if no claim is overlap-free, nothing is ever sent on
	// nonOverlappingIndex and the receive at the bottom deadlocks.
	go func() {
		// Range over the patterns again.
		// This time, if the space being increased is greater than 1,
		// then we know it overlapped before.
		// If it is only 1, then we know that only one claim used that space.
		for _, p := range patterns {
			overlapping := false
			for y := p.topEdge; y < (p.topEdge + p.height); y++ {
				for x := p.leftEdge; x < (p.leftEdge + p.width); x++ {
					if grid[x][y] > 1 {
						overlapping = true
					}
				}
			}
			if overlapping == false {
				nonOverlappingIndex <- p.index
			}
		}
	}()
	// Count every grid cell covered by two or more claims.
	go func() {
		var o int
		for _, column := range grid {
			for _, val := range column {
				if val > 1 {
					o++
				}
			}
		}
		overlaps <- o
	}()
	fmt.Println("Non-overlapping index:", <-nonOverlappingIndex)
	fmt.Println("Number of overlaps:", <-overlaps)
}
// lineRegex matches claim lines such as "#10 @ 936,278: 13x27", capturing
// index, left edge, top edge, width, and height. Compiled once at package
// init instead of on every parseLine call as the original did.
var lineRegex = regexp.MustCompile(`#([0-9]*) @ ([0-9]*),([0-9]*): ([0-9]*)x([0-9]*)`)

// parseLine parses one claim line into a pattern.
// A malformed line terminates the program (the original indexed a nil
// FindStringSubmatch result and would have panicked instead).
func parseLine(s string) pattern {
	// #10 @ 936,278: 13x27
	matches := lineRegex.FindStringSubmatch(s)
	if matches == nil {
		log.Fatalf("malformed claim line: %q", s)
	}
	// Atoi errors are safe to ignore: each capture group is digits-only.
	intConv := func(s string) int { x, _ := strconv.Atoi(s); return x }
	return pattern{
		index:    intConv(matches[1]),
		leftEdge: intConv(matches[2]),
		topEdge:  intConv(matches[3]),
		width:    intConv(matches[4]),
		height:   intConv(matches[5]),
	}
}
|
package apiserver
import (
"fmt"
"github.com/gin-gonic/gin"
"log"
"net/http"
"oliujunk/server/apiserver/authentication"
"oliujunk/server/config"
"oliujunk/server/database"
"time"
)
// Start boots the REST API server: permissive CORS on all routes, a
// public /api/login endpoint, and the remaining /api routes guarded by
// JWT authentication. Blocks serving on the configured port.
func Start() {
	log.Println("接口服务启动")
	//gin.SetMode(gin.ReleaseMode)
	router := gin.Default()
	router.Use(cors())
	router.POST("/api/login", login)
	authenticated := router.Group("/api")
	authenticated.Use(authentication.JWTAuth())
	{
		authenticated.GET("/current/:deviceID", getCurrent)
		authenticated.GET("/datas", getDatas)
		authenticated.GET("/devices", getDevices)
		authenticated.POST("/devices", postDevice)
		authenticated.PUT("/devices/:deviceID", putDevice)
		authenticated.DELETE("/devices/:deviceID", deleteDevice)
		authenticated.PUT("/users", putUser)
	}
	// Run only returns on listen failure; the error is deliberately dropped.
	_ = router.Run(fmt.Sprintf(":%d", config.GlobalConfiguration.ApiServer.Port))
}
// cors returns middleware that attaches permissive CORS headers to every
// response and answers OPTIONS preflight requests with 204 No Content.
func cors() gin.HandlerFunc {
	return func(c *gin.Context) {
		method := c.Request.Method
		c.Header("Access-Control-Allow-Origin", "*")
		c.Header("Access-Control-Allow-Headers", "Content-Type,AccessToken,X-CSRF-Token,Authorization,Token")
		c.Header("Access-Control-Allow-Methods", "POST,GET,OPTIONS,PUT,DELETE,UPDATE")
		c.Header("Access-Control-Expose-Headers", "Content-Length,Access-Control-Allow-Origin,Access-Control-Allow-Headers,Content-Type")
		c.Header("Access-Control-Allow-Credentials", "true")
		if method == "OPTIONS" {
			// FIX: return after aborting. The original fell through to
			// c.Next(); Abort makes that a no-op, but the explicit return
			// is the documented pattern and avoids relying on that detail.
			c.AbortWithStatus(http.StatusNoContent)
			return
		}
		c.Next()
	}
}
func login(context *gin.Context) {
type Result struct {
Token string `json:"token"`
}
type Param struct {
Username string `json:"username"`
Password string `json:"password"`
}
var param Param
err := context.BindJSON(¶m)
if err != nil {
context.JSON(http.StatusOK, gin.H{
"status": -1,
"message": "参数错误: " + err.Error(),
})
return
}
var user database.User
result, err := database.Orm.Table("xph_user").Where("username = ?", param.Username).Get(&user)
if err != nil {
context.JSON(http.StatusInternalServerError, gin.H{
"status": -1,
"message": "服务端异常: " + err.Error(),
})
return
}
if !result {
context.JSON(http.StatusOK, gin.H{
"status": -1,
"message": "用户名或密码错误",
})
return
}
if user.Username == param.Username && user.Password == param.Password {
token := authentication.GenerateToken(user)
context.JSON(http.StatusOK, gin.H{
"status": 0,
"message": "登录成功",
"data": Result{Token: token},
})
} else {
context.JSON(http.StatusOK, gin.H{
"status": -1,
"message": "用户名或密码错误",
})
}
}
func getCurrent(context *gin.Context) {
deviceID := context.Param("deviceID")
current := database.Current{}
_, _ = database.Orm.
Table("xph_current").
Where("device_id = ?", deviceID).
Desc("data_time").
Get(¤t)
context.JSON(http.StatusOK, gin.H{
"status": 0,
"message": "请求成功",
"data": current,
})
}
func getDatas(context *gin.Context) {
type Param struct {
DeviceID int `form:"deviceID" binding:"required"`
PageNum int `form:"pageNum" binding:"required"`
PageSize int `form:"pageSize" binding:"required"`
StartTime string `form:"startTime"`
EndTime string `form:"endTime"`
}
type Result struct {
List []database.Current `json:"list"`
Total int64 `json:"total"`
PageNum int `json:"pageNum"`
PageSize int `json:"pageSize"`
}
var param Param
err := context.ShouldBindQuery(¶m)
if err != nil {
context.JSON(http.StatusOK, gin.H{
"status": -1,
"message": "参数错误: " + err.Error(),
})
return
}
var datas []database.Current
_ = database.Orm.
Table("xph_current").
Where("device_id = ?", param.DeviceID).
And("? IS NULL OR ? = '' OR data_time >= ?", param.StartTime, param.StartTime, param.StartTime).
And("? IS NULL OR ? = '' OR data_time <= ?", param.EndTime, param.EndTime, param.EndTime).
Limit(param.PageSize, (param.PageNum-1)*param.PageSize).
Desc("data_time").
Find(&datas)
total, _ := database.Orm.
Table("xph_current").
Where("device_id = ?", param.DeviceID).
And("? IS NULL OR ? = '' OR data_time >= ?", param.StartTime, param.StartTime, param.StartTime).
And("? IS NULL OR ? = '' OR data_time <= ?", param.EndTime, param.EndTime, param.EndTime).
Desc("data_time").
Count()
context.JSON(http.StatusOK, gin.H{
"status": 0,
"message": "请求成功",
"data": Result{
Total: total,
PageNum: param.PageNum,
PageSize: param.PageSize,
List: datas,
},
})
}
func getDevices(context *gin.Context) {
claims := context.MustGet("claims").(*authentication.CustomClaims)
type Param struct {
DeviceID int `form:"deviceID"`
}
var param Param
_ = context.ShouldBindQuery(¶m)
var devices []database.Device
_ = database.Orm.
Table("xph_device").
Where("creator_id = ?", claims.UserID).
And("? IS NULL OR ? = '' OR device_id = ?", param.DeviceID, param.DeviceID, param.DeviceID).
Asc("id").
Find(&devices)
context.JSON(http.StatusOK, gin.H{
"status": 0,
"message": "请求成功",
"data": devices,
})
}
func postDevice(context *gin.Context) {
claims := context.MustGet("claims").(*authentication.CustomClaims)
var param database.Device
err := context.BindJSON(¶m)
if err != nil {
context.JSON(http.StatusOK, gin.H{
"status": -1,
"message": "参数错误: " + err.Error(),
})
return
}
param.CreatorID = claims.UserID
param.CreateTime = time.Now()
param.UpdateTime = time.Now()
var device database.Device
result, err := database.Orm.Table("xph_device").Where("device_id = ?", param.DeviceID).Get(&device)
if err != nil {
context.JSON(http.StatusInternalServerError, gin.H{
"status": -1,
"message": "服务端异常: " + err.Error(),
})
return
}
if result {
context.JSON(http.StatusOK, gin.H{
"status": -1,
"message": "设备已存在",
})
return
}
_, err = database.Orm.Table("xph_device").Insert(param)
if err != nil {
context.JSON(http.StatusInternalServerError, gin.H{
"status": -1,
"message": "服务端异常: " + err.Error(),
})
return
}
context.JSON(http.StatusOK, gin.H{
"status": 0,
"message": "添加成功",
"data": true,
})
}
func putDevice(context *gin.Context) {
claims := context.MustGet("claims").(*authentication.CustomClaims)
deviceID := context.Param("deviceID")
var param database.Device
err := context.BindJSON(¶m)
if err != nil {
context.JSON(http.StatusOK, gin.H{
"status": -1,
"message": "参数错误: " + err.Error(),
})
return
}
var device database.Device
result, err := database.Orm.Table("xph_device").
Where("creator_id = ?", claims.UserID).
And("device_id = ?", deviceID).
Get(&device)
if err != nil {
context.JSON(http.StatusInternalServerError, gin.H{
"status": -1,
"message": "服务端异常: " + err.Error(),
})
return
}
if !result {
context.JSON(http.StatusOK, gin.H{
"status": -1,
"message": "设备不存在或无操作权限",
})
return
}
_, err = database.Orm.Table("xph_device").ID(device.ID).Update(param)
if err != nil {
context.JSON(http.StatusOK, gin.H{
"status": -1,
"message": "参数错误: " + err.Error(),
})
return
}
context.JSON(http.StatusOK, gin.H{
"status": 0,
"message": "修改成功",
"data": true,
})
}
// deleteDevice removes a device by device_id, scoped to the calling user
// so one user cannot delete another user's devices.
func deleteDevice(context *gin.Context) {
	claims := context.MustGet("claims").(*authentication.CustomClaims)
	deviceID := context.Param("deviceID")
	rows, err := database.Orm.Table("xph_device").
		Where("creator_id = ?", claims.UserID).
		And("device_id = ?", deviceID).
		Delete(database.Device{})
	if err != nil {
		context.JSON(http.StatusOK, gin.H{
			"status": -1,
			"message": "参数错误: " + err.Error(),
		})
		return
	}
	// rows == 0 means nothing matched: unknown ID or not the owner.
	if rows >= 1 {
		context.JSON(http.StatusOK, gin.H{
			"status": 0,
			"message": "删除成功",
			"data": true,
		})
	} else {
		context.JSON(http.StatusOK, gin.H{
			"status": -1,
			"message": "删除失败",
		})
	}
}
func putUser(context *gin.Context) {
claims := context.MustGet("claims").(*authentication.CustomClaims)
var param database.User
err := context.BindJSON(¶m)
if err != nil {
context.JSON(http.StatusOK, gin.H{
"status": -1,
"message": "参数错误: " + err.Error(),
})
return
}
_, err = database.Orm.Table("xph_user").ID(claims.UserID).Update(param)
if err != nil {
context.JSON(http.StatusOK, gin.H{
"status": -1,
"message": "参数错误: " + err.Error(),
})
return
}
context.JSON(http.StatusOK, gin.H{
"status": 0,
"message": "修改成功",
"data": true,
})
}
|
/*
Copyright 2022 The KubeVela Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package addon
import (
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
)
// TestCheckAddonName covers the three input classes: empty (rejected with
// "should not be empty"), invalid characters or dash placement (rejected
// with "should only"), and well-formed lowercase-dashed names (accepted).
func TestCheckAddonName(t *testing.T) {
	var err error
	err = CheckAddonName("")
	assert.ErrorContains(t, err, "should not be empty")
	invalidNames := []string{
		"-addon",
		"addon-",
		"Caps",
		"=",
		".",
	}
	for _, name := range invalidNames {
		err = CheckAddonName(name)
		assert.ErrorContains(t, err, "should only")
	}
	validNames := []string{
		"addon-name",
		"3-addon-name",
		"addon-name-3",
		"addon",
	}
	for _, name := range validNames {
		err = CheckAddonName(name)
		assert.NoError(t, err)
	}
}
// TestInitCmd_CreateScaffold walks CreateScaffold through its failure and
// success paths: empty fields, invalid addon name, pre-existing target
// directory, a helm-component scaffold, and a ref-objects scaffold
// (invalid then valid URL). Created directories are cleaned up via defer.
func TestInitCmd_CreateScaffold(t *testing.T) {
	var err error
	// empty addon name or path
	cmd := InitCmd{}
	err = cmd.CreateScaffold()
	assert.ErrorContains(t, err, "be empty")
	// invalid addon name
	cmd = InitCmd{
		AddonName: "-name",
		Path:      "name",
	}
	err = cmd.CreateScaffold()
	assert.ErrorContains(t, err, "should only")
	// dir already exists
	cmd = InitCmd{
		AddonName: "name",
		Path:      "testdata",
	}
	err = cmd.CreateScaffold()
	assert.ErrorContains(t, err, "cannot create")
	// with helm component
	// NOTE(review): references a live chart repository — may need network
	// access; confirm whether CreateScaffold fetches it or only records it.
	cmd = InitCmd{
		AddonName:        "with-helm",
		Path:             "with-helm",
		HelmRepoURL:      "https://charts.bitnami.com/bitnami",
		HelmChartVersion: "12.0.0",
		HelmChartName:    "nginx",
	}
	err = cmd.CreateScaffold()
	assert.NoError(t, err)
	defer os.RemoveAll("with-helm")
	_, err = os.Stat(filepath.Join("with-helm", ResourcesDirName, "helm.cue"))
	assert.NoError(t, err)
	// with ref-obj
	cmd = InitCmd{
		AddonName:  "with-refobj",
		Path:       "with-refobj",
		RefObjURLs: []string{"https:"},
	}
	err = cmd.CreateScaffold()
	assert.ErrorContains(t, err, "not a valid url")
	cmd.RefObjURLs[0] = "https://some.com"
	err = cmd.CreateScaffold()
	assert.NoError(t, err)
	defer os.RemoveAll("with-refobj")
	_, err = os.Stat(filepath.Join("with-refobj", ResourcesDirName, "from-url.cue"))
	assert.NoError(t, err)
}
|
package compute
import (
"encoding/xml"
"fmt"
"log"
"net/http"
)
// Account represents the details for a compute account.
type Account struct {
// The XML name for the "Account" data contract
XMLName xml.Name `xml:"Account"`
// The compute API user name.
UserName string `xml:"userName"`
// The user's full name.
FullName string `xml:"fullName"`
// The user's first name.
FirstName string `xml:"firstName"`
// The user's last name.
LastName string `xml:"lastName"`
// The user's email address.
EmailAddress string `xml:"emailAddress"`
// The user's department.
Department string `xml:"department"`
// The Id of the user's organisation.
OrganizationID string `xml:"orgId"`
// The user's assigned roles.
AssignedRoles []Role `xml:"roles>role"`
}
// Role represents a role assigned to a compute account.
type Role struct {
	// The XML name for the "Role" data contract.
	XMLName xml.Name `xml:"role"`
	// The role name.
	Name string `xml:"name"`
}
// GetAccount retrieves the current user's account information
func (client *Client) GetAccount() (*Account, error) {
client.stateLock.Lock()
defer client.stateLock.Unlock()
if client.account != nil {
return client.account, nil
}
request, err := client.newRequestV1("myaccount", http.MethodGet, nil)
if err != nil {
return nil, err
}
responseBody, statusCode, err := client.executeRequest(request)
if err != nil {
return nil, err
}
if statusCode == 401 {
return nil, fmt.Errorf("cannot connect to compute API (invalid credentials)")
}
account := &Account{}
err = xml.Unmarshal(responseBody, account)
if err != nil {
log.Printf("Received invalid XML response from the CloudControl account-details API: %s\n'%s'",
err,
string(responseBody),
)
return nil, err
}
client.account = account
return account, nil
}
|
// Test how big can the stack of a go routine be.
// runtime/proc.go: if 64OS maxstacksize=1GB else maxstacksize=250MB
//
// Allocate a bigger segment and move the stack:
// oldsize = gp->stack.hi - gp->stack.lo;
// newsize = oldsize * 2;
package main
// main recurses into itself with no base case, deliberately, to
// demonstrate the runtime's goroutine stack-size limit described in the
// file header.
func main() {
	main() // will panic: runtime: goroutine stack exceeds 1000000000-byte limit
}
|
/*
Links
* http://arysotog.github.com/1.html
* http://arysotog.github.com/2.html
* http://arysotog.github.com/3.html
* http://arysotog.github.com/4.html
* http://arysotog.github.com/5.html
* http://arysotog.github.com/6.html
* http://arysotog.github.com/7.html
* http://arysotog.github.com/8.html
* http://arysotog.github.com/9.html
* http://arysotog.github.com/10.html
*/
package abc
|
// Copyright (C) 2019 Cisco Systems Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package vpplink
import (
"fmt"
"io"
"github.com/projectcalico/vpp-dataplane/v3/vpplink/generated/bindings/interface_types"
"github.com/projectcalico/vpp-dataplane/v3/vpplink/generated/bindings/ipsec"
"github.com/projectcalico/vpp-dataplane/v3/vpplink/types"
)
// GetIPsecTunnelProtection dumps all IPsec tunnel protections configured
// on the given tunnel interface via the VPP ipsec dump API, converting
// each entry into the package's IPsecTunnelProtection type.
func (v *VppLink) GetIPsecTunnelProtection(tunnelInterface uint32) ([]types.IPsecTunnelProtection, error) {
	client := ipsec.NewServiceClient(v.GetConnection())
	stream, err := client.IpsecTunnelProtectDump(v.GetContext(), &ipsec.IpsecTunnelProtectDump{
		SwIfIndex: interface_types.InterfaceIndex(tunnelInterface),
	})
	if err != nil {
		return nil, fmt.Errorf("failed to dump tunnel interface (%v) protections: %w", tunnelInterface, err)
	}
	protections := make([]types.IPsecTunnelProtection, 0)
	// Drain the dump stream until EOF, converting each response.
	for {
		response, err := stream.Recv()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, fmt.Errorf("failed to dump tunnel interface (%v) protections: %w", tunnelInterface, err)
		}
		p := response.Tun
		protections = append(protections, types.IPsecTunnelProtection{
			SwIfIndex:   uint32(p.SwIfIndex),
			NextHop:     types.FromVppAddress(p.Nh),
			OutSAIndex:  p.SaOut,
			InSAIndices: p.SaIn,
		})
	}
	return protections, nil
}
// SetIPsecAsyncMode enables or disables asynchronous IPsec crypto
// processing in VPP.
func (v *VppLink) SetIPsecAsyncMode(enable bool) error {
	client := ipsec.NewServiceClient(v.GetConnection())
	_, err := client.IpsecSetAsyncMode(v.GetContext(), &ipsec.IpsecSetAsyncMode{
		AsyncEnable: enable,
	})
	if err != nil {
		// strEnableDisable (defined elsewhere in this package) maps the
		// bool to "enable"/"disable" wording for the error message.
		return fmt.Errorf("failed to %v IPsec async mode: %w", strEnableDisable[enable], err)
	}
	return nil
}
|
/*
* Copyright 2018- The Pixie Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package pebbledb
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestKeyUpperBound_Simple verifies that a key whose final byte is below
// 0xff is bumped in its last position: "prefix" -> "prefiy".
func TestKeyUpperBound_Simple(t *testing.T) {
	got := KeyUpperBound([]byte("prefix"))
	assert.Equal(t, "prefiy", string(got))
}
func TestKeyUpperBound_Empty(t *testing.T) {
in := ""
upperBound := KeyUpperBound([]byte(in))
assert.Nil(t, upperBound)
}
func TestKeyUpperBound_Nil(t *testing.T) {
upperBound := KeyUpperBound(nil)
assert.Nil(t, upperBound)
}
// TestKeyUpperBound_AllMax verifies that a key consisting solely of 0xff
// bytes has no finite upper bound.
func TestKeyUpperBound_AllMax(t *testing.T) {
	in := []byte{255, 255, 255, 255}
	// in is already a []byte; the original wrapped it in a redundant
	// []byte(in) conversion.
	upperBound := KeyUpperBound(in)
	assert.Nil(t, upperBound)
}
// TestKeyUpperBound_LastMax verifies that trailing 0xff bytes are dropped
// and the last non-max byte is incremented: {40,41,42,255} -> {40,41,43}.
func TestKeyUpperBound_LastMax(t *testing.T) {
	in := []byte{40, 41, 42, 255}
	// in is already a []byte; the original wrapped it in a redundant
	// []byte(in) conversion.
	upperBound := KeyUpperBound(in)
	assert.Equal(t, []byte{40, 41, 43}, upperBound)
}
// Fuzz_KeyUpperBound checks KeyUpperBound's invariants on arbitrary
// inputs: nil for empty or all-0xff keys; otherwise the bound equals the
// input truncated just after its last non-0xff byte, with that byte
// incremented by one.
func Fuzz_KeyUpperBound(f *testing.F) {
	f.Add([]byte("test"))
	f.Add([]byte{40, 41, 42, 255})
	f.Add([]byte{40, 41, 255, 255})
	f.Add([]byte{255, 255})
	f.Fuzz(func(t *testing.T, input []byte) {
		bound := KeyUpperBound(input)
		if len(input) == 0 {
			assert.Nil(t, bound)
			return
		}
		// Find the last index that's not 0xff (i.e. max byte)
		i := len(input) - 1
		for i >= 0 {
			if input[i] != 255 {
				break
			}
			i--
		}
		// All bytes are max
		if i < 0 {
			assert.Nil(t, bound)
			return
		}
		// Bound keeps the prefix up to i and bumps byte i by one.
		assert.Equal(t, i+1, len(bound))
		assert.Equal(t, input[:i], bound[:i])
		// i is always < len(input) here; the guard is redundant but harmless.
		if i < len(input) {
			assert.Equal(t, input[i]+1, bound[i])
		}
	})
}
|
/*
Randall Munroe (author of XKCD) held a survey to give names to colors. The main outcome is a list of names for the 954 most common RGB monitor colors.
For ease of programming, here is the list in plain text: http://xkcd.com/color/rgb.txt. Beware, the first line is not data, but contains the license.
Write a program or function that takes a valid color name from above list as input and outputs the associated RGB color code. Your program does not have to handle invalid inputs in any defined manner.
Standard loopholes apply. Additionally, your answer must not use pre-defined (built-in or external) color code <-> color name maps. (This includes the linked list.) Shortest code in bytes wins. If you read from a file, the file's byte count must be included.
Examples:
dark peach -> #de7e5d
robin's egg blue -> #98eff9
pink/purple -> #ef1de7
*/
package main
import (
"strings"
)
// main sanity-checks the lookup table against three known colors from the
// xkcd survey; any mismatch panics via assert.
func main() {
	cases := map[string]string{
		"dark peach":       "#de7e5d",
		"robin's egg blue": "#98eff9",
		"pink/purple":      "#ef1de7",
	}
	for name, want := range cases {
		assert(lookup(name) == want)
	}
}
// assert panics when its condition is false; a minimal test helper.
func assert(x bool) {
	if x {
		return
	}
	panic("assertion failed")
}
// lookup returns the "#rrggbb" hex code for a color name, or the empty
// string (map zero value) when the name is not in the RGBS table.
func lookup(name string) string {
	return RGBS[name]
}
// init populates the RGBS name->hex map from the embedded DATA table.
// Each data line has the form "<name> <spaces> #<hex>".
func init() {
	for _, line := range strings.Split(DATA, "\n") {
		// Split on the first '#' only; names never contain '#', but a
		// bounded split also guards against '#' inside the hex remainder.
		toks := strings.SplitN(line, "#", 2)
		if len(toks) != 2 {
			// Skip blank or malformed lines instead of panicking on
			// toks[1] as the original would have.
			continue
		}
		name := strings.TrimSpace(toks[0])
		hex := strings.TrimSpace(toks[1])
		RGBS[name] = "#" + hex
	}
}
// RGBS maps xkcd color-survey names to "#rrggbb" codes; filled by init from DATA.
var RGBS = make(map[string]string)
const DATA = `cloudy blue #acc2d9
dark pastel green #56ae57
dust #b2996e
electric lime #a8ff04
fresh green #69d84f
light eggplant #894585
nasty green #70b23f
really light blue #d4ffff
tea #65ab7c
warm purple #952e8f
yellowish tan #fcfc81
cement #a5a391
dark grass green #388004
dusty teal #4c9085
grey teal #5e9b8a
macaroni and cheese #efb435
pinkish tan #d99b82
spruce #0a5f38
strong blue #0c06f7
toxic green #61de2a
windows blue #3778bf
blue blue #2242c7
blue with a hint of purple #533cc6
booger #9bb53c
bright sea green #05ffa6
dark green blue #1f6357
deep turquoise #017374
green teal #0cb577
strong pink #ff0789
bland #afa88b
deep aqua #08787f
lavender pink #dd85d7
light moss green #a6c875
light seafoam green #a7ffb5
olive yellow #c2b709
pig pink #e78ea5
deep lilac #966ebd
desert #ccad60
dusty lavender #ac86a8
purpley grey #947e94
purply #983fb2
candy pink #ff63e9
light pastel green #b2fba5
boring green #63b365
kiwi green #8ee53f
light grey green #b7e1a1
orange pink #ff6f52
tea green #bdf8a3
very light brown #d3b683
egg shell #fffcc4
eggplant purple #430541
powder pink #ffb2d0
reddish grey #997570
baby shit brown #ad900d
liliac #c48efd
stormy blue #507b9c
ugly brown #7d7103
custard #fffd78
darkish pink #da467d
deep brown #410200
greenish beige #c9d179
manilla #fffa86
off blue #5684ae
battleship grey #6b7c85
browny green #6f6c0a
bruise #7e4071
kelley green #009337
sickly yellow #d0e429
sunny yellow #fff917
azul #1d5dec
darkgreen #054907
green/yellow #b5ce08
lichen #8fb67b
light light green #c8ffb0
pale gold #fdde6c
sun yellow #ffdf22
tan green #a9be70
burple #6832e3
butterscotch #fdb147
toupe #c7ac7d
dark cream #fff39a
indian red #850e04
light lavendar #efc0fe
poison green #40fd14
baby puke green #b6c406
bright yellow green #9dff00
charcoal grey #3c4142
squash #f2ab15
cinnamon #ac4f06
light pea green #c4fe82
radioactive green #2cfa1f
raw sienna #9a6200
baby purple #ca9bf7
cocoa #875f42
light royal blue #3a2efe
orangeish #fd8d49
rust brown #8b3103
sand brown #cba560
swamp #698339
tealish green #0cdc73
burnt siena #b75203
camo #7f8f4e
dusk blue #26538d
fern #63a950
old rose #c87f89
pale light green #b1fc99
peachy pink #ff9a8a
rosy pink #f6688e
light bluish green #76fda8
light bright green #53fe5c
light neon green #4efd54
light seafoam #a0febf
tiffany blue #7bf2da
washed out green #bcf5a6
browny orange #ca6b02
nice blue #107ab0
sapphire #2138ab
greyish teal #719f91
orangey yellow #fdb915
parchment #fefcaf
straw #fcf679
very dark brown #1d0200
terracota #cb6843
ugly blue #31668a
clear blue #247afd
creme #ffffb6
foam green #90fda9
grey/green #86a17d
light gold #fddc5c
seafoam blue #78d1b6
topaz #13bbaf
violet pink #fb5ffc
wintergreen #20f986
yellow tan #ffe36e
dark fuchsia #9d0759
indigo blue #3a18b1
light yellowish green #c2ff89
pale magenta #d767ad
rich purple #720058
sunflower yellow #ffda03
green/blue #01c08d
leather #ac7434
racing green #014600
vivid purple #9900fa
dark royal blue #02066f
hazel #8e7618
muted pink #d1768f
booger green #96b403
canary #fdff63
cool grey #95a3a6
dark taupe #7f684e
darkish purple #751973
true green #089404
coral pink #ff6163
dark sage #598556
dark slate blue #214761
flat blue #3c73a8
mushroom #ba9e88
rich blue #021bf9
dirty purple #734a65
greenblue #23c48b
icky green #8fae22
light khaki #e6f2a2
warm blue #4b57db
dark hot pink #d90166
deep sea blue #015482
carmine #9d0216
dark yellow green #728f02
pale peach #ffe5ad
plum purple #4e0550
golden rod #f9bc08
neon red #ff073a
old pink #c77986
very pale blue #d6fffe
blood orange #fe4b03
grapefruit #fd5956
sand yellow #fce166
clay brown #b2713d
dark blue grey #1f3b4d
flat green #699d4c
light green blue #56fca2
warm pink #fb5581
dodger blue #3e82fc
gross green #a0bf16
ice #d6fffa
metallic blue #4f738e
pale salmon #ffb19a
sap green #5c8b15
algae #54ac68
bluey grey #89a0b0
greeny grey #7ea07a
highlighter green #1bfc06
light light blue #cafffb
light mint #b6ffbb
raw umber #a75e09
vivid blue #152eff
deep lavender #8d5eb7
dull teal #5f9e8f
light greenish blue #63f7b4
mud green #606602
pinky #fc86aa
red wine #8c0034
shit green #758000
tan brown #ab7e4c
darkblue #030764
rosa #fe86a4
lipstick #d5174e
pale mauve #fed0fc
claret #680018
dandelion #fedf08
orangered #fe420f
poop green #6f7c00
ruby #ca0147
dark #1b2431
greenish turquoise #00fbb0
pastel red #db5856
piss yellow #ddd618
bright cyan #41fdfe
dark coral #cf524e
algae green #21c36f
darkish red #a90308
reddy brown #6e1005
blush pink #fe828c
camouflage green #4b6113
lawn green #4da409
putty #beae8a
vibrant blue #0339f8
dark sand #a88f59
purple/blue #5d21d0
saffron #feb209
twilight #4e518b
warm brown #964e02
bluegrey #85a3b2
bubble gum pink #ff69af
duck egg blue #c3fbf4
greenish cyan #2afeb7
petrol #005f6a
royal #0c1793
butter #ffff81
dusty orange #f0833a
off yellow #f1f33f
pale olive green #b1d27b
orangish #fc824a
leaf #71aa34
light blue grey #b7c9e2
dried blood #4b0101
lightish purple #a552e6
rusty red #af2f0d
lavender blue #8b88f8
light grass green #9af764
light mint green #a6fbb2
sunflower #ffc512
velvet #750851
brick orange #c14a09
lightish red #fe2f4a
pure blue #0203e2
twilight blue #0a437a
violet red #a50055
yellowy brown #ae8b0c
carnation #fd798f
muddy yellow #bfac05
dark seafoam green #3eaf76
deep rose #c74767
dusty red #b9484e
grey/blue #647d8e
lemon lime #bffe28
purple/pink #d725de
brown yellow #b29705
purple brown #673a3f
wisteria #a87dc2
banana yellow #fafe4b
lipstick red #c0022f
water blue #0e87cc
brown grey #8d8468
vibrant purple #ad03de
baby green #8cff9e
barf green #94ac02
eggshell blue #c4fff7
sandy yellow #fdee73
cool green #33b864
pale #fff9d0
blue/grey #758da3
hot magenta #f504c9
greyblue #77a1b5
purpley #8756e4
baby shit green #889717
brownish pink #c27e79
dark aquamarine #017371
diarrhea #9f8303
light mustard #f7d560
pale sky blue #bdf6fe
turtle green #75b84f
bright olive #9cbb04
dark grey blue #29465b
greeny brown #696006
lemon green #adf802
light periwinkle #c1c6fc
seaweed green #35ad6b
sunshine yellow #fffd37
ugly purple #a442a0
medium pink #f36196
puke brown #947706
very light pink #fff4f2
viridian #1e9167
bile #b5c306
faded yellow #feff7f
very pale green #cffdbc
vibrant green #0add08
bright lime #87fd05
spearmint #1ef876
light aquamarine #7bfdc7
light sage #bcecac
yellowgreen #bbf90f
baby poo #ab9004
dark seafoam #1fb57a
deep teal #00555a
heather #a484ac
rust orange #c45508
dirty blue #3f829d
fern green #548d44
bright lilac #c95efb
weird green #3ae57f
peacock blue #016795
avocado green #87a922
faded orange #f0944d
grape purple #5d1451
hot green #25ff29
lime yellow #d0fe1d
mango #ffa62b
shamrock #01b44c
bubblegum #ff6cb5
purplish brown #6b4247
vomit yellow #c7c10c
pale cyan #b7fffa
key lime #aeff6e
tomato red #ec2d01
lightgreen #76ff7b
merlot #730039
night blue #040348
purpleish pink #df4ec8
apple #6ecb3c
baby poop green #8f9805
green apple #5edc1f
heliotrope #d94ff5
yellow/green #c8fd3d
almost black #070d0d
cool blue #4984b8
leafy green #51b73b
mustard brown #ac7e04
dusk #4e5481
dull brown #876e4b
frog green #58bc08
vivid green #2fef10
bright light green #2dfe54
fluro green #0aff02
kiwi #9cef43
seaweed #18d17b
navy green #35530a
ultramarine blue #1805db
iris #6258c4
pastel orange #ff964f
yellowish orange #ffab0f
perrywinkle #8f8ce7
tealish #24bca8
dark plum #3f012c
pear #cbf85f
pinkish orange #ff724c
midnight purple #280137
light urple #b36ff6
dark mint #48c072
greenish tan #bccb7a
light burgundy #a8415b
turquoise blue #06b1c4
ugly pink #cd7584
sandy #f1da7a
electric pink #ff0490
muted purple #805b87
mid green #50a747
greyish #a8a495
neon yellow #cfff04
banana #ffff7e
carnation pink #ff7fa7
tomato #ef4026
sea #3c9992
muddy brown #886806
turquoise green #04f489
buff #fef69e
fawn #cfaf7b
muted blue #3b719f
pale rose #fdc1c5
dark mint green #20c073
amethyst #9b5fc0
blue/green #0f9b8e
chestnut #742802
sick green #9db92c
pea #a4bf20
rusty orange #cd5909
stone #ada587
rose red #be013c
pale aqua #b8ffeb
deep orange #dc4d01
earth #a2653e
mossy green #638b27
grassy green #419c03
pale lime green #b1ff65
light grey blue #9dbcd4
pale grey #fdfdfe
asparagus #77ab56
blueberry #464196
purple red #990147
pale lime #befd73
greenish teal #32bf84
caramel #af6f09
deep magenta #a0025c
light peach #ffd8b1
milk chocolate #7f4e1e
ocher #bf9b0c
off green #6ba353
purply pink #f075e6
lightblue #7bc8f6
dusky blue #475f94
golden #f5bf03
light beige #fffeb6
butter yellow #fffd74
dusky purple #895b7b
french blue #436bad
ugly yellow #d0c101
greeny yellow #c6f808
orangish red #f43605
shamrock green #02c14d
orangish brown #b25f03
tree green #2a7e19
deep violet #490648
gunmetal #536267
blue/purple #5a06ef
cherry #cf0234
sandy brown #c4a661
warm grey #978a84
dark indigo #1f0954
midnight #03012d
bluey green #2bb179
grey pink #c3909b
soft purple #a66fb5
blood #770001
brown red #922b05
medium grey #7d7f7c
berry #990f4b
poo #8f7303
purpley pink #c83cb9
light salmon #fea993
snot #acbb0d
easter purple #c071fe
light yellow green #ccfd7f
dark navy blue #00022e
drab #828344
light rose #ffc5cb
rouge #ab1239
purplish red #b0054b
slime green #99cc04
baby poop #937c00
irish green #019529
pink/purple #ef1de7
dark navy #000435
greeny blue #42b395
light plum #9d5783
pinkish grey #c8aca9
dirty orange #c87606
rust red #aa2704
pale lilac #e4cbff
orangey red #fa4224
primary blue #0804f9
kermit green #5cb200
brownish purple #76424e
murky green #6c7a0e
wheat #fbdd7e
very dark purple #2a0134
bottle green #044a05
watermelon #fd4659
deep sky blue #0d75f8
fire engine red #fe0002
yellow ochre #cb9d06
pumpkin orange #fb7d07
pale olive #b9cc81
light lilac #edc8ff
lightish green #61e160
carolina blue #8ab8fe
mulberry #920a4e
shocking pink #fe02a2
auburn #9a3001
bright lime green #65fe08
celadon #befdb7
pinkish brown #b17261
poo brown #885f01
bright sky blue #02ccfe
celery #c1fd95
dirt brown #836539
strawberry #fb2943
dark lime #84b701
copper #b66325
medium brown #7f5112
muted green #5fa052
robin's egg #6dedfd
bright aqua #0bf9ea
bright lavender #c760ff
ivory #ffffcb
very light purple #f6cefc
light navy #155084
pink red #f5054f
olive brown #645403
poop brown #7a5901
mustard green #a8b504
ocean green #3d9973
very dark blue #000133
dusty green #76a973
light navy blue #2e5a88
minty green #0bf77d
adobe #bd6c48
barney #ac1db8
jade green #2baf6a
bright light blue #26f7fd
light lime #aefd6c
dark khaki #9b8f55
orange yellow #ffad01
ocre #c69c04
maize #f4d054
faded pink #de9dac
british racing green #05480d
sandstone #c9ae74
mud brown #60460f
light sea green #98f6b0
robin egg blue #8af1fe
aqua marine #2ee8bb
dark sea green #11875d
soft pink #fdb0c0
orangey brown #b16002
cherry red #f7022a
burnt yellow #d5ab09
brownish grey #86775f
camel #c69f59
purplish grey #7a687f
marine #042e60
greyish pink #c88d94
pale turquoise #a5fbd5
pastel yellow #fffe71
bluey purple #6241c7
canary yellow #fffe40
faded red #d3494e
sepia #985e2b
coffee #a6814c
bright magenta #ff08e8
mocha #9d7651
ecru #feffca
purpleish #98568d
cranberry #9e003a
darkish green #287c37
brown orange #b96902
dusky rose #ba6873
melon #ff7855
sickly green #94b21c
silver #c5c9c7
purply blue #661aee
purpleish blue #6140ef
hospital green #9be5aa
shit brown #7b5804
mid blue #276ab3
amber #feb308
easter green #8cfd7e
soft blue #6488ea
cerulean blue #056eee
golden brown #b27a01
bright turquoise #0ffef9
red pink #fa2a55
red purple #820747
greyish brown #7a6a4f
vermillion #f4320c
russet #a13905
steel grey #6f828a
lighter purple #a55af4
bright violet #ad0afd
prussian blue #004577
slate green #658d6d
dirty pink #ca7b80
dark blue green #005249
pine #2b5d34
yellowy green #bff128
dark gold #b59410
bluish #2976bb
darkish blue #014182
dull red #bb3f3f
pinky red #fc2647
bronze #a87900
pale teal #82cbb2
military green #667c3e
barbie pink #fe46a5
bubblegum pink #fe83cc
pea soup green #94a617
dark mustard #a88905
shit #7f5f00
medium purple #9e43a2
very dark green #062e03
dirt #8a6e45
dusky pink #cc7a8b
red violet #9e0168
lemon yellow #fdff38
pistachio #c0fa8b
dull yellow #eedc5b
dark lime green #7ebd01
denim blue #3b5b92
teal blue #01889f
lightish blue #3d7afd
purpley blue #5f34e7
light indigo #6d5acf
swamp green #748500
brown green #706c11
dark maroon #3c0008
hot purple #cb00f5
dark forest green #002d04
faded blue #658cbb
drab green #749551
light lime green #b9ff66
snot green #9dc100
yellowish #faee66
light blue green #7efbb3
bordeaux #7b002c
light mauve #c292a1
ocean #017b92
marigold #fcc006
muddy green #657432
dull orange #d8863b
steel #738595
electric purple #aa23ff
fluorescent green #08ff08
yellowish brown #9b7a01
blush #f29e8e
soft green #6fc276
bright orange #ff5b00
lemon #fdff52
purple grey #866f85
acid green #8ffe09
pale lavender #eecffe
violet blue #510ac9
light forest green #4f9153
burnt red #9f2305
khaki green #728639
cerise #de0c62
faded purple #916e99
apricot #ffb16d
dark olive green #3c4d03
grey brown #7f7053
green grey #77926f
true blue #010fcc
pale violet #ceaefa
periwinkle blue #8f99fb
light sky blue #c6fcff
blurple #5539cc
green brown #544e03
bluegreen #017a79
bright teal #01f9c6
brownish yellow #c9b003
pea soup #929901
forest #0b5509
barney purple #a00498
ultramarine #2000b1
purplish #94568c
puke yellow #c2be0e
bluish grey #748b97
dark periwinkle #665fd1
dark lilac #9c6da5
reddish #c44240
light maroon #a24857
dusty purple #825f87
terra cotta #c9643b
avocado #90b134
marine blue #01386a
teal green #25a36f
slate grey #59656d
lighter green #75fd63
electric green #21fc0d
dusty blue #5a86ad
golden yellow #fec615
bright yellow #fffd01
light lavender #dfc5fe
umber #b26400
poop #7f5e00
dark peach #de7e5d
jungle green #048243
eggshell #ffffd4
denim #3b638c
yellow brown #b79400
dull purple #84597e
chocolate brown #411900
wine red #7b0323
neon blue #04d9ff
dirty green #667e2c
light tan #fbeeac
ice blue #d7fffe
cadet blue #4e7496
dark mauve #874c62
very light blue #d5ffff
grey purple #826d8c
pastel pink #ffbacd
very light green #d1ffbd
dark sky blue #448ee4
evergreen #05472a
dull pink #d5869d
aubergine #3d0734
mahogany #4a0100
reddish orange #f8481c
deep green #02590f
vomit green #89a203
purple pink #e03fd8
dusty pink #d58a94
faded green #7bb274
camo green #526525
pinky purple #c94cbe
pink purple #db4bda
brownish red #9e3623
dark rose #b5485d
mud #735c12
brownish #9c6d57
emerald green #028f1e
pale brown #b1916e
dull blue #49759c
burnt umber #a0450e
medium green #39ad48
clay #b66a50
light aqua #8cffdb
light olive green #a4be5c
brownish orange #cb7723
dark aqua #05696b
purplish pink #ce5dae
dark salmon #c85a53
greenish grey #96ae8d
jade #1fa774
ugly green #7a9703
dark beige #ac9362
emerald #01a049
pale red #d9544d
light magenta #fa5ff7
sky #82cafc
light cyan #acfffc
yellow orange #fcb001
reddish purple #910951
reddish pink #fe2c54
orchid #c875c4
dirty yellow #cdc50a
orange red #fd411e
deep red #9a0200
orange brown #be6400
cobalt blue #030aa7
neon pink #fe019a
rose pink #f7879a
greyish purple #887191
raspberry #b00149
aqua green #12e193
salmon pink #fe7b7c
tangerine #ff9408
brownish green #6a6e09
red brown #8b2e16
greenish brown #696112
pumpkin #e17701
pine green #0a481e
charcoal #343837
baby pink #ffb7ce
cornflower #6a79f7
blue violet #5d06e9
chocolate #3d1c02
greyish green #82a67d
scarlet #be0119
green yellow #c9ff27
dark olive #373e02
sienna #a9561e
pastel purple #caa0ff
terracotta #ca6641
aqua blue #02d8e9
sage green #88b378
blood red #980002
deep pink #cb0162
grass #5cac2d
moss #769958
pastel blue #a2bffe
bluish green #10a674
green blue #06b48b
dark tan #af884a
greenish blue #0b8b87
pale orange #ffa756
vomit #a2a415
forrest green #154406
dark lavender #856798
dark violet #34013f
purple blue #632de9
dark cyan #0a888a
olive drab #6f7632
pinkish #d46a7e
cobalt #1e488f
neon purple #bc13fe
light turquoise #7ef4cc
apple green #76cd26
dull green #74a662
wine #80013f
powder blue #b1d1fc
off white #ffffe4
electric blue #0652ff
dark turquoise #045c5a
blue purple #5729ce
azure #069af3
bright red #ff000d
pinkish red #f10c45
cornflower blue #5170d7
light olive #acbf69
grape #6c3461
greyish blue #5e819d
purplish blue #601ef9
yellowish green #b0dd16
greenish yellow #cdfd02
medium blue #2c6fbb
dusty rose #c0737a
light violet #d6b4fc
midnight blue #020035
bluish purple #703be7
red orange #fd3c06
dark magenta #960056
greenish #40a368
ocean blue #03719c
coral #fc5a50
cream #ffffc2
reddish brown #7f2b0a
burnt sienna #b04e0f
brick #a03623
sage #87ae73
grey green #789b73
white #ffffff
robin's egg blue #98eff9
moss green #658b38
steel blue #5a7d9a
eggplant #380835
light yellow #fffe7a
leaf green #5ca904
light grey #d8dcd6
puke #a5a502
pinkish purple #d648d7
sea blue #047495
pale purple #b790d4
slate blue #5b7c99
blue grey #607c8e
hunter green #0b4008
fuchsia #ed0dd9
crimson #8c000f
pale yellow #ffff84
ochre #bf9005
mustard yellow #d2bd0a
light red #ff474c
cerulean #0485d1
pale pink #ffcfdc
deep blue #040273
rust #a83c09
light teal #90e4c1
slate #516572
goldenrod #fac205
dark yellow #d5b60a
dark grey #363737
army green #4b5d16
grey blue #6b8ba4
seafoam #80f9ad
puce #a57e52
spring green #a9f971
dark orange #c65102
sand #e2ca76
pastel green #b0ff9d
mint #9ffeb0
light orange #fdaa48
bright pink #fe01b1
chartreuse #c1f80a
deep purple #36013f
dark brown #341c02
taupe #b9a281
pea green #8eab12
puke green #9aae07
kelly green #02ab2e
seafoam green #7af9ab
blue green #137e6d
khaki #aaa662
burgundy #610023
dark teal #014d4e
brick red #8f1402
royal purple #4b006e
plum #580f41
mint green #8fff9f
gold #dbb40c
baby blue #a2cffe
yellow green #c0fb2d
bright purple #be03fd
dark red #840000
pale blue #d0fefe
grass green #3f9b0b
navy #01153e
aquamarine #04d8b2
burnt orange #c04e01
neon green #0cff0c
bright blue #0165fc
rose #cf6275
light pink #ffd1df
mustard #ceb301
indigo #380282
lime #aaff32
sea green #53fca1
periwinkle #8e82fe
dark pink #cb416b
olive green #677a04
peach #ffb07c
pale green #c7fdb5
light brown #ad8150
hot pink #ff028d
black #000000
lilac #cea2fd
navy blue #001146
royal blue #0504aa
beige #e6daa6
salmon #ff796c
olive #6e750e
maroon #650021
bright green #01ff07
dark purple #35063e
mauve #ae7181
forest green #06470c
aqua #13eac9
cyan #00ffff
tan #d1b26f
dark blue #00035b
lavender #c79fef
turquoise #06c2ac
dark green #033500
violet #9a0eea
light purple #bf77f6
lime green #89fe05
grey #929591
sky blue #75bbfd
yellow #ffff14
magenta #c20078
light green #96f97b
orange #f97306
teal #029386
light blue #95d0fc
red #e50000
brown #653700
pink #ff81c0
blue #0343df
green #15b01a
purple #7e1e9c`
|
package nooler
import (
"database/sql"
"fmt"
"log"
"net/http"
_ "github.com/go-sql-driver/mysql"
"github.com/gorilla/mux"
"github.com/seongminnpark/nooler-server/internal/app/nooler/handler"
)
// App wires the HTTP request router to the shared SQL connection pool.
type App struct {
	Router *mux.Router
	DB *sql.DB
}
// Initialize opens the MySQL connection pool for user/password/dbName
// and builds the route table. Exits the process (log.Fatal) when the
// connection string cannot be parsed.
// NOTE(review): sql.Open validates the DSN but does not dial the
// server, so bad credentials only surface on first use; consider a
// Ping here if fail-fast startup is wanted — confirm desired behavior.
func (app *App) Initialize(user, password, dbName string) {
	connectionString := fmt.Sprintf("%s:%s@/%s", user, password, dbName)
	var err error
	app.DB, err = sql.Open("mysql", connectionString)
	if err != nil {
		log.Fatal(err)
	}
	app.Router = mux.NewRouter()
	app.initializeRoutes()
}
// Run serves the app's router on addr, blocking until the server exits;
// any listen/serve error terminates the process via log.Fatal.
func (app *App) Run(addr string) {
	log.Fatal(http.ListenAndServe(addr, app.Router))
}
// initializeRoutes registers the REST endpoints: user CRUD plus login,
// and device create/read. Both handlers share the app's DB pool.
func (app *App) initializeRoutes() {
	userHandler := handler.UserHandler{DB: app.DB}
	app.Router.HandleFunc("/user", userHandler.CreateUser).Methods("POST")
	app.Router.HandleFunc("/user", userHandler.GetUser).Methods("GET")
	app.Router.HandleFunc("/user", userHandler.UpdateUser).Methods("PUT")
	app.Router.HandleFunc("/user", userHandler.DeleteUser).Methods("DELETE")
	app.Router.HandleFunc("/login", userHandler.Login).Methods("POST")
	deviceHandler := handler.DeviceHandler{DB: app.DB}
	app.Router.HandleFunc("/device", deviceHandler.CreateDevice).Methods("POST")
	app.Router.HandleFunc("/device", deviceHandler.GetDevice).Methods("GET")
}
|
package connrt
import (
"github.com/gookit/event"
"github.com/kbence/conndetect/internal/connlib"
"github.com/kbence/conndetect/internal/utils"
)
// ConnectionPrinter subscribes to new-connection events and prints each
// one with a timestamp.
type ConnectionPrinter struct {
	Node
	printer utils.Printer
	time utils.Time
}
// NewConnectionPrinter builds a ConnectionPrinter with default printer
// and clock implementations and registers its Handle method for
// new-connection events on the given event manager.
func NewConnectionPrinter(eventManager event.ManagerFace) *ConnectionPrinter {
	printer := &ConnectionPrinter{
		Node: Node{eventManager: eventManager},
		printer: utils.NewPrinter(),
		time: utils.NewTime(),
	}
	eventManager.On(eventNewConnection, event.ListenerFunc(printer.Handle))
	return printer
}
// Handle prints one timestamped line per new-connection event. Events
// that do not carry a connlib.DirectionalConnection payload under the
// "connection" key are ignored.
func (p *ConnectionPrinter) Handle(e event.Event) error {
	// Zero value of a pointer is nil; the explicit "= nil" and the
	// single-case type switch in the original were replaced with the
	// idiomatic forms.
	var connection *connlib.DirectionalConnection
	if connObj := e.Get("connection"); connObj != nil {
		if conn, ok := connObj.(connlib.DirectionalConnection); ok {
			connection = &conn
		}
	}
	// Swallow malformed events for now.
	// TODO: handle this more gracefully, e.g. by logging or by emitting
	// an error event.
	if connection == nil {
		return nil
	}
	p.printer.Printf(
		"%s: New connection: %s -> %s\n",
		p.time.Now().Format(TIME_FORMAT),
		connection.Source.String(),
		connection.Destination.String(),
	)
	return nil
}
|
package game
import (
"sync"
"sofa/proto"
"sofa/network"
)
// Sid2GameRoom is a concurrency-safe map from scene id to its GameRoom.
type Sid2GameRoom struct {
	sid2scene map[uint32]*GameRoom
	umLock    sync.RWMutex
}

// NewSid2GameRoom returns an empty, ready-to-use room registry.
func NewSid2GameRoom() *Sid2GameRoom {
	return &Sid2GameRoom{sid2scene: make(map[uint32]*GameRoom)}
}

// GainGameRoom returns the room registered under sid, creating and
// registering a new one when none exists yet.
func (m *Sid2GameRoom) GainGameRoom(sid uint32) *GameRoom {
	m.umLock.Lock()
	defer m.umLock.Unlock()
	if room, found := m.sid2scene[sid]; found {
		return room
	}
	room := NewGameRoom(sid)
	m.sid2scene[sid] = room
	return room
}

// RmGameRoom drops the room registered under sid, if any.
func (m *Sid2GameRoom) RmGameRoom(sid uint32) {
	m.umLock.Lock()
	defer m.umLock.Unlock()
	delete(m.sid2scene, sid)
}

// GetGameRoom looks up the room for sid without creating one.
func (m *Sid2GameRoom) GetGameRoom(sid uint32) (*GameRoom, bool) {
	m.umLock.RLock()
	defer m.umLock.RUnlock()
	room, found := m.sid2scene[sid]
	return room, found
}

// Len reports the number of registered rooms.
func (m *Sid2GameRoom) Len() int {
	m.umLock.RLock()
	defer m.umLock.RUnlock()
	return len(m.sid2scene)
}

// GetSids returns the Sid of every registered room (nil when empty).
func (m *Sid2GameRoom) GetSids() []uint32 {
	m.umLock.RLock()
	defer m.umLock.RUnlock()
	var sids []uint32
	for _, room := range m.sid2scene {
		sids = append(sids, room.Sid)
	}
	return sids
}

// GetGameRooms returns every registered room (nil when empty).
func (m *Sid2GameRoom) GetGameRooms() []*GameRoom {
	m.umLock.RLock()
	defer m.umLock.RUnlock()
	var rooms []*GameRoom
	for _, room := range m.sid2scene {
		rooms = append(rooms, room)
	}
	return rooms
}
//////////////////////////////////////////////////////////////////////////////////
// Uid2Player is a concurrency-safe map from user id to *Player.
type Uid2Player struct {
	uid2player map[uint32]*Player // uid -> *Player
	umLock     sync.RWMutex
}

// NewUid2Player returns an empty, ready-to-use player registry.
func NewUid2Player() *Uid2Player {
	return &Uid2Player{uid2player: make(map[uint32]*Player)}
}

// AddPlayer registers player under uid, replacing any previous entry.
func (m *Uid2Player) AddPlayer(uid uint32, player *Player) {
	m.umLock.Lock()
	defer m.umLock.Unlock()
	m.uid2player[uid] = player
}

// RmPlayer removes the entry registered under uid, if any.
func (m *Uid2Player) RmPlayer(uid uint32) {
	m.umLock.Lock()
	defer m.umLock.Unlock()
	delete(m.uid2player, uid)
}

// GetPlayer looks up the player registered under uid.
func (m *Uid2Player) GetPlayer(uid uint32) (*Player, bool) {
	m.umLock.RLock()
	defer m.umLock.RUnlock()
	player, found := m.uid2player[uid]
	return player, found
}

// Len reports the number of registered players.
func (m *Uid2Player) Len() int {
	m.umLock.RLock()
	defer m.umLock.RUnlock()
	return len(m.uid2player)
}

// GetUids returns the Uid of every registered player (nil when empty).
func (m *Uid2Player) GetUids() []uint32 {
	m.umLock.RLock()
	defer m.umLock.RUnlock()
	var uids []uint32
	for _, player := range m.uid2player {
		uids = append(uids, player.Uid)
	}
	return uids
}

// GetPlayers returns every registered player (nil when empty).
func (m *Uid2Player) GetPlayers() []*Player {
	m.umLock.RLock()
	defer m.umLock.RUnlock()
	var players []*Player
	for _, player := range m.uid2player {
		players = append(players, player)
	}
	return players
}
//////////////////////////////////////////////////////////////////////
// Conn2Player is a concurrency-safe map from client connection to the
// Player bound to it.
type Conn2Player struct {
	cliConn2Player map[*network.ClientConnection]*Player
	cLock          sync.RWMutex
}

// NewConn2Player returns an empty, ready-to-use connection registry.
func NewConn2Player() *Conn2Player {
	return &Conn2Player{cliConn2Player: make(map[*network.ClientConnection]*Player)}
}

// AddPlayer binds player to cliConn, replacing any previous binding.
func (m *Conn2Player) AddPlayer(cliConn *network.ClientConnection, player *Player) {
	m.cLock.Lock()
	defer m.cLock.Unlock()
	m.cliConn2Player[cliConn] = player
}

// RmPlayer removes the binding for cliConn, if any.
func (m *Conn2Player) RmPlayer(cliConn *network.ClientConnection) {
	m.cLock.Lock()
	defer m.cLock.Unlock()
	delete(m.cliConn2Player, cliConn)
}

// GetPlayer returns the player bound to cliConn and whether one exists.
func (m *Conn2Player) GetPlayer(cliConn *network.ClientConnection) (*Player, bool) {
	m.cLock.RLock()
	defer m.cLock.RUnlock()
	player, found := m.cliConn2Player[cliConn]
	return player, found
}

// Len reports the number of bound connections.
func (m *Conn2Player) Len() int {
	m.cLock.RLock()
	defer m.cLock.RUnlock()
	return len(m.cliConn2Player)
}
//////////////////////////////////////////////////////////////////////
// GameStatus guards the current proto.GameStatus with an embedded
// read/write mutex.
type GameStatus struct {
	Status proto.GameStatus
	sync.RWMutex
}

// NewGameStatus returns a status holder in the NotStarted state.
func NewGameStatus() *GameStatus {
	return &GameStatus{Status: proto.GameStatus_NotStarted}
}

// SetStatus replaces the current status under the write lock.
func (g *GameStatus) SetStatus(st proto.GameStatus) {
	g.Lock()
	defer g.Unlock()
	g.Status = st
}

// GetStatus returns the current status under the read lock.
func (g *GameStatus) GetStatus() proto.GameStatus {
	g.RLock()
	defer g.RUnlock()
	return g.Status
}

// IsStarted reports whether the status equals proto.GameStatus_Started.
func (g *GameStatus) IsStarted() bool {
	g.RLock()
	defer g.RUnlock()
	return g.Status == proto.GameStatus_Started
}
//////////////////////////////////////////////////////////////////////
// Uid2Seat maps a user id to a uint32 value — presumably the seat
// number the user occupies; confirm with callers.
type Uid2Seat map[uint32]uint32
// Uid2Winner maps a user id to that user's UserData — presumably only
// winners are entered, judging by the name; confirm with callers.
type Uid2Winner map[uint32]*proto.UserData
// Seat2UserData maps a seat number to the UserData seated there.
type Seat2UserData map[uint32]*proto.UserData
|
// Copyright 2020 Kuei-chun Chen. All rights reserved.
package mdb
import (
"context"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
)
// HostInfo models the reply of MongoDB's "hostInfo" admin command:
// kernel, operating system, and system hardware details. (The previous
// comment said "build information", which describes buildInfo, not
// hostInfo.)
type HostInfo struct {
	// Extra holds additional kernel details.
	Extra struct {
		KernelVersion string
	}
	// OS describes the host operating system.
	OS struct {
		Name string
		Type string
		Version string
	}
	// System describes the host hardware; the bson "truncate" option
	// permits lossy numeric conversion while decoding.
	System struct {
		CPUAddrSize int `bson:"cpuAddrSize,truncate"`
		CPUArch string `bson:"cpuArch"`
		Hostname string `bson:"hostname"`
		MemLimitMB int `bson:"memLimitMB,truncate"`
		MemSizeMB int `bson:"memSizeMB,truncate"`
		NumaEnabled bool `bson:"numaEnabled"`
		NumCores int `bson:"numCores,truncate"`
	}
}
// GetHostInfo runs the "hostInfo" command against the admin database
// and decodes the reply into a HostInfo.
func GetHostInfo(client *mongo.Client) (HostInfo, error) {
	ctx := context.Background()
	var hostInfo HostInfo
	err := client.Database("admin").RunCommand(ctx, bson.D{{Key: "hostInfo", Value: 1}}).Decode(&hostInfo)
	return hostInfo, err
}
|
package store
import (
"os"
"path/filepath"
"github.com/openshift/installer/pkg/asset"
)
// fileFetcher loads asset files from a base directory on disk.
type fileFetcher struct {
	directory string
}
// FetchByName returns the file with the given name, read from the
// fetcher's base directory.
func (f *fileFetcher) FetchByName(name string) (*asset.File, error) {
	contents, err := os.ReadFile(filepath.Join(f.directory, name))
	if err != nil {
		return nil, err
	}
	file := &asset.File{Filename: name, Data: contents}
	return file, nil
}
// FetchByPattern returns the files whose name match the given glob
// pattern, with filenames reported relative to the base directory.
func (f *fileFetcher) FetchByPattern(pattern string) ([]*asset.File, error) {
	matches, err := filepath.Glob(filepath.Join(f.directory, pattern))
	if err != nil {
		return nil, err
	}
	files := make([]*asset.File, 0, len(matches))
	for _, match := range matches {
		contents, readErr := os.ReadFile(match)
		if readErr != nil {
			return nil, readErr
		}
		relName, relErr := filepath.Rel(f.directory, match)
		if relErr != nil {
			return nil, relErr
		}
		files = append(files, &asset.File{Filename: relName, Data: contents})
	}
	return files, nil
}
|
package tools
import (
"os"
"github.com/sapk/sca/pkg"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
// TypeOrEnv reads the string flag named flag from cmd, falling back to
// the environment variable envname when the flag value is empty.
func TypeOrEnv(cmd *cobra.Command, flag, envname string) string {
	value, _ := cmd.Flags().GetString(flag)
	if value != "" {
		return value
	}
	return os.Getenv(envname)
}
// SetupLogger sets the global logrus level from the verbose flag:
// debug when set, info otherwise. The (cmd, args) signature fits
// cobra's Run/PreRun hook shape — presumably installed as a PreRun;
// confirm at the call site.
func SetupLogger(cmd *cobra.Command, args []string) {
	if verbose, _ := cmd.Flags().GetBool(pkg.VerboseFlag); verbose {
		log.SetLevel(log.DebugLevel)
	} else {
		log.SetLevel(log.InfoLevel)
	}
}
|
package rancher
import (
"github.com/docker/libcompose/config"
"github.com/docker/libcompose/project"
)
// RancherServiceFactory creates libcompose services backed by a
// Rancher context.
type RancherServiceFactory struct {
	Context *Context
}
// Create builds the service for name: a sidekick when the name is
// registered as a sidekick of one or more primaries in the context,
// otherwise a regular service. The error result is always nil.
func (r *RancherServiceFactory) Create(project *project.Project, name string, serviceConfig *config.ServiceConfig) (project.Service, error) {
	// Early return instead of else-after-return (Go idiom).
	if len(r.Context.SidekickInfo.sidekickToPrimaries[name]) > 0 {
		return NewSidekick(name, serviceConfig, r.Context), nil
	}
	return NewService(name, serviceConfig, r.Context), nil
}
|
package extractor
import (
"testing"
)
// TestPackageComplexity checks the total complexity value computed for
// the bundled example package.
func TestPackageComplexity(t *testing.T) {
	complexity, err := PackageComplexity("github.com/freenerd/mccabe-cyclomatic/example")
	if err != nil {
		// Fatal, not Error: without the package there is nothing
		// meaningful to compare, and the follow-on check would only
		// produce a misleading second failure.
		t.Fatalf("expected package to open, got %v", err)
	}
	var expected int64 = 8
	if complexity != expected {
		t.Errorf("expected example file complexity to be %d, got %d", expected, complexity)
	}
}
|
package main
import "fmt"
// A robot starts at cell [0, 0] of an m x n grid. Each step it may move
// one cell up, down, left, or right, never leaving the grid and never
// entering a cell whose row and column decimal digit sums add up to
// more than k. For k = 18 it may enter [35, 37] (3+5+3+7 = 18) but not
// [35, 38] (3+5+3+8 = 19). How many cells can the robot reach?
//
// Example 1: m = 2, n = 3, k = 1  -> 3
// Example 2: m = 3, n = 1, k = 0  -> 1
func main() {
	fmt.Println(movingCount(2, 3, 1))
	//fmt.Println(movingCount(2, 3, 0))
	fmt.Println(movingCount(36, 38, 18))
}
// movingCount returns how many cells of an m x n grid a robot starting
// at (0, 0) can reach, where a cell (i, j) may be entered only when
// digitSum(i)+digitSum(j) <= k and moves go one cell in the four
// cardinal directions.
func movingCount(m int, n int, k int) int {
	visit := make([][]bool, m)
	for i := range visit {
		visit[i] = make([]bool, n)
	}
	var travel func(i, j int) int
	travel = func(i, j int) int {
		if i < 0 || i >= m || j < 0 || j >= n || visit[i][j] || digitSum(i)+digitSum(j) > k {
			return 0
		}
		visit[i][j] = true
		// Count this cell plus everything reachable from its neighbors.
		return 1 + travel(i-1, j) + travel(i, j-1) + travel(i+1, j) + travel(i, j+1)
	}
	return travel(0, 0)
}

// digitSum returns the sum of the decimal digits of v (v >= 0).
// Generalizes the original inline i/10+i%10 expression, which was only
// correct for coordinates below 100.
func digitSum(v int) int {
	s := 0
	for v > 0 {
		s += v % 10
		v /= 10
	}
	return s
}
|
package main
import(
"linknodelist"
"fmt"
)
// main is a console demo that exercises every public operation of
// linknodelist.LinkNodeList: construction, cursor movement, insertion,
// deletion, search, modification, and clearing. All banner and progress
// strings below (mostly Chinese) are user-facing runtime output and are
// deliberately left byte-for-byte unchanged.
func main(){
	// Banner: the list of methods this demo exercises.
	fmt.Println("==================================================================================================")
	fmt.Println("==================================================================================================")
	fmt.Println("-------------实现方法如下:----------------")
	fmt.Println("-------------MoveToStart()----------------")
	fmt.Println("-------------MoveToEnd()------------------")
	fmt.Println("-------------MoveToPos()------------------")
	fmt.Println("-------------Prev()-----------------------")
	fmt.Println("-------------Next()-----------------------")
	fmt.Println("-------------Currpos()--------------------")
	fmt.Println("-------------Initlist()-------------------")
	fmt.Println("-------------Showlist()-------------------")
	fmt.Println("-------------Push_back(data int)----------")
	fmt.Println("-------------Push_front(data int)---------")
	fmt.Println("-------------Pop_back()-------------------")
	fmt.Println("-------------Pop_front()------------------")
	fmt.Println("-------------Find(data int)---------------")
	fmt.Println("-------------Modify(data int,newValue int)")
	fmt.Println("-------------Delete_val()-----------------")
	fmt.Println("-------------Clear()----------------------")
	fmt.Println("-------------Length()---------------------")
	fmt.Println("-------------Insert(data int,index int)---")
	fmt.Println("-------------GetValue()-------------------")
	fmt.Println("==================================================================================================")
	fmt.Println("==================================================================================================")
	fmt.Println()
	var ls linknodelist.LinkNodeList
	fmt.Println("----------------InitList()-------- -----")
	ls.InitList()
	// Seed the list: even values pushed to the back, odd to the front.
	for i:=0;i<10;i++{
		if(i%2==0){
			ls.Push_back(i)
		}else{
			ls.Push_front(i)
		}
	}
	fmt.Println("----------------Show_list()----------------")
	fmt.Println()
	ls.Show_list()
	fmt.Println()
	fmt.Println()
	// Cursor movement: start, end, then an absolute position.
	fmt.Println("----------------MoveToFirst()--------------")
	fmt.Println("----------------MoveToEnd()----------------")
	fmt.Println("----------------MoveToPos(pos int)---------")
	fmt.Println("----------------CurrPos--------------------")
	fmt.Println()
	// ls.Show_list()
	fmt.Println("movetofirst:")
	ls.MoveToStart()
	fmt.Println("当前位置为:",ls.CurrPos())
	fmt.Println("movetoend:")
	ls.MoveToEnd()
	fmt.Println("当前位置为:",ls.CurrPos())
	fmt.Println("movetopos(6):")
	ls.MoveToPos(6)
	fmt.Println("当前位置为:",ls.CurrPos())
	fmt.Println()
	fmt.Println()
	fmt.Println("----------------Length()-------------------")
	fmt.Println()
	length:=ls.Length()
	fmt.Println("the size of the linknodelist is:",length)
	fmt.Println()
	// Positional inserts: in-range, head, middle, and past-the-end.
	fmt.Println("----------insert(data int,pos int)指定插入位-")
	fmt.Println()
	fmt.Println("插入66到为第11个结点")
	ls.Insert(66,11)
	ls.Show_list()
	fmt.Println()
	fmt.Println("插入77到为第1个结点")
	ls.Insert(77,1)
	ls.Show_list()
	fmt.Println()
	fmt.Println("插入88到为第5个结点")
	ls.Insert(88,5)
	ls.Show_list()
	fmt.Println()
	fmt.Println("插入99到为第20个结点")
	ls.Insert(99,20)
	ls.Show_list()
	fmt.Println()
	fmt.Println()
	fmt.Println("-----------------Pop_back()-----------------")
	fmt.Println()
	if(ls.Pop_back()){
		ls.Show_list()
	}
	fmt.Println()
	fmt.Println()
	fmt.Println("-----------------Pop_front()----------------")
	fmt.Println()
	if(ls.Pop_front()){
		ls.Show_list()
	}
	fmt.Println()
	fmt.Println()
	// Delete by value: scan with the cursor, remove the first match.
	fmt.Println("-----------------Delete_val()删除当前位置元素-")
	fmt.Println()
	fmt.Println("删除元素1:")
	var tag bool
	for ls.MoveToStart();ls.CurrPos()<ls.Length();ls.Next(){
		if(ls.GetValue()==1){
			ls.Delete_val()
			tag=true
			break
		}
	}
	if tag{
		fmt.Println("删除成功")
		// Reset the flag so the next scan starts from a clean state.
		tag=false
	}else{
		fmt.Println("没有找到要删除的元素")
	}
	fmt.Println()
	ls.Show_list()
	fmt.Println()
	fmt.Println("删除元素20:")
	for ls.MoveToStart();ls.CurrPos()<ls.Length();ls.Next(){
		if(ls.GetValue()==20){
			ls.Delete_val()
			tag=true
			break
		}
	}
	if tag{
		fmt.Println("删除成功")
	}else{
		fmt.Println("没有找到要删除的元素")
	}
	fmt.Println()
	ls.Show_list()
	fmt.Println()
	fmt.Println()
	// Search: one value expected present, one absent.
	fmt.Println("-------------Find(data int):返回指针-------")
	fmt.Println()
	fmt.Println("Find(8):",ls.Find(8))
	fmt.Println("Find(7):",ls.Find(7))
	fmt.Println()
	// Modify: one successful replacement, one on a missing value.
	fmt.Println("--------------Modify(old int,new int) ------")
	fmt.Println()
	fmt.Print("修改2为22:")
	if(ls.Modify(2,22)){
		fmt.Println("修改成功")
	}else{
		fmt.Println("修改失败")
	}
	ls.Show_list()
	fmt.Println()
	fmt.Print("修改10为100:")
	if(ls.Modify(10,100)){
		fmt.Println("修改成功")
	}else{
		fmt.Println("修改失败")
	}
	ls.Show_list()
	fmt.Println()
	fmt.Println("----------------clear()----------------------")
	fmt.Println()
	ls.Clear()
	ls.Show_list()
	fmt.Println()
	fmt.Println("============================================================================================")
	fmt.Println("============================================================================================")
}
|
package sdutils
import (
"io"
"fmt"
"net/http"
"io/ioutil"
"bytes"
"encoding/json"
"time"
"errors"
)
// stardogClientImpl talks to a Stardog server's HTTP admin API,
// authenticating every request with HTTP basic auth.
type stardogClientImpl struct {
	sdURL    string     // base URL of the Stardog server
	password string     // basic-auth password
	username string     // basic-auth username
	logger   SdVaLogger // destination for debug/warn logging
}
// doRequest issues an HTTP request where the Accept header mirrors the
// Content-Type; see doRequestWithAccept for the full behavior.
func (s *stardogClientImpl) doRequest(method, urlStr string, body io.Reader, contentType string, expectedCode int) ([]byte, int, error) {
	return s.doRequestWithAccept(method, urlStr, body, contentType, contentType, expectedCode)
}
// doRequestWithAccept performs an HTTP request with basic auth against the
// Stardog server, setting the given Content-Type and (when non-empty)
// Accept headers.
//
// It returns the response body, the HTTP status code (-1 when the request
// could not be made at all), and an error when the request fails, the body
// cannot be read, or the status code does not match expectedCode.
func (s *stardogClientImpl) doRequestWithAccept(method, urlStr string, body io.Reader, contentType string, accept string, expectedCode int) ([]byte, int, error) {
	req, err := http.NewRequest(method, urlStr, body)
	if err != nil {
		return nil, -1, err
	}
	req.SetBasicAuth(s.username, s.password)
	client := &http.Client{}
	req.Header.Set("Content-Type", contentType)
	if accept != "" {
		req.Header.Set("Accept", accept)
	}
	resp, err := client.Do(req)
	if err != nil {
		return nil, -1, fmt.Errorf("Failed do the post %s", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != expectedCode {
		return nil, resp.StatusCode, fmt.Errorf("Expected %d but got %d when %s to %s", expectedCode, resp.StatusCode, method, urlStr)
	}
	content, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		// BUG FIX: the read error was previously discarded, which could hand
		// a truncated body to the caller along with a nil error.
		return nil, resp.StatusCode, err
	}
	s.logger.Logf(DEBUG, "Completed %s to %s", method, urlStr)
	return content, resp.StatusCode, nil
}
// GetClusterInfo returns the list of node addresses reported by the
// Stardog admin/cluster endpoint. While the server answers 503 (cluster
// still forming) it retries up to 10 times, sleeping 2 seconds between
// attempts, before giving up with a timeout error.
func (s *stardogClientImpl) GetClusterInfo() (*[]string, error) {
	s.logger.Logf(DEBUG, "GetClusterInfo\n")
	dbURL := fmt.Sprintf("%s/admin/cluster", s.sdURL)
	bodyBuf := &bytes.Buffer{}
	content, code, err := s.doRequest("GET", dbURL, bodyBuf, "application/json", 200)
	for i := 0; code == 503; i++ {
		if i > 10 {
			return nil, errors.New("timeout waiting to get cluster information")
		}
		s.logger.Logf(WARN, "The first request to admin/cluster failed")
		time.Sleep(2 * time.Second)
		content, code, err = s.doRequest("GET", dbURL, bodyBuf, "application/json", 200)
	}
	if err != nil {
		return nil, err
	}
	var nodesMap map[string]interface{}
	err = json.Unmarshal(content, &nodesMap)
	if err != nil {
		return nil, err
	}
	nodeList := nodesMap["nodes"]
	if nodeList == nil {
		return nil, errors.New("There is no available cluster information")
	}
	ifaceList, ok := nodeList.([]interface{})
	if !ok {
		return nil, fmt.Errorf("The returned cluster information was not expected %s", nodeList)
	}
	s.logger.Logf(DEBUG, "Interface list %s", ifaceList)
	outSList := make([]string, len(ifaceList))
	for i, nodeI := range ifaceList {
		// BUG FIX: a bare nodeI.(string) assertion would panic if the server
		// returned a non-string entry; fail with an error instead.
		nodeStr, ok := nodeI.(string)
		if !ok {
			return nil, fmt.Errorf("The returned cluster information was not expected %s", nodeI)
		}
		outSList[i] = nodeStr
	}
	return &outSList, nil
}
|
package global
import (
"github.com/casbin/casbin/v2"
"github.com/go-redis/redis/v7"
"gorm.io/gorm"
)
// Eloquent is the application-wide gorm database handle (ORM).
var Eloquent *gorm.DB

// Rdb is the application-wide redis client.
var Rdb *redis.Client

// CasbinEnforcer is the application-wide casbin enforcer used for
// access-control checks.
var CasbinEnforcer *casbin.SyncedEnforcer
|
// Unit tests for default configuration service.
//
// @author TSS
package service
import (
"testing"
coreservice "github.com/mashmb/1pass/1pass-core/core/service"
"github.com/mashmb/1pass/1pass-core/port/out"
"github.com/mashmb/1pass/1pass-parse/repo/file"
)
// setupConfigService builds a ConfigService backed by the file-based
// config repository rooted at the shared test assets directory.
func setupConfigService() coreservice.ConfigService {
	var configRepo out.ConfigRepo = file.NewFileConfigRepo("../../../../assets")
	return coreservice.NewDfltConfigService(configRepo)
}
// TestIsConfigAvailable checks that a config file is detected in the
// test assets.
func TestIsConfigAvailable(t *testing.T) {
	service := setupConfigService()
	if available := service.IsConfigAvailable(); available != true {
		t.Errorf("IsConfigAvailable() = %v; expected = %v", available, true)
	}
}
// TestGetConfig checks that a non-nil config is loaded from the assets.
func TestGetConfig(t *testing.T) {
	if config := setupConfigService().GetConfig(); config == nil {
		t.Error("GetConfig() should pass because of valid config")
	}
}
// TestSaveConfig round-trips two vault values through SaveConfig/GetConfig
// and verifies each one is persisted.
func TestSaveConfig(t *testing.T) {
	service := setupConfigService()
	for _, vault := range []string{"", "./assets/onepassword_data"} {
		config := service.GetConfig()
		config.Vault = vault
		service.SaveConfig(config)
		config = service.GetConfig()
		if config.Vault != vault {
			t.Errorf("SaveConfig() = %v; expected = %v", config.Vault, vault)
		}
	}
}
|
package sqldb
import (
"fmt"
log "github.com/sirupsen/logrus"
"upper.io/db.v3"
"upper.io/db.v3/lib/sqlbuilder"
)
// backfillClusterName is a migration step that populates the clustername
// column of tableName with clusterName wherever it is currently NULL.
type backfillClusterName struct {
	clusterName string // value to write into the clustername column
	tableName   string // table whose rows are back-filled
}
// String implements fmt.Stringer for logging/debugging.
func (s backfillClusterName) String() string {
	return fmt.Sprintf("backfillClusterName{%s,%s}", s.clusterName, s.tableName)
}
// apply back-fills the clustername column: it selects every row of
// s.tableName whose clustername is NULL and updates each one, by uid, to
// s.clusterName. Rows affected other than exactly 1 are logged as warnings.
func (s backfillClusterName) apply(session sqlbuilder.Database) error {
	log.WithField("clustername", s.clusterName).Info("Back-filling cluster name")
	rs, err := session.
		Select("uid").
		From(s.tableName).
		Where(db.Cond{"clustername": nil}).
		Query()
	if err != nil {
		return err
	}
	// BUG FIX: the result set was never closed, leaking the underlying
	// rows/connection on every invocation.
	defer rs.Close()
	for rs.Next() {
		uid := ""
		if err := rs.Scan(&uid); err != nil {
			return err
		}
		logCtx := log.WithFields(log.Fields{"clustername": s.clusterName, "uid": uid})
		logCtx.Info("Back-filling cluster name")
		// NOTE(review): the select reads column "uid" but the update filters
		// on "uuid" — confirm both column names are correct for this table.
		res, err := session.
			Update(s.tableName).
			Set("clustername", s.clusterName).
			Where(db.Cond{"clustername": nil}).
			And(db.Cond{"uuid": uid}).
			Exec()
		if err != nil {
			return err
		}
		rowsAffected, err := res.RowsAffected()
		if err != nil {
			return err
		}
		if rowsAffected != 1 {
			logCtx.WithField("rowsAffected", rowsAffected).Warn("Expected exactly one row affected")
		}
	}
	// BUG FIX: surface any iteration error that terminated the loop early.
	return rs.Err()
}
|
/*
Copyright 2020 Skyscanner Limited.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package controllers
import (
"context"
"crypto/x509"
"net"
"testing"
"time"
kmsiapi "github.com/Skyscanner/kms-issuer/api/v1alpha1"
apiutil "github.com/jetstack/cert-manager/pkg/api/util"
cmapi "github.com/jetstack/cert-manager/pkg/apis/certmanager/v1"
cmmeta "github.com/jetstack/cert-manager/pkg/apis/meta/v1"
"github.com/Skyscanner/kms-issuer/pkg/kmsca"
"github.com/jetstack/cert-manager/test/e2e/util"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
apiequality "k8s.io/apimachinery/pkg/api/equality"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
clientgoscheme "k8s.io/client-go/kubernetes/scheme"
"k8s.io/client-go/tools/record"
fakeclock "k8s.io/utils/clock/testing"
"sigs.k8s.io/controller-runtime/pkg/client"
fakeclient "sigs.k8s.io/controller-runtime/pkg/client/fake"
logf "sigs.k8s.io/controller-runtime/pkg/log"
)
// Integration-style spec: a KMSIssuer is created and, once its CA
// certificate has been published by the issuer controller, an approved
// CertificateRequest referencing it should end up with a signed
// certificate in its status.
// Depends on the suite-level fixtures `ca` and `k8sClient`.
var _ = Context("CertificateRequestReconciler", func() {
	Describe("when a new CertificateRequest is created", func() {
		It("should sign the certificate request", func() {
			By("Creating a KMSIssuer")
			keyID, err := ca.CreateKey(&kmsca.CreateKeyInput{
				AliasName: "alias/test-key",
			})
			Expect(err).To(BeNil())
			issuerKey := client.ObjectKey{
				Name:      "test-kms-issuer",
				Namespace: "default",
			}
			issuer := &kmsiapi.KMSIssuer{
				ObjectMeta: metav1.ObjectMeta{
					Name:      issuerKey.Name,
					Namespace: issuerKey.Namespace,
				},
				Spec: kmsiapi.KMSIssuerSpec{
					KeyID:      keyID,
					CommonName: "RootCA",
					Duration:   &metav1.Duration{},
				},
			}
			Expect(k8sClient.Create(context.Background(), issuer)).Should(Succeed(), "failed to create test KMSIssuer resource")
			// Poll (1s timeout, 100ms interval) until the issuer controller has
			// published the CA certificate into the issuer status.
			Eventually(
				func() bool {
					issuer := &kmsiapi.KMSIssuer{}
					Expect(k8sClient.Get(context.Background(), issuerKey, issuer)).Should(Succeed(), "failed to get KMSIssuer resource")
					return len(issuer.Status.Certificate) > 0
				},
				time.Second*1, time.Millisecond*100,
			).Should(BeTrue(), "Certificate should be set")
			By("Creating a Certificate Request to be signed by the KMS Issuer")
			crKey := client.ObjectKey{
				Name:      "test-kms-issuer",
				Namespace: "default",
			}
			exampleDNSNames := []string{"dnsName1.co", "dnsName2.ninja"}
			exampleIPAddresses := []net.IP{
				[]byte{8, 8, 8, 8},
				[]byte{1, 1, 1, 1},
			}
			exampleURIs := []string{"spiffe://foo.foo.example.net", "spiffe://foo.bar.example.net"}
			cr, _, err := util.NewCertManagerBasicCertificateRequest( //nolint:staticcheck // TODO: fixed when refactored
				crKey.Name, issuerKey.Name, "KMSIssuer",
				&metav1.Duration{
					Duration: time.Hour * 24 * 90,
				},
				exampleDNSNames, exampleIPAddresses, exampleURIs, x509.RSA,
			)
			cr.ObjectMeta.Namespace = crKey.Namespace
			cr.Spec.IssuerRef.Group = kmsiapi.GroupVersion.Group
			Expect(err).To(BeNil())
			Expect(k8sClient.Create(context.Background(), cr)).Should(Succeed(), "failed to create test CertificateRequest resource")
			// cert-manager only signs approved requests, so approve it first.
			By("Approving request so it may be signed")
			Expect(k8sClient.Get(context.Background(), client.ObjectKeyFromObject(cr), cr)).Should(Succeed(), "failed to get CertificateRequest resource")
			apiutil.SetCertificateRequestCondition(cr, cmapi.CertificateRequestConditionApproved, cmmeta.ConditionTrue, "Approved", "")
			Expect(k8sClient.Status().Update(context.Background(), cr)).Should(Succeed(), "failed to approve CertificateRequest resource")
			By("Checking the certificate is signed by the KMS issuer")
			Eventually(
				func() bool {
					cr := &cmapi.CertificateRequest{}
					Expect(k8sClient.Get(context.Background(), crKey, cr)).Should(Succeed(), "failed to get CertificateRequest resource")
					return len(cr.Status.Certificate) > 0
				},
				time.Second*1, time.Millisecond*100,
			).Should(BeTrue(), "status.Certificate field should be set")
		})
	})
})
// TestRequestShouldBeProcessed is a table-driven test of
// CertificateRequestReconciler.requestShouldBeProcessed: given a
// CertificateRequest with preset status conditions, it asserts the
// processing decision, the resulting conditions, any recorded event, and
// the failure time.
func TestRequestShouldBeProcessed(t *testing.T) {
	// A fake clock pins LastTransitionTime / FailureTime values so the
	// expected conditions can be compared deterministically.
	fixedTime := time.Date(2021, time.January, 1, 0, 0, 0, 0, time.UTC)
	fclock := fakeclock.NewFakeClock(fixedTime)
	tests := map[string]struct {
		// conds are the conditions preset on the request before the call.
		conds []cmapi.CertificateRequestCondition
		// checkApproved toggles the reconciler's approval-gating feature.
		checkApproved bool
		// expConds are the conditions expected after the call.
		expConds []cmapi.CertificateRequestCondition
		// expShouldProcess is the expected processing decision.
		expShouldProcess bool
		// expEvent, when non-nil, is the single event expected to be recorded.
		expEvent *string
		// expFailureTime is the expected status failure time, if any.
		expFailureTime *metav1.Time
	}{
		"if request has true ready condition, exit false": {
			conds: []cmapi.CertificateRequestCondition{
				{
					Type:   cmapi.CertificateRequestConditionReady,
					Status: cmmeta.ConditionTrue,
					Reason: cmapi.CertificateRequestReasonIssued,
				},
			},
			checkApproved: false,
			expConds: []cmapi.CertificateRequestCondition{
				{
					Type:   cmapi.CertificateRequestConditionReady,
					Status: cmmeta.ConditionTrue,
					Reason: cmapi.CertificateRequestReasonIssued,
				},
			},
			expShouldProcess: false,
			expEvent:         nil,
			expFailureTime:   nil,
		},
		"if request has ready condition reason failed, exit false": {
			conds: []cmapi.CertificateRequestCondition{
				{
					Type:   cmapi.CertificateRequestConditionReady,
					Status: cmmeta.ConditionFalse,
					Reason: cmapi.CertificateRequestReasonFailed,
				},
			},
			checkApproved: false,
			expConds: []cmapi.CertificateRequestCondition{
				{
					Type:   cmapi.CertificateRequestConditionReady,
					Status: cmmeta.ConditionFalse,
					Reason: cmapi.CertificateRequestReasonFailed,
				},
			},
			expShouldProcess: false,
			expEvent:         nil,
			expFailureTime:   nil,
		},
		"if request has ready condition reason denied, exit false": {
			conds: []cmapi.CertificateRequestCondition{
				{
					Type:   cmapi.CertificateRequestConditionReady,
					Status: cmmeta.ConditionFalse,
					Reason: cmapi.CertificateRequestReasonDenied,
				},
			},
			checkApproved: false,
			expConds: []cmapi.CertificateRequestCondition{
				{
					Type:   cmapi.CertificateRequestConditionReady,
					Status: cmmeta.ConditionFalse,
					Reason: cmapi.CertificateRequestReasonDenied,
				},
			},
			expShouldProcess: false,
			expEvent:         nil,
			expFailureTime:   nil,
		},
		"if request has been denied, exit false and update ready reason with ready denied": {
			conds: []cmapi.CertificateRequestCondition{
				{
					Type:   cmapi.CertificateRequestConditionDenied,
					Status: cmmeta.ConditionTrue,
					Reason: "Denied",
				},
			},
			checkApproved: false,
			expConds: []cmapi.CertificateRequestCondition{
				{
					Type:   cmapi.CertificateRequestConditionDenied,
					Status: cmmeta.ConditionTrue,
					Reason: "Denied",
				},
				{
					Type:               cmapi.CertificateRequestConditionReady,
					Status:             cmmeta.ConditionFalse,
					Reason:             cmapi.CertificateRequestReasonDenied,
					Message:            "The CertificateRequest was denied by an approval controller",
					LastTransitionTime: &metav1.Time{Time: fixedTime},
				},
			},
			expShouldProcess: false,
			expEvent:         strP("Warning Denied The CertificateRequest was denied by an approval controller"),
			expFailureTime:   &metav1.Time{Time: fixedTime},
		},
		"if request has been denied and has a ready denied condition, exit false": {
			conds: []cmapi.CertificateRequestCondition{
				{
					Type:   cmapi.CertificateRequestConditionDenied,
					Status: cmmeta.ConditionTrue,
					Reason: "Denied",
				},
				{
					Type:               cmapi.CertificateRequestConditionReady,
					Status:             cmmeta.ConditionFalse,
					Reason:             cmapi.CertificateRequestReasonDenied,
					Message:            "The CertificateRequest was denied by an approval controller",
					LastTransitionTime: &metav1.Time{Time: fixedTime},
				},
			},
			checkApproved: false,
			expConds: []cmapi.CertificateRequestCondition{
				{
					Type:   cmapi.CertificateRequestConditionDenied,
					Status: cmmeta.ConditionTrue,
					Reason: "Denied",
				},
				{
					Type:               cmapi.CertificateRequestConditionReady,
					Status:             cmmeta.ConditionFalse,
					Reason:             cmapi.CertificateRequestReasonDenied,
					Message:            "The CertificateRequest was denied by an approval controller",
					LastTransitionTime: &metav1.Time{Time: fixedTime},
				},
			},
			expShouldProcess: false,
			expEvent:         nil,
			expFailureTime:   nil,
		},
		"if request has not been approved, but check approved condition is false, return true": {
			conds:            []cmapi.CertificateRequestCondition{},
			checkApproved:    false,
			expConds:         []cmapi.CertificateRequestCondition{},
			expShouldProcess: true,
			expEvent:         nil,
			expFailureTime:   nil,
		},
		"if request has not been approved, and check approved condition is true, return false": {
			conds:            []cmapi.CertificateRequestCondition{},
			checkApproved:    true,
			expConds:         []cmapi.CertificateRequestCondition{},
			expShouldProcess: false,
			expEvent:         nil,
			expFailureTime:   nil,
		},
		"if request has been approved and check approved condition is true, return true": {
			conds: []cmapi.CertificateRequestCondition{
				{
					Type:   cmapi.CertificateRequestConditionApproved,
					Status: cmmeta.ConditionTrue,
					Reason: "Approved",
				},
			},
			checkApproved: true,
			expConds: []cmapi.CertificateRequestCondition{
				{
					Type:   cmapi.CertificateRequestConditionApproved,
					Status: cmmeta.ConditionTrue,
					Reason: "Approved",
				},
			},
			expShouldProcess: true,
			expEvent:         nil,
			expFailureTime:   nil,
		},
	}
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			// Pin the condition helpers to the fake clock so transition times
			// are deterministic.
			apiutil.Clock = fclock
			scheme := runtime.NewScheme()
			_ = clientgoscheme.AddToScheme(scheme)
			_ = cmapi.AddToScheme(scheme)
			request := &cmapi.CertificateRequest{
				ObjectMeta: metav1.ObjectMeta{
					Name:      "test-cr",
					Namespace: "test-ns",
				},
				Status: cmapi.CertificateRequestStatus{
					Conditions: test.conds,
				},
			}
			fclient := fakeclient.NewClientBuilder().
				WithRuntimeObjects(request).
				WithScheme(scheme).
				Build()
			fakeRecorder := record.NewFakeRecorder(1)
			c := CertificateRequestReconciler{
				Client:                 fclient,
				Log:                    logf.Log,
				Recorder:               fakeRecorder,
				Clock:                  fclock,
				CheckApprovedCondition: test.checkApproved,
			}
			shouldProcess, err := c.requestShouldBeProcessed(context.TODO(), logf.Log, request)
			if err != nil {
				t.Errorf("unexpected error: %s", err)
			}
			if shouldProcess != test.expShouldProcess {
				t.Errorf("unexpected shouldProcess, exp=%t got=%t",
					test.expShouldProcess, shouldProcess)
			}
			updatedRequest := new(cmapi.CertificateRequest)
			err = fclient.Get(context.TODO(), client.ObjectKeyFromObject(request), updatedRequest)
			if err != nil {
				t.Errorf("unexpected error: %s", err)
			}
			// NOTE(review): updatedRequest is fetched above but the assertions
			// below compare the in-memory `request` mutated by the reconciler,
			// not the state persisted in the fake client — confirm this is
			// intentional (comparing updatedRequest would also have to cope
			// with nil vs empty condition slices after the round trip).
			if !apiequality.Semantic.DeepEqual(request.Status.Conditions, test.expConds) {
				t.Errorf("unexpected conditions, exp=%#+v got=%#+v",
					test.expConds, request.Status.Conditions)
			}
			if !apiequality.Semantic.DeepEqual(request.Status.FailureTime, test.expFailureTime) {
				t.Errorf("unexpected failureTime, exp=%#+v got=%#+v",
					test.expFailureTime, request.Status.FailureTime)
			}
			select {
			case event := <-fakeRecorder.Events:
				if test.expEvent == nil {
					t.Errorf("expected no event, got='%s'", event)
				} else if *test.expEvent != event {
					t.Errorf("unexpected event, exp='%s' got='%s'", *test.expEvent, event)
				}
				// NOTE(review): this break is redundant — a select case does
				// not fall through in Go.
				break
			default:
				if test.expEvent != nil {
					t.Errorf("unexpected event, exp='%s' got=''", *test.expEvent)
				}
			}
		})
	}
}
// strP returns a pointer to a copy of the given string; handy for filling
// *string fields in test tables.
func strP(s string) *string {
	value := s
	return &value
}
|
package oic
import (
"fmt"
"github.com/runtimeco/go-coap"
)
// Server dispatches incoming CoAP requests to registered resources.
type Server struct {
	rxer  Receiver // turns raw bytes into CoAP messages (returns nil until a full message is available — see Rx)
	rm    ResMgr   // resource registry consulted for each request
	isTcp bool     // true for TCP framing, false for datagram
}
// NewServer constructs a Server for the requested transport (TCP framing
// when isTcp is true, datagram otherwise), wiring up a fresh receiver and
// resource manager.
func NewServer(isTcp bool) Server {
	return Server{
		rxer:  NewReceiver(isTcp),
		rm:    NewResMgr(),
		isTcp: isTcp,
	}
}
// AddResource registers the resource with the server's resource manager.
// @return Error from the resource manager, if any.
func (s *Server) AddResource(r Resource) error {
	return s.rm.Add(r)
}
// Rx feeds raw bytes into the receiver and, when a complete request is
// available, dispatches it to the matching resource.
// Confirmable requests are answered with an Acknowledgement and
// non-confirmable ones with a NonConfirmable response; all other message
// types produce an error.
// @return Response to send back, if any.
func (s *Server) Rx(data []byte) (coap.Message, error) {
	m := s.rxer.Rx(data)
	if m == nil {
		// No complete message yet.
		return nil, nil
	}

	var typ coap.COAPType
	switch m.Type() {
	case coap.Confirmable:
		typ = coap.Acknowledgement
	case coap.NonConfirmable:
		typ = coap.NonConfirmable
	default:
		return nil, fmt.Errorf("Don't know how to handle CoAP message with "+
			"type=%d (%s)", m.Type(), m.Type().String())
	}

	code, payload := s.rm.Access(m)
	params := coap.MessageParams{
		Type:      typ,
		Code:      code,
		MessageID: NextMessageId(),
		Token:     m.Token(),
		Payload:   payload,
	}
	if s.isTcp {
		return coap.NewTcpMessage(params), nil
	}
	return coap.NewDgramMessage(params), nil
}
|
package main
import (
"html/template"
"log"
"net/http"
"path"
"strconv"
"sync"
"github.com/pkg/errors"
)
const (
	// Template file names rendered by the handlers below.
	editPost  = "editPost.html"
	showPost  = "showPost.html"
	listPosts = "listPosts.html"
)

// templateFiles lists every template file to pre-parse at startup.
var templateFiles = []string{
	editPost,
	showPost,
	listPosts,
}
// BlogServer holds the in-memory blog state served by the HTTP handlers.
type BlogServer struct {
	mu        sync.Mutex // guards Posts and Templates
	Title     string
	Posts     map[int]*BlogPost // posts keyed by BlogPost.ID
	Templates map[string]*template.Template
}
// BlogPost is a single blog entry.
type BlogPost struct {
	ID      int    // unique id; also the key in BlogServer.Posts
	Title   string
	Date    string // display date, free-form text
	Link    string
	Content string
}
// main seeds the blog server with demo content and serves it on :8080.
func main() {
	server := BlogServer{}
	server.init()

	mux := http.NewServeMux()
	mux.HandleFunc("/", server.handleRoot)
	mux.HandleFunc("/post/", server.handlePost)
	mux.HandleFunc("/edit/", server.handleEdit)

	const port = "8080"
	log.Printf("start server on port: %v", port)
	log.Fatal(http.ListenAndServe(":"+port, mux))
}
// init populates the server with its title, demo posts, and pre-parsed
// templates. Must be called before serving requests.
func (s *BlogServer) init() {
	s.Title = "Blog"
	s.Posts = s.loadPosts()
	s.Templates = s.loadTemplates()
}
// lock acquires the server mutex protecting Posts and Templates.
func (s *BlogServer) lock() {
	s.mu.Lock()
}

// unLock releases the server mutex.
func (s *BlogServer) unLock() {
	s.mu.Unlock()
}
// loadPosts seeds the in-memory post store with demo content.
// Each map key must equal the post's ID, because getPostByID looks posts up
// by ID and addPost writes them back under post.ID.
func (s *BlogServer) loadPosts() map[int]*BlogPost {
	r := make(map[int]*BlogPost)
	r[0] = &BlogPost{
		ID:      0,
		Title:   "Title1",
		Date:    "21 Feb 2020",
		Link:    "https://google/link1",
		Content: "Test content1",
	}
	r[1] = &BlogPost{
		ID:      1, // BUG FIX: was 0, out of sync with its map key
		Title:   "Title2",
		Date:    "22 Feb 2020",
		Link:    "https://google/link2",
		Content: "Test content2",
	}
	return r
}
// getPostByID parses postID and returns the matching post, or an error when
// the id is not numeric or no post exists for it.
func (s *BlogServer) getPostByID(postID string) (*BlogPost, error) {
	id, err := strconv.Atoi(postID)
	if err != nil {
		return nil, err
	}
	s.lock()
	defer s.unLock()
	blogPost, ok := s.Posts[id]
	if !ok {
		return nil, errors.Errorf("No post found for: %v", id)
	}
	return blogPost, nil
}
// getTemplate returns the cached template registered under name, or nil if
// no such template was loaded.
func (s *BlogServer) getTemplate(name string) *template.Template {
	s.lock()
	defer s.unLock()
	// FIX: the local was previously named "template", shadowing the imported
	// html/template package inside this scope.
	if tmpl, ok := s.Templates[name]; ok {
		return tmpl
	}
	return nil
}
// loadTemplates parses every file named in templateFiles from the
// "templates" directory and returns them keyed by file name.
// Panics (via template.Must) when a template fails to parse.
func (s *BlogServer) loadTemplates() map[string]*template.Template {
	r := make(map[string]*template.Template, len(templateFiles))
	for _, name := range templateFiles {
		// FIX: dropped the redundant string(name) conversion — name is
		// already a string.
		r[name] = template.Must(template.New("MyTemplate").ParseFiles(path.Join("templates", name)))
	}
	return r
}
// createNewPost creates or updates a post from the submitted form values.
// It is a no-op when no "id" form value is present; a non-numeric id is
// reported as an internal server error.
func (s *BlogServer) createNewPost(wr http.ResponseWriter, req *http.Request) {
	postID := req.FormValue("id")
	if len(postID) == 0 {
		return
	}
	id, err := strconv.Atoi(postID)
	if err != nil {
		err := errors.Wrapf(err, "Can not parse id value: %v", postID)
		http.Error(wr, err.Error(), http.StatusInternalServerError)
		log.Print(err)
		return
	}
	s.addPost(&BlogPost{
		ID:      id,
		Title:   req.FormValue("title"),
		Date:    req.FormValue("date"),
		Link:    req.FormValue("link"),
		Content: req.FormValue("content"),
	})
}
// addPost inserts or replaces the post stored under post.ID.
func (s *BlogServer) addPost(post *BlogPost) {
	s.lock()
	defer s.unLock()
	s.Posts[post.ID] = post
}
// handleRoot renders the post list; when the request carries post form
// values it first creates/updates that post via createNewPost.
func (s *BlogServer) handleRoot(wr http.ResponseWriter, req *http.Request) {
	fail := func(err error) {
		http.Error(wr, err.Error(), http.StatusInternalServerError)
		log.Print(err)
	}
	t := s.getTemplate(listPosts)
	if t == nil {
		fail(errors.Errorf("No template found: %v", listPosts))
		return
	}
	s.createNewPost(wr, req)
	if err := t.ExecuteTemplate(wr, "page", s); err != nil {
		fail(errors.Wrap(err, "Can not execute template"))
	}
}
// handlePost renders a single post selected by the "id" query parameter.
func (s *BlogServer) handlePost(wr http.ResponseWriter, req *http.Request) {
	fail := func(err error) {
		http.Error(wr, err.Error(), http.StatusInternalServerError)
		log.Print(err)
	}
	t := s.getTemplate(showPost)
	if t == nil {
		fail(errors.Errorf("No template found: %v", showPost))
		return
	}
	post, err := s.getPostByID(req.URL.Query().Get("id"))
	if err != nil {
		fail(errors.Wrap(err, "No post found"))
		return
	}
	if err := t.ExecuteTemplate(wr, "page", post); err != nil {
		fail(errors.Wrap(err, "Can not execute template"))
	}
}
// handleEdit renders the edit form for the post selected by the "id" query
// parameter.
func (s *BlogServer) handleEdit(wr http.ResponseWriter, req *http.Request) {
	fail := func(err error) {
		http.Error(wr, err.Error(), http.StatusInternalServerError)
		log.Print(err)
	}
	t := s.getTemplate(editPost)
	if t == nil {
		fail(errors.Errorf("No template found: %v", editPost))
		return
	}
	post, err := s.getPostByID(req.URL.Query().Get("id"))
	if err != nil {
		fail(errors.Wrap(err, "No post found"))
		return
	}
	if err := t.ExecuteTemplate(wr, "page", post); err != nil {
		fail(errors.Wrap(err, "Can not execute template"))
	}
}
|
package main
import (
"container/list"
"fmt"
)
// main demonstrates container/list: push three ints and print them front
// to back.
func main() {
	values := list.New()
	values.PushBack(10)
	values.PushBack(20)
	values.PushBack(30)
	for e := values.Front(); e != nil; e = e.Next() {
		fmt.Println(e.Value)
	}
}
|
package Problem0169
// majorityElement returns the element occurring more than len(nums)/2
// times, using the Boyer-Moore voting scheme.
// The problem guarantees nums is non-empty and that such a majority
// element exists.
func majorityElement(nums []int) int {
	candidate := nums[0]
	votes := 1
	for _, v := range nums[1:] {
		if v == candidate {
			votes++
		} else if votes > 0 {
			votes--
		} else {
			// Votes hit zero: restart the count with the current value.
			// Since the true majority element fills more than half of the
			// remaining suffix too, it still wins the final vote.
			candidate = v
			votes = 1
		}
	}
	return candidate
}
|
package v1
import (
"blog/app/web/services"
"github.com/kataras/iris/v12"
"github.com/mlogclub/simple"
)
// SystemController serves system-level configuration endpoints.
type SystemController struct {
	Ctx           iris.Context           // request context (injected by iris)
	SystemService services.SystemService // backing service for system config
}
// NewSystemController creates an empty SystemController; its fields are
// populated elsewhere (e.g. by the framework's dependency injection).
func NewSystemController() *SystemController {
	return &SystemController{}
}
// GetConfig returns the system configuration as a JSON result.
// FIX: renamed the receiver from "this" to "c" — Go convention uses a
// short receiver name, never "this"/"self".
func (c *SystemController) GetConfig() *simple.JsonResult {
	config := c.SystemService.GetSystemConfigs()
	return simple.JsonData(config)
}
|
package aoc2015
import (
"strconv"
"strings"
"time"
"github.com/golang/glog"
)
// lookAndSay returns the "look and say" expansion of input: each maximal
// run of an identical rune is replaced by its length followed by the rune.
// For instance:
//
//	lookAndSay("1") returns "11" (one '1')
//	lookAndSay("111221") returns "312211" (three '1', two '2', one '1')
//
// An empty input yields an empty string. Non-digit runes are handled the
// same way as digits and never cause a panic.
func lookAndSay(input string) string {
	if input == "" {
		return ""
	}
	runes := []rune(input)
	var out strings.Builder
	for i := 0; i < len(runes); {
		// Find the end of the current run of identical runes.
		j := i
		for j < len(runes) && runes[j] == runes[i] {
			j++
		}
		out.WriteString(strconv.Itoa(j - i))
		out.WriteRune(runes[i])
		i = j
	}
	return out.String()
}
// Day10 solves the tenth day puzzle "Elves Look, Elves Say".
//
// Input
//
// A single line containing the "seed". For example:
//
//	3113322113
//
// It is guaranteed that the input represents an integer.
//
// The sequence is expanded fifty times in total: answer1 is the resulting
// length after 40 expansions, answer2 the length after all 50.
func Day10(input string) (answer1, answer2 string, err error) {
	for ii := 0; ii < 50; ii++ {
		then := time.Now()
		input = lookAndSay(input)
		glog.Infof("%v took %v nanoseconds", ii+1, time.Since(then).Nanoseconds())
		if ii == 39 {
			// Length after the fortieth expansion.
			answer1 = strconv.Itoa(len(input))
		}
	}
	answer2 = strconv.Itoa(len(input))
	return
}
|
package accounts
import (
"net/http"
"os"
"testing"
"time"
uuid "github.com/satori/go.uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
const errorValidationJSON = `
{
"error_message": "validation failure"
}
`
// MockHandler is the signature of a stubbed HTTP round trip.
type MockHandler func(req *http.Request) (*http.Response, error)

// MockClient implements the client's HTTP-doer interface with a
// caller-supplied handler.
type MockClient struct {
	DoFunc MockHandler // round trip to run; when nil, Do returns an empty response
}
// Do runs the configured DoFunc when present; otherwise it returns an
// empty response and no error.
func (m *MockClient) Do(req *http.Request) (*http.Response, error) {
	if m.DoFunc == nil {
		return &http.Response{}, nil
	}
	return m.DoFunc(req)
}
// getEnv returns the value of the environment variable key, or fallback
// when the variable is unset. A variable set to the empty string is
// returned as-is.
func getEnv(key, fallback string) string {
	value, ok := os.LookupEnv(key)
	if !ok {
		return fallback
	}
	return value
}
// baseURL returns the API address under test, overridable via the
// API_ADDR environment variable.
func baseURL() string {
	return getEnv("API_ADDR", "http://localhost:8080")
}
// getMockClient returns a Client whose HTTP transport is replaced with the
// MockHandler produced by h.
// NOTE(review): the error from New is discarded — acceptable in tests only
// as long as baseURL() always yields a valid URL.
func getMockClient(h func() MockHandler) *Client {
	c, _ := New(baseURL())
	c.httpClient = &MockClient{DoFunc: h()}
	return c
}
// buildAccount creates a test Account with a random UUID, country "GB",
// a fixed account holder, and the given attributes.
func buildAccount(attr *AccountAttributes) *Account {
	return NewAccount(uuid.NewV4().String(), "GB", []string{"Samantha Holder"}, attr)
}
// TestWithBasePath verifies the WithBasePath option sets the client's
// base path.
func TestWithBasePath(t *testing.T) {
	client, err := New(baseURL(), WithBasePath("v1"))
	require.Nil(t, err)
	assert.Equal(t, "v1", client.basePath)
}
// TestWithRateLimiter verifies the WithRateLimiter option installs a
// rate limiter on the client.
func TestWithRateLimiter(t *testing.T) {
	client, err := New(baseURL(), WithRateLimiter(time.Duration(5), 10))
	require.Nil(t, err)
	require.NotNil(t, client.rateLimiter)
}
// TestWithTimeout verifies the WithTimeout option sets the request
// timeout on the client.
func TestWithTimeout(t *testing.T) {
	client, err := New(baseURL(), WithTimeout(time.Duration(5)*time.Second))
	require.Nil(t, err)
	assert.Equal(t, time.Duration(5)*time.Second, client.reqTimeout)
}
|
package main
import (
"bytes"
"encoding/json"
"flag"
"fmt"
"net/url"
contextio "github.com/dmlyons/contextIOTest/goContextIO"
)
// main fetches one message (including its body) from the context.io API
// using the key/secret flags and pretty-prints the JSON response.
func main() {
	key := flag.String("key", "", "Your CIO User Key")
	secret := flag.String("secret", "", "Your CIO User Secret")
	flag.Parse()

	c := contextio.NewContextIO(*key, *secret)

	params := url.Values{}
	params.Set("include_body", "1")
	eid := "<comments/267458040/created@basecamp.com>"
	q := `/2.0/accounts/551420ac615a99de12fee488/messages/` + url.QueryEscape(eid)
	j, err := c.DoJson("GET", q, params, nil)
	if err != nil {
		fmt.Println("ERROR:", err)
		// BUG FIX: previously fell through and tried to indent a nil/partial
		// payload; stop here instead.
		return
	}

	var out bytes.Buffer
	// BUG FIX: the json.Indent error was silently discarded.
	if err := json.Indent(&out, j, "", "  "); err != nil {
		fmt.Println("ERROR:", err)
		return
	}
	fmt.Println(out.String())
}
|
package servers
import (
"encoding/csv"
"os"
)
// Server is one entry parsed from the servers CSV file.
type Server struct {
	Host string // network address of the server
	Name string // display name
}
// GetServers reads resources/servers.csv (semicolon-delimited "name;host"
// rows) and returns the parsed entries. It panics when the file cannot be
// opened and stops reading at the first read error or end of file.
func GetServers() []Server {
	file, err := os.Open("resources/servers.csv")
	if err != nil {
		panic(err)
	}
	defer file.Close()

	reader := csv.NewReader(file)
	reader.Comma = ';'

	var servers []Server
	for {
		record, readErr := reader.Read()
		if readErr != nil {
			// EOF or malformed row: return what we have so far.
			return servers
		}
		servers = append(servers, Server{Name: record[0], Host: record[1]})
	}
}
|
package migrate
import (
"reflect"
"strings"
"github.com/neuronlabs/errors"
"github.com/neuronlabs/neuron-core/class"
"github.com/neuronlabs/neuron-core/mapping"
"github.com/neuronlabs/strcase"
"github.com/neuronlabs/neuron-postgres/log"
)
// Column is a postgres field kind: it describes a single table column
// derived from a model's struct field.
type Column struct {
	// Name defines the column name (snake_case of the struct field name;
	// see setName).
	Name string
	// Type is the column data type.
	Type DataTyper
	// Variables defines the data type specific variables.
	Variables []string
	// Constraints defines column constraints.
	Constraints []*Constraint
	// Indexes defines the column indexes.
	Indexes []*Index
	// Table is the pointer to the column's table.
	Table *Table
	// field is the mapping struct field this column was generated from.
	field *mapping.StructField
}
// Field returns the column's related *mapping.StructField — the model
// field this column was generated from.
func (c *Column) Field() *mapping.StructField {
	return c.field
}
// isNotNull reports whether the column already carries the NOT NULL
// constraint.
func (c *Column) isNotNull() bool {
	for i := range c.Constraints {
		if c.Constraints[i] == CNotNull {
			return true
		}
	}
	return false
}
// setName derives the column name from the struct field name, converted
// to snake_case.
func (c *Column) setName(field *mapping.StructField) {
	c.Name = strcase.ToSnake(field.Name())
}
// setConstraints derives the column constraints from its struct field:
// primary-key fields get PRIMARY KEY, and fields that are neither pointers
// nor have "null" in their type name are marked NOT NULL (recorded in the
// field store under NotNullKey).
func (c *Column) setConstraints() {
	if c.field.Kind() == mapping.KindPrimary {
		c.Constraints = append(c.Constraints, CPrimaryKey)
	}
	if c.isNotNull() {
		// Already marked; nothing more to derive.
		return
	}
	rf := c.field.ReflectField()
	isPtr := rf.Type.Kind() == reflect.Ptr
	looksNullable := strings.Contains(strings.ToLower(rf.Type.Name()), "null")
	if !isPtr && !looksNullable {
		c.Constraints = append(c.Constraints, CNotNull)
		c.Field().StoreSet(NotNullKey, struct{}{})
	}
}
// ColumnCreator is the function that creates custom.
// NOTE(review): this comment looks truncated — presumably "creates a
// custom column definition string"; confirm the intended wording.
type ColumnCreator func(c *Column) string
// FieldColumnName gets the column name for the provided field.
func FieldColumnName(field *mapping.StructField) (string, error) {
	column, err := fieldsColumn(field)
	if err != nil {
		return "", err
	}
	return column.Name, nil
}
// FieldsColumn gets the column for the provided field.
func FieldsColumn(field *mapping.StructField) (*Column, error) {
	return fieldsColumn(field)
}
// FieldIndexes gets the column's indexes.
func FieldIndexes(field *mapping.StructField) ([]*Index, error) {
	column, err := fieldsColumn(field)
	if err != nil {
		return nil, err
	}
	return column.Indexes, nil
}
// fieldsColumn extracts the *Column previously stored in the field's store
// under ColumnKey. It returns an internal-repository error when the key is
// absent or when the stored value has an unexpected type.
func fieldsColumn(field *mapping.StructField) (*Column, error) {
	col, ok := field.StoreGet(ColumnKey)
	if !ok {
		log.Debugf("No column found in the field: %s store.", field.NeuronName())
		return nil, errors.NewDetf(class.InternalRepository, "no column found in the field's '%s' store", field.Name())
	}
	// parse the column
	c, ok := col.(*Column)
	if !ok {
		log.Errorf("Column in the field's store is not a '*migrate.Column' : '%T'", col)
		return nil, errors.NewDetf(class.InternalRepository, "stored column for field: '%s' is not a *migrate.Column", field.Name())
	}
	return c, nil
}
|
package wx_service
import (
"errors"
"fmt"
"github.com/bitly/go-simplejson"
"github.com/freelifer/gohelper/models"
"github.com/freelifer/gohelper/pkg/cache"
"github.com/freelifer/gohelper/pkg/e"
"github.com/freelifer/gohelper/pkg/settings"
"github.com/freelifer/gohelper/pkg/utils"
"github.com/gin-gonic/gin/json"
"io/ioutil"
"net/http"
"time"
)
const (
	// wxUrl is the WeChat jscode2session endpoint used to exchange a login
	// code for a session key and openid.
	wxUrl = "https://api.weixin.qq.com/sns/jscode2session?grant_type=authorization_code"
)

var (
	// WX_LOGIN_UNKNOW is returned when WeChat reports a failure without an
	// errmsg field.
	// NOTE(review): idiomatic Go would name this ErrWxLoginUnknown, but
	// renaming the exported variable would break external callers.
	WX_LOGIN_UNKNOW error = errors.New("wx errmsg unknow")
)
// WxService performs the WeChat mini-program login flow.
type WxService struct {
	Code      string // login code supplied by the client
	SessionId string // session id produced by Login
}

// WeiXinData holds the credentials returned by the WeChat
// jscode2session endpoint.
type WeiXinData struct {
	SessionKey string
	Openid     string
}
// Login exchanges the WeChat login code for an openid, creates the wx user
// if it does not exist yet, and caches the serialized user under a fresh
// session id for 60 seconds.
func (s *WxService) Login() e.Err {
	data, err := GetWxOpenId(settings.WxCfg.Appid, settings.WxCfg.Secret, s.Code)
	if err != nil {
		return e.NewInnerErr(err.Error())
	}
	// FIX: the local error was previously named "e", shadowing the imported
	// package "e" within this scope; renamed for clarity.
	wxUser, createErr := models.CreateWxUserWhenNoExist(data.Openid)
	if createErr != nil {
		return createErr
	}
	s.SessionId = utils.NewSessionID()
	b, _ := json.Marshal(wxUser)
	// Cache the user JSON under the new session id for 60 seconds.
	cache.Put(s.SessionId, string(b), 60*time.Second)
	// save [key, value] to radis
	return nil
}
// Certificate is a placeholder for session-key verification; it currently
// accepts everything.
func Certificate(sessionKey string) error {
	return nil
}
// GetWxOpenId calls WeChat's jscode2session API to exchange a login code
// for the user's session key and id.
// NOTE(review): http.Get uses the default client with no timeout — consider
// a client with a deadline.
func GetWxOpenId(appid, secret, code string) (*WeiXinData, error) {
	url := fmt.Sprintf("%s&appid=%s&secret=%s&js_code=%s", wxUrl, appid, secret, code)
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	js, err := simplejson.NewJson(body)
	if err != nil {
		return nil, err
	}
	// An absent "openid" signals failure; surface WeChat's errmsg if any.
	openid := js.Get("openid").MustString()
	if len(openid) == 0 {
		errmsg := js.Get("errmsg").MustString()
		if len(errmsg) == 0 {
			return nil, WX_LOGIN_UNKNOW
		} else {
			return nil, errors.New(errmsg)
		}
	}
	var data = &WeiXinData{}
	data.SessionKey = js.Get("session_key").MustString()
	// NOTE(review): Openid is filled from "unionid", not the validated
	// "openid" key — looks inconsistent; confirm which id callers expect.
	data.Openid = js.Get("unionid").MustString()
	return data, nil
}
|
// Copyright 2018 Twitch Interactive, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"). You may not
// use this file except in compliance with the License. A copy of the License is
// located at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// or in the "license" file accompanying this file. This file is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
// express or implied. See the License for the specific language governing
// permissions and limitations under the License.
package haberdasher
// Go generate directives are a convenient way to describe compilation of proto
// files. They let users run 'go generate ./...', no Makefile necessary.
//
// This particular particular one invokes protoc using '$GOPATH/src'.
//
// This is used to tell protoc where to look up .proto files, through
// --proto_path.
//
// It is also used to tell protoc where to put output generated files, through
// --twirp_out and --go_out.
//go:generate protoc --proto_path=$GOPATH/src --twirp_out=$GOPATH/src --go_out=$GOPATH/src github.com/basvanbeek/twirp-example/rpc/haberdasher/haberdasher.proto
|
package main
import (
"bufio"
"fmt"
"os"
"strconv"
"strings"
)
// main reads whitespace-separated integers line by line from stdin and, for
// each number, prints the alphabet letters from the running position up to
// the running total, one group per line.
func main() {
	in := bufio.NewScanner(os.Stdin)
	letters := []string{"A", "B", "C", "D", "E", "F", "G", "H", "I", "J"}
	for in.Scan() {
		total, pos := 0, 0
		for _, tok := range strings.Fields(in.Text()) {
			v, _ := strconv.Atoi(tok)
			total += v
			// Emit letters between the previous total and the new one.
			for ; pos < total; pos++ {
				fmt.Print(letters[pos])
			}
			fmt.Println()
		}
	}
}
|
package UserDelivery
import (
"MainApplication/internal/User/UserModel"
"MainApplication/internal/User/UserUseCase"
"MainApplication/internal/errors"
"MainApplication/internal/pkg/context"
"MainApplication/proto/FileServise"
"bytes"
"fmt"
log "github.com/sirupsen/logrus"
"io"
"net/http"
"strconv"
"time"
)
// Interface is the HTTP delivery surface for user management: sessions,
// registration, profile and avatar handling.
type Interface interface {
	Session(w http.ResponseWriter, r *http.Request)
	Signup(w http.ResponseWriter, r *http.Request)
	SignIn(w http.ResponseWriter, r *http.Request)
	GetUserByRequest(r *http.Request) (*UserModel.User, *http.Cookie, uint16)
	Profile(w http.ResponseWriter, r *http.Request)
	Logout(w http.ResponseWriter, r *http.Request)
	LoadFile(user *UserModel.User, r *http.Request)
	GetAvatar(w http.ResponseWriter, r *http.Request)
}
// delivery implements Interface, delegating business logic to the use-case
// layer and avatar storage to the gRPC file service.
type delivery struct {
	Uc          UserUseCase.UserUseCase
	FileManager FileServise.FileServiceClient
}

// New builds the user HTTP delivery layer.
func New(usecase UserUseCase.UserUseCase, fileManager FileServise.FileServiceClient) Interface {
	return delivery{Uc: usecase, FileManager: fileManager}
}
// Session dispatches on HTTP method: POST signs a user in, DELETE logs out.
// Other methods are ignored.
func (de delivery) Session(w http.ResponseWriter, r *http.Request) {
	switch r.Method {
	case http.MethodPost:
		de.SignIn(w, r)
	case http.MethodDelete:
		de.Logout(w, r)
	}
}
// Signup registers a new user from multipart form values (name, surname,
// email, password1, optional avatar) and, on success, issues a session
// cookie. The response body always comes from SignUpError.
func (de delivery) Signup(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		// NOTE(review): non-POST requests get an empty 200 response.
		return
	}
	var user UserModel.User
	user.Name = context.GetStrFormValueSafety(r, "name")
	user.Surname = context.GetStrFormValueSafety(r, "surname")
	user.Email = context.GetStrFormValueSafety(r, "email")
	user.Password = context.GetStrFormValueSafety(r, "password1")
	// Store the avatar (if supplied) before creating the account.
	de.LoadFile(&user, r)
	err, sid := de.Uc.Signup(user)
	var response []byte
	if err == nil {
		cookie := &http.Cookie{
			Name:  "session_id",
			Value: sid,
			// Effectively non-expiring (~17 years).
			Expires: time.Now().Add(15 * 10000 * time.Hour),
		}
		cookie.Path = "/"
		http.SetCookie(w, cookie)
		response = SignUpError(err, cookie)
	} else {
		response = SignUpError(err, nil)
	}
	w.Write(response)
}
// SignIn authenticates a user from form values (email, password) and, on
// success, issues a session cookie. The response body comes from SignInError.
func (de delivery) SignIn(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		w.Write(errors.GetErrorNotPostAns())
		return
	}
	var user UserModel.User
	user.Email = context.GetStrFormValueSafety(r, "email")
	user.Password = context.GetStrFormValueSafety(r, "password")
	err, sid := de.Uc.SignIn(user)
	var response []byte
	if err == nil {
		cookie := &http.Cookie{
			Name:  "session_id",
			Value: sid,
			// Effectively non-expiring (~17 years).
			Expires: time.Now().Add(15 * 10000 * time.Hour),
		}
		cookie.Path = "/"
		http.SetCookie(w, cookie)
		response = SignInError(err, cookie)
	} else {
		response = SignInError(err, nil)
	}
	w.Write(response)
}
// GetUserByRequest resolves the user behind the request's "session_id"
// cookie. Returns 200 on success, 401 when the cookie is absent, and 402
// when the session or user lookup fails.
// NOTE(review): 402 (Payment Required) reads like an internal status code,
// not real HTTP semantics — confirm against the error mapping.
func (de delivery) GetUserByRequest(r *http.Request) (*UserModel.User, *http.Cookie, uint16) {
	session, err := r.Cookie("session_id")
	if err == http.ErrNoCookie {
		return nil, nil, 401
	}
	uid, ok := de.Uc.GetDB().IsOkSession(session.Value)
	if ok != nil {
		return nil, nil, 402
	}
	user, err := de.Uc.GetDB().GetUserByUID(uid)
	if err != nil {
		return nil, nil, 402
	}
	return user, session, 200
}
// Profile serves the profile endpoint: POST delegates to Signup, GET
// returns the current user, PUT updates name/surname (and optional avatar).
func (de delivery) Profile(w http.ResponseWriter, r *http.Request) {
	if r.Method == http.MethodPost {
		de.Signup(w, r)
		return
	}
	// All remaining methods require an authenticated session.
	user, session, err := de.GetUserByRequest(r)
	if err != 200 {
		w.Write(CookieError(err))
		return
	}
	if r.Method == http.MethodGet {
		w.Write(errors.GetOkAnsData(session.Value, *user))
		return
	} else if r.Method == http.MethodPut {
		var up UserModel.User
		// Email identifies the record being updated; it is not editable here.
		up.Email = user.Email
		up.Name = context.GetStrFormValueSafety(r, "profile_firstName")
		up.Surname = context.GetStrFormValueSafety(r, "profile_lastName")
		de.LoadFile(&up, r)
		err := de.Uc.Profile(up)
		w.Write(ProfileError(err, session))
		return
	}
	w.Write(errors.GetErrorUnexpectedAns())
}
// Logout terminates the session identified by the request cookie. Only
// DELETE is accepted; other methods receive the "not post" error payload.
// On success the cookie is expired client-side by dating it in the past.
func (de delivery) Logout(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodDelete {
		w.Write(errors.GetErrorNotPostAns())
		return
	}
	_, session, err := de.GetUserByRequest(r)
	if err != 200 {
		w.Write(CookieError(err))
		return
	}
	e := de.Uc.Logout(session.Value)
	if e == nil {
		session.Expires = time.Now().AddDate(0, 0, -1)
		http.SetCookie(w, session)
	}
	w.Write(LogoutError(e))
	// The original had a trailing unreachable write of
	// errors.GetErrorUnexpectedAns() after both branches returned; removed.
}
// LoadFile extracts the optional "avatar" multipart file from the request
// and forwards it to the file service keyed by the user's email. It is
// best-effort: failures are logged and do not abort the caller.
func (de delivery) LoadFile(user *UserModel.User, r *http.Request) {
	file, fileHeader, err := r.FormFile("avatar")
	// Check the error before touching file (the original tested file first).
	if err != nil || file == nil {
		// No avatar supplied or malformed form — nothing to do.
		return
	}
	// The multipart file was previously never closed.
	defer file.Close()
	buf := bytes.NewBuffer(nil)
	if _, err := io.Copy(buf, file); err != nil {
		log.Println("EEERR", err)
		// Don't upload a partially-read avatar.
		return
	}
	avatar := FileServise.Avatar{
		Email:    (*user).Email,
		FileName: fileHeader.Filename,
		Content:  buf.Bytes(),
	}
	// Surface (rather than silently drop) the upload error.
	if _, err := de.FileManager.SetAvatar(r.Context(), &avatar); err != nil {
		log.Println("set avatar error:", err)
	}
}
// GetAvatar streams the authenticated user's avatar as image/jpeg. OPTIONS
// gets an empty body (CORS preflight); methods other than GET are ignored.
func (de delivery) GetAvatar(w http.ResponseWriter, r *http.Request) {
	if r.Method == http.MethodOptions {
		w.Write([]byte(""))
		return
	}
	if r.Method != http.MethodGet {
		return
	}
	user, _, status := de.GetUserByRequest(r)
	if status != 200 {
		// The original built the error payload but never wrote it.
		w.Write(CookieError(status))
		return
	}
	avatar, err := de.FileManager.GetAvatar(r.Context(), &FileServise.User{Email: user.Email})
	if err != nil {
		fmt.Println("GET AVATAR ERROR ", err)
		// The original fell through and dereferenced a nil response here.
		w.Write(errors.GetErrorUnexpectedAns())
		return
	}
	w.Header().Set("Content-Type", "image/jpeg")
	w.Header().Set("Content-Length", strconv.Itoa(len(avatar.Content)))
	if _, err := w.Write(avatar.Content); err != nil {
		w.Write(errors.GetErrorUnexpectedAns())
	}
}
|
package nethelper
import (
"net"
"net/http"
logger "github.com/panlibin/vglog"
)
// handlerWrapper adapts a plain handler func, adding a permissive CORS
// header and eager form parsing before each call.
type handlerWrapper struct {
	f func(w http.ResponseWriter, pReq *http.Request)
}

// ServeHTTP implements http.Handler.
// NOTE(review): the ParseForm error is silently ignored.
func (h *handlerWrapper) ServeHTTP(w http.ResponseWriter, pReq *http.Request) {
	w.Header().Set("Access-Control-Allow-Origin", "*")
	pReq.ParseForm()
	h.f(w, pReq)
}
// HTTPServer is a thin wrapper pairing an http.Server with its ServeMux.
type HTTPServer struct {
	server *http.Server
	router *http.ServeMux
}
// NewHTTPServer allocates an HTTPServer with a fresh http.Server and mux.
func NewHTTPServer() *HTTPServer {
	return &HTTPServer{
		server: new(http.Server),
		router: http.NewServeMux(),
	}
}
// Start begins listening on addr and serves in a background goroutine. TLS
// is used when both certFile and keyFile are non-empty. The returned error
// reflects only the initial net.Listen; serve errors are not surfaced.
func (s *HTTPServer) Start(addr string, certFile string, keyFile string) error {
	logger.Infof("start http server")
	s.server.Addr = addr
	s.server.Handler = s.router
	ln, err := net.Listen("tcp", addr)
	if err != nil {
		logger.Errorf("start http server error: %v", err)
		return err
	}
	go func() {
		// Serve blocks until the listener is closed via Stop.
		if certFile == "" || keyFile == "" {
			s.server.Serve(ln)
		} else {
			s.server.ServeTLS(ln, certFile, keyFile)
		}
	}()
	logger.Infof("http server listen on %s", addr)
	return err
}
// Stop closes the server immediately, dropping in-flight connections
// (http.Server.Close, not a graceful Shutdown).
func (s *HTTPServer) Stop() {
	s.server.Close()
}
// Handle registers f for pattern on the internal mux, wrapped with the CORS
// header and form parsing of handlerWrapper.
func (s *HTTPServer) Handle(pattern string, f func(w http.ResponseWriter, pReq *http.Request)) {
	s.router.Handle(pattern, &handlerWrapper{f})
}
|
package linkedlist
// ListNode is a singly linked list node.
type ListNode struct {
	Val  int
	Next *ListNode
}
// mergeTwoLists (LeetCode 21) merges two sorted lists into one sorted list
// by splicing the existing nodes together; only a sentinel head is allocated.
func mergeTwoLists(l1 *ListNode, l2 *ListNode) *ListNode {
	sentinel := &ListNode{}
	tail := sentinel
	for l1 != nil && l2 != nil {
		if l1.Val < l2.Val {
			tail.Next, l1 = l1, l1.Next
		} else {
			tail.Next, l2 = l2, l2.Next
		}
		tail = tail.Next
	}
	// At most one list is non-empty now; append the remainder.
	if l1 != nil {
		tail.Next = l1
	} else {
		tail.Next = l2
	}
	return sentinel.Next
}
// removeNthFromEnd (LeetCode 19) deletes the n-th node from the end of the
// list and returns the head.
func removeNthFromEnd(head *ListNode, n int) *ListNode {
	if head == nil {
		return nil
	}
	slow, fast := head, head
	// 1. Advance the fast pointer n steps.
	for i := 0; i < n; i++ {
		// Deleting the head itself: the list has exactly n nodes.
		if fast.Next == nil && i == n-1 {
			head, head.Next = head.Next, nil
			return head
		}
		// If n were guaranteed valid this check could be skipped: the
		// target node does not exist (e.g. 2 nodes but n == 3).
		if fast.Next == nil && i < n-1 {
			return nil
		}
		fast = fast.Next
	}
	// 2. Move both pointers together until fast reaches the tail.
	for fast.Next != nil {
		slow = slow.Next
		fast = fast.Next
	}
	// Reuse fast to hold the node being removed so relinking is safe.
	fast = slow.Next
	slow.Next = fast.Next
	fast.Next = nil
	return head
}
// reverseList (LeetCode 206) reverses a singly linked list iteratively.
func reverseList(head *ListNode) *ListNode {
	var prev *ListNode
	cur := head
	for cur != nil {
		next := cur.Next
		cur.Next = prev
		prev, cur = cur, next
	}
	return prev
}
// reverseListWithRecursion1 (LeetCode 206) reverses a singly linked list
// via tail recursion.
func reverseListWithRecursion1(head *ListNode) *ListNode {
	return reverse(head, nil)
}

// reverse prepends each visited node onto pre, returning the new head once
// the input is exhausted.
func reverse(head, pre *ListNode) *ListNode {
	if head == nil {
		return pre
	}
	temp := head.Next
	head.Next = pre
	return reverse(temp, head)
}
// reverseListWithRecursion2 (LeetCode 206) reverses a list recursively.
// 1->2->3->4->...-> k -> k+1<-k+2<-...<-n
// Invariant: when head == k, every node after k is already reversed.
func reverseListWithRecursion2(head *ListNode) *ListNode {
	if head == nil || head.Next == nil {
		return head
	}
	// Reverse everything after head and get the reversed list's head.
	p := reverseList(head.Next)
	// (k+1).Next = k
	head.Next.Next = head
	// Break k's forward link to avoid a two-node cycle.
	head.Next = nil
	return p
}
// middleNode (LeetCode 876) returns the middle node of a non-empty list;
// for an even length it returns the second of the two middle nodes.
func middleNode(head *ListNode) *ListNode {
	// Fast pointer moves two steps per slow step, so slow lands mid-list.
	slow, fast := head, head
	for fast != nil && fast.Next != nil {
		slow = slow.Next
		fast = fast.Next.Next
	}
	return slow
}
|
// Copyright 2018 Diego Bernardes. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package http
import (
"encoding/json"
"fmt"
"net/url"
"strings"
"time"
"github.com/pkg/errors"
"github.com/satori/go.uuid"
"github.com/diegobernardes/flare"
"github.com/diegobernardes/flare/infra/wildcard"
)
// pagination is a local alias of flare.Pagination used to attach the HTTP
// JSON representation.
type pagination flare.Pagination

// MarshalJSON implements json.Marshaler.
func (p *pagination) MarshalJSON() ([]byte, error) {
	return json.Marshal(&struct {
		Limit  int `json:"limit"`
		Offset int `json:"offset"`
		Total  int `json:"total"`
	}{
		Limit:  p.Limit,
		Total:  p.Total,
		Offset: p.Offset,
	})
}
// resource is a local alias of flare.Resource used to attach the HTTP JSON
// representation.
type resource flare.Resource

// MarshalJSON implements json.Marshaler, flattening the change description,
// unescaping the endpoint and emitting an RFC3339 creation timestamp.
func (r *resource) MarshalJSON() ([]byte, error) {
	change := map[string]string{
		"field": r.Change.Field,
	}
	// format is optional; omit the key entirely when unset.
	if r.Change.Format != "" {
		change["format"] = r.Change.Format
	}
	endpoint, err := url.QueryUnescape(r.Endpoint.String())
	if err != nil {
		return nil, errors.Wrap(err, "error during endpoint unescape")
	}
	return json.Marshal(&struct {
		Id        string            `json:"id"`
		Endpoint  string            `json:"endpoint"`
		Change    map[string]string `json:"change"`
		CreatedAt string            `json:"createdAt"`
	}{
		Id:        r.ID,
		Endpoint:  endpoint,
		Change:    change,
		CreatedAt: r.CreatedAt.Format(time.RFC3339),
	})
}
// resourceCreate is the inbound payload for resource creation; RawEndpoint
// is parsed into endpoint by init after normalize/unescape run.
type resourceCreate struct {
	endpoint    url.URL
	RawEndpoint string `json:"endpoint"`
	Change      struct {
		Field  string `json:"field"`
		Format string `json:"format"`
	} `json:"change"`
}
// valid runs all payload validations: endpoint well-formedness, wildcard
// rules, and the mandatory change field.
func (r *resourceCreate) valid() error {
	if err := r.validEndpoint(); err != nil {
		return errors.Wrap(err, "invalid endpoint")
	}
	if err := r.validEndpointWildcard(); err != nil {
		return errors.Wrap(err, "invalid endpoint")
	}
	if r.Change.Field == "" {
		return errors.New("missing change field")
	}
	return nil
}
// validEndpoint checks that the parsed endpoint is a plain http(s) URL:
// host and path are required; opaque data, userinfo, query string and
// fragment are all rejected.
func (r *resourceCreate) validEndpoint() error {
	if r.endpoint.Opaque != "" {
		return fmt.Errorf("should not have opaque content '%s'", r.endpoint.Opaque)
	}
	if r.endpoint.User != nil {
		return errors.New("should not have user")
	}
	if r.endpoint.Host == "" {
		return errors.New("missing host")
	}
	if r.endpoint.Path == "" {
		return errors.New("missing path")
	}
	if r.endpoint.RawQuery != "" {
		return fmt.Errorf("should not have query string '%s'", r.endpoint.RawQuery)
	}
	if r.endpoint.Fragment != "" {
		return fmt.Errorf("should not have fragment '%s'", r.endpoint.Fragment)
	}
	switch r.endpoint.Scheme {
	case "http", "https":
	case "":
		return errors.New("missing scheme")
	default:
		return errors.New("unknown scheme")
	}
	return nil
}
// validEndpointWildcard requires the endpoint path to contain at least one
// wildcard and no duplicated wildcards.
func (r *resourceCreate) validEndpointWildcard() error {
	if !wildcard.Present(r.endpoint.Path) {
		return errors.New("missing wildcard")
	}
	if err := wildcard.ValidURL(r.endpoint.Path); err != nil {
		return errors.Wrap(err, "can't have duplicated wildcards")
	}
	return nil
}
// toFlareResource materializes the request payload as a flare.Resource
// with a freshly generated UUID.
func (r *resourceCreate) toFlareResource() *flare.Resource {
	return &flare.Resource{
		ID:       uuid.NewV4().String(),
		Endpoint: r.endpoint,
		Change: flare.ResourceChange{
			Field:  r.Change.Field,
			Format: r.Change.Format,
		},
	}
}
// normalize canonicalizes the raw endpoint: trims surrounding whitespace,
// normalizes wildcards, and strips a single trailing slash.
func (r *resourceCreate) normalize() {
	if r.RawEndpoint == "" {
		return
	}
	r.RawEndpoint = strings.TrimSpace(r.RawEndpoint)
	r.RawEndpoint = wildcard.Normalize(r.RawEndpoint)
	// Guard: the endpoint may have been reduced to "" (e.g. all-whitespace
	// input); indexing the last byte would otherwise panic.
	if r.RawEndpoint == "" {
		return
	}
	if r.RawEndpoint[len(r.RawEndpoint)-1] == '/' {
		r.RawEndpoint = r.RawEndpoint[:len(r.RawEndpoint)-1]
	}
}
// unescape decodes percent-escapes in the raw endpoint in place.
func (r *resourceCreate) unescape() error {
	endpoint, err := url.QueryUnescape(r.RawEndpoint)
	if err != nil {
		return errors.Wrap(err, "error during path unescape")
	}
	r.RawEndpoint = endpoint
	return nil
}
// init parses the (already normalized/unescaped) raw endpoint into the
// url.URL consumed by the validators.
func (r *resourceCreate) init() error {
	endpoint, err := url.Parse(r.RawEndpoint)
	if err != nil {
		return errors.Wrap(err, "error during endpoint parse")
	}
	r.endpoint = *endpoint
	return nil
}
// response is the JSON envelope for resource endpoints: either a single
// resource or a paginated collection.
type response struct {
	Pagination *pagination
	Resources  []resource
	Resource   *resource
}
// MarshalJSON encodes either the single resource, when set, or an envelope
// holding the pagination block and the resource list.
func (r *response) MarshalJSON() ([]byte, error) {
	if r.Resource != nil {
		return json.Marshal(r.Resource)
	}
	return json.Marshal(map[string]interface{}{
		"pagination": r.Pagination,
		"resources":  r.Resources,
	})
}
// transformResources converts a slice of flare.Resource into the local
// JSON wrapper type.
func transformResources(r []flare.Resource) []resource {
	converted := make([]resource, len(r))
	for idx, res := range r {
		converted[idx] = resource(res)
	}
	return converted
}
// transformResource casts a flare.Resource to its local JSON wrapper.
func transformResource(r *flare.Resource) *resource { return (*resource)(r) }

// transformPagination casts a flare.Pagination to its local JSON wrapper.
func transformPagination(p *flare.Pagination) *pagination { return (*pagination)(p) }
|
/*
# -*- coding: utf-8 -*-
# @Author : joker
# @Time : 2021/7/13 9:11 上午
# @File : jz_31_整数中1出现的次数.go
# @Description :
# @Attention :
*/
package offer
// NumberOf1Between1AndN_Solution counts how many times the digit 1 appears
// in the decimal representations of all integers in [1, n].
// The previous implementation counted odd numbers (i&1 >= 1), which answers
// a different question than the problem (剑指offer 31) asks.
// Works digit by digit in O(log10 n) instead of scanning all n values.
func NumberOf1Between1AndN_Solution(n int) int {
	count := 0
	// For each decimal position (base = 1, 10, 100, ...), count the 1s that
	// position contributes across 1..n, using the digits above (high),
	// at (cur) and below (low) the position.
	for base := 1; base <= n; base *= 10 {
		high := n / (base * 10)
		cur := (n / base) % 10
		low := n % base
		switch {
		case cur == 0:
			count += high * base
		case cur == 1:
			count += high*base + low + 1
		default:
			count += (high + 1) * base
		}
	}
	return count
}
|
package sort
import (
)
// ShellSort sorts arr in place using Shell's method with the Knuth gap
// sequence (3h+1) provided by chooseH, and returns the same slice.
func ShellSort(arr []string) []string {
	size := len(arr)
	for gap := chooseH(size); gap >= 1; gap /= 3 {
		// Gapped insertion sort: each pass leaves the slice gap-sorted.
		for i := gap; i < size; i++ {
			for j := i; j >= gap && arr[j] < arr[j-gap]; j -= gap {
				arr[j], arr[j-gap] = arr[j-gap], arr[j]
			}
		}
	}
	return arr
}
// chooseH returns the largest gap of the Knuth sequence 1, 4, 13, 40, ...
// that is still below n/3 (minimum 1).
func chooseH(n int) int {
	gap := 1
	for gap < n/3 {
		gap = gap*3 + 1
	}
	return gap
}
|
package main
import (
"fmt"
)
// main smoke-tests the string utilities defined elsewhere in this package.
func main() {
	fmt.Printf("Hello strings \n")
	fmt.Printf("IsUnique %v \n", IsUnique("abcdefghi"))
	fmt.Printf("IsPermutation (true) %v \n", IsPermutation("dog", "god"))
}
|
package main
import (
"path/filepath"
"sync"
bitbucket "github.com/PremiereGlobal/mkdocs-generator/bitbucket"
)
// masterFileList holds all of the files that have been processed so we don't duplicate.
// Map keys should be in the formats:
// projects/<project>/repos/<repo>/raw/<filepath>
// or
// projects/<project>/repos/<repo>/browse/<filepath>
// A sync.Map because worker goroutines record files concurrently.
var masterFileList sync.Map
// generate drives the whole docs build: prepares the build directory, walks
// every Bitbucket project through a worker pool, then optionally assembles
// the mkdocs nav.
func generate() {
	// Ensure the build directory is good to go
	ensureBuildDir()
	bbConfig := bitbucket.BitbucketClientConfig{
		Url:       Args.GetString("bitbucket-url"),
		Username:  Args.GetString("bitbucket-user"),
		Password:  Args.GetString("bitbucket-password"),
		Workspace: Args.GetString("bitbucket-workspace"),
		Logger:    log,
	}
	bb, err := bitbucket.NewBitbucketClient(&bbConfig)
	if err != nil {
		log.Fatal("Unable to create Bitbucket client ", err)
	}
	// Guard against a non-positive worker count from the CLI.
	workerCount := Args.GetInt("workers")
	if workerCount <= 0 {
		workerCount = 1
	}
	taskChan, _, wg := NewTaskQueue(workerCount)
	// Get the list of projects
	projects, err := bb.ListProjects()
	if err != nil {
		log.Fatal("Unable to list projects: ", err)
	}
	// Loop through the projects and add a project task to the queue
	for _, p := range projects {
		taskProject := p
		task := projectTask{project: taskProject}
		taskChan <- task
	}
	// Now wait for all the tasks to finish
	wg.Wait()
	// If user provided mkdocs directory, build the navigation from it
	docsDir := Args.GetString("docs-dir")
	if docsDir != "" {
		makeNav(docsDir)
	}
}
// ensureBuildDir ensures that the build directory exists, is a directory and
// is empty, creating it if need be. Any violation aborts the program.
func ensureBuildDir() {
	buildDir := filepath.Join(Args.GetString("build-dir"), "docs")
	if ok, _ := PathExists(buildDir); ok {
		if ok, _ := IsDirectory(buildDir); !ok {
			log.Fatal("Build directory path exists and is not a directory")
		}
		if empty, _ := IsDirEmpty(buildDir); !empty {
			log.Fatal("Build directory exists and is not empty")
		}
	} else {
		log.Debug("Creating build directory ", buildDir)
		err := CreateDirIfNotExist(buildDir)
		if err != nil {
			log.Fatal("Unable to create build directory ", err)
		}
	}
}
|
package main
import "fmt"
// main solves Zhang Qiujian's classic "hundred coins, hundred chickens"
// puzzle: a rooster costs 5 coins, a hen 3 coins, and chicks are 3 for one
// coin; buy exactly 100 birds with exactly 100 coins. Every solution is
// printed, followed by the number of combinations examined.
func main() {
	attempts := 0
	// Roosters are bounded by 100/5 and hens by 100/3; chicks come in
	// threes, so only multiples of 3 are candidates.
	for rooster := 0; rooster <= 20; rooster++ {
		for hen := 0; hen <= 33; hen++ {
			for chick := 0; chick <= 100; chick += 3 {
				attempts++
				if rooster+hen+chick == 100 && rooster*5+hen*3+chick/3 == 100 {
					fmt.Println(rooster, hen, chick)
				}
			}
		}
	}
	fmt.Println(attempts)
}
|
package theme
import (
"encoding/xml"
"fmt"
"html"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"strings"
"time"
"github.com/thoas/go-funk"
"k8s.io/klog"
)
// JcinkXML is the root <skin> document of a jcink theme export: name, date,
// stylesheet, wrappers, plus named macro and template items.
type JcinkXML struct {
	XMLName    xml.Name `xml:"skin"`
	Text       string   `xml:",chardata"`
	ThemeName  string   `xml:"skinname"`
	Date       string   `xml:"date"`
	Stylesheet string   `xml:"stylesheet"`
	Wrappers   string   `xml:"wrappers"`
	Macros     struct {
		Text string `xml:",chardata"`
		Item []Item `xml:"item"`
	} `xml:"macros"`
	Templates struct {
		Item []Item `xml:"item"`
	} `xml:"templates"`
}
// Item is a single macro or template: the name attribute plus raw contents.
type Item struct {
	Text string `xml:",chardata"`
	Name string `xml:"name,attr"`
}
// Config describes where a theme's sources live (paths joined under
// RootDir by listDirectory) and the theme name used in the output XML.
type Config struct {
	Name           string // theme name written to <skinname>
	Wrapper        string // path to the wrapper HTML file
	MacroFolder    string // directory of macro override files
	TemplateFolder string // directory of HTML template files
	StylesSheet    string // path to the CSS file
	RootDir        string // base directory for listDirectory lookups
}
// Construct builds the complete jcink theme XML (stylesheet, wrapper,
// macros merged with defaults, and templates) and returns it as a string.
func (c Config) Construct() (*string, error) {
	timestamp := time.Now().Format("Monday 2 of Jan 2006 15:04:05 PM")
	output := &JcinkXML{
		ThemeName: c.Name,
		Date:      timestamp,
	}
	// Stylesheet
	stylesheetString, err := c.getFileString(c.StylesSheet)
	if err != nil {
		return nil, err
	}
	// .post1 {} is required to pass xml upload validation in JCINK
	output.Stylesheet = fmt.Sprintf("\n%s\n%s\n", stylesheetString, ".post1 {} /* this is required to pass JCINK upload validation */")
	// Wrapper
	wrapperString, err := c.getFileString(c.Wrapper)
	if err != nil {
		return nil, err
	}
	output.Wrappers = wrapperString
	// Get Macros: user overrides merged over the built-in defaults.
	macroFiles, err := c.getMacroFiles()
	if err != nil {
		return nil, err
	}
	macroOverrides, err := c.buildItems(macroFiles)
	if err != nil {
		return nil, err
	}
	macros := combineMacros(macroOverrides)
	output.Macros.Item = macros
	// Templates
	templateFiles, err := c.getTemplateFiles()
	if err != nil {
		return nil, err
	}
	templates, err := c.buildItems(templateFiles)
	if err != nil {
		return nil, err
	}
	output.Templates.Item = templates
	// marshal the xml file
	data, err := xml.MarshalIndent(output, " ", " ")
	if err != nil {
		return nil, err
	}
	// jcink expects single-quoted item names; MarshalIndent emits doubles.
	fixQuotes := regexp.MustCompile(`<item name=("(.+)")>`)
	data = fixQuotes.ReplaceAll(data, []byte(`<item name='$2'>`))
	// Unescape so the raw HTML/CSS survives inside the XML body.
	out := fmt.Sprintf("%s%s", xml.Header, html.UnescapeString(string(data)))
	return &out, nil
}
// getMacroFiles lists the macro folder and keeps only files whose base name
// (without extension) is an allowed macro.
func (c Config) getMacroFiles() ([]string, error) {
	finalMacroList := []string{}
	macroFiles, err := c.listDirectory(c.MacroFolder)
	if err != nil {
		return nil, err
	}
	for _, macroFile := range macroFiles {
		// Use the shared helper that strips only the trailing extension.
		// The previous strings.Replace(base, ext, "", 1) removed the first
		// occurrence of the extension anywhere in the name, corrupting
		// names that contain it mid-string.
		macroName := getPlainFilename(macroFile)
		if !funk.Contains(allowedMacros, macroName) {
			klog.Errorf("skipping non-allowed macro: %s", macroName)
			continue
		}
		finalMacroList = append(finalMacroList, macroFile)
	}
	return finalMacroList, nil
}
// getTemplateFiles lists the template folder and keeps only files whose
// base name is an allowed HTML template.
func (c Config) getTemplateFiles() ([]string, error) {
	finalTemplateList := []string{}
	templateFiles, err := c.listDirectory(c.TemplateFolder)
	if err != nil {
		return nil, err
	}
	for _, templateFile := range templateFiles {
		if !funk.Contains(allowedHTMLTemplates, filepath.Base(templateFile)) {
			klog.Errorf("skipping non-allowed HTML template: %s", filepath.Base(templateFile))
			continue
		}
		finalTemplateList = append(finalTemplateList, templateFile)
	}
	return finalTemplateList, nil
}
// buildItems returns a list of items from a list of files; unreadable files
// are logged and skipped rather than failing the whole build.
func (c Config) buildItems(files []string) ([]Item, error) {
	items := []Item{}
	for _, file := range files {
		fileString, err := c.getFileString(file)
		if err != nil {
			klog.Error(err)
			continue
		}
		item := Item{
			Name: getPlainFilename(file),
			Text: fileString,
		}
		items = append(items, item)
	}
	return items, nil
}
// combineMacros returns the default macros with any overrides.
// It works by taking the list of overrides, then adding every default that
// does not already exist in overrides (matched by name).
func combineMacros(overrides []Item) []Item {
	ret := overrides
	for _, defaultMacro := range defaultMacros {
		if !itemListContains(overrides, defaultMacro) {
			ret = append(ret, defaultMacro)
			klog.V(2).Infof("adding default macro that has no override: %s", defaultMacro.Name)
		}
	}
	return ret
}
// itemListContains reports whether list holds an item with the same Name.
func itemListContains(list []Item, item Item) bool {
	for _, candidate := range list {
		if candidate.Name == item.Name {
			return true
		}
	}
	return false
}
// getPlainFilename returns the file component of fullname with its
// extension stripped.
func getPlainFilename(fullname string) string {
	_, name := filepath.Split(fullname)
	return strings.TrimSuffix(name, filepath.Ext(name))
}
// getFileString reads a file into a string.
func (c Config) getFileString(filename string) (string, error) {
	data, err := ioutil.ReadFile(filename)
	if err != nil {
		return "", err
	}
	retString := string(data)
	klog.V(10).Infof("%s:\n%s", filename, retString)
	return retString, nil
}
// listDirectory lists all of the files (recursively) under RootDir/dir.
// NOTE(review): the Walk callback ignores its incoming err argument and
// only warns when info is nil.
func (c Config) listDirectory(dir string) ([]string, error) {
	var files []string
	path := filepath.Join(c.RootDir, dir)
	klog.V(8).Infof("reading directory: %s", path)
	err := filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
		if info == nil {
			klog.Warningf("%s is empty", path)
			return nil
		}
		// Collect regular files only; directories are traversed implicitly.
		if !info.IsDir() {
			files = append(files, path)
			klog.V(8).Infof("found files: %v", files)
		}
		return nil
	})
	if err != nil {
		return nil, err
	}
	return files, nil
}
// Read parses a theme XML file and dumps its template names and macro
// name/text pairs to stdout (a debugging aid).
func Read(filename string) error {
	theme := &JcinkXML{}
	data, err := ioutil.ReadFile(filename)
	if err != nil {
		return err
	}
	err = xml.Unmarshal(data, theme)
	if err != nil {
		return err
	}
	for _, template := range theme.Templates.Item {
		fmt.Println(template.Name)
	}
	for _, macro := range theme.Macros.Item {
		fmt.Printf("{Name: %s, Text: %s},\n", macro.Name, macro.Text)
	}
	return nil
}
|
package no_repeat_longest_str
// lengthOfLongestSubstring returns the length of the longest substring of s
// without repeating bytes, using a sliding window over last-seen positions.
func lengthOfLongestSubstring(s string) int {
	if len(s) <= 1 {
		return len(s)
	}
	lastSeen := map[byte]int{s[0]: 0}
	best := 1
	start, end := 0, 1
	for end < len(s) {
		prev, seen := lastSeen[s[end]]
		lastSeen[s[end]] = end
		// Repeats before start were already invalidated by other bytes.
		if seen && prev >= start {
			if end-start > best {
				best = end - start
			}
			start = prev + 1
		}
		end++
	}
	// Account for the window still open when the scan ran off the end.
	if end-start > best {
		return end - start
	}
	return best
}
// max returns the larger of a and b.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
|
// Copyright 2020 Clivern. All rights reserved.
// Use of this source code is governed by the MIT
// license that can be found in the LICENSE file.
package tower
import (
"net/http"
"github.com/clivern/walrus/core/driver"
"github.com/clivern/walrus/core/model"
"github.com/gin-gonic/gin"
log "github.com/sirupsen/logrus"
)
// Info reports whether the tower has completed initial setup by probing
// etcd for the "is_installed" option. Connection failures yield a 500;
// otherwise the response is 200 with setupStatus true/false.
func Info(c *gin.Context) {
	db := driver.NewEtcdDriver()
	err := db.Connect()
	if err != nil {
		log.WithFields(log.Fields{
			"correlation_id": c.GetHeader("x-correlation-id"),
			"error":          err.Error(),
		}).Error("Internal server error")
		c.JSON(http.StatusInternalServerError, gin.H{
			"correlationID": c.GetHeader("x-correlation-id"),
			"errorMessage":  "Internal server error",
		})
		return
	}
	defer db.Close()
	optionStore := model.NewOptionStore(db)
	// A missing key simply means setup has not run yet — not an error.
	_, err = optionStore.GetOptionByKey("is_installed")
	if err != nil {
		c.JSON(http.StatusOK, gin.H{
			"setupStatus": false,
		})
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"setupStatus": true,
	})
}
|
package entity
import (
"time"
"github.com/fatih/structs"
)
// Balance is a per-address, per-token merchant balance row.
type Balance struct {
	Id             int64
	MerchantId     int64
	AddressId      int64
	AccountId      int64
	Chain          string
	Token          string
	Address        string
	Tag            string
	DepositFreeze  string // amount frozen by pending deposits
	WithdrawFreeze string // amount frozen by pending withdrawals
	Balance        string // available balance
	Version        int64  // balance-change version counter
	Hash           string
	CreatedAt      *time.Time
	UpdatedAt      *time.Time
}
// Map converts the struct to a map keyed by field name via structs.Map.
// NOTE(review): returning *map[string]interface{} is unidiomatic (maps are
// already reference types); the signature is kept for compatibility.
func (p *Balance) Map() *map[string]interface{} {
	m := structs.Map(p)
	return &m
}
|
package ice
import (
"testing"
"fmt"
"github.com/nkbai/log"
"github.com/nkbai/goice/stun"
)
// mockcb is a test callback that answers STUN binding requests through the
// serverSocker it wraps.
type mockcb struct {
	s serverSocker
}
/*
RecieveStunMessage handles an incoming stun.Message (bind request, bind
response, etc.). This mock only answers binding requests, echoing the
sender's address back in an XOR-MAPPED-ADDRESS success response.
(The method name's spelling is fixed by the callback interface.)
*/
func (m *mockcb) RecieveStunMessage(localAddr, remoteAddr string, req *stun.Message) {
	if req.Type != stun.BindingRequest {
		return
	}
	log.Info(fmt.Sprintf("recevied binding request %s<----%s", localAddr, remoteAddr))
	var res = new(stun.Message)
	from := addrToUDPAddr(remoteAddr)
	// Build a success response carrying the same transaction id.
	err := res.Build(
		stun.NewTransactionIDSetter(req.TransactionID),
		stun.NewType(stun.MethodBinding, stun.ClassSuccessResponse),
		software,
		&stun.XORMappedAddress{
			IP:   from.IP,
			Port: from.Port,
		},
		stun.Fingerprint,
	)
	if err != nil {
		panic(fmt.Sprintf("build res message error %s", err))
	}
	m.s.sendStunMessageAsync(res, localAddr, remoteAddr)
	return
}
/*
ReceiveData is invoked once ICE negotiation has established a connection and
peer data arrives — either relayed by a TURN server as channel data (the
channel number is then in 0x4000-0x7fff) or sent directly (number 0).
This mock ignores the payload.
*/
func (m *mockcb) ReceiveData(localAddr, peerAddr string, data []byte) {
}
//binding request 和普通的 stun message 一样处理.
//func (s *stunServerSock) processBindingRequest(from net.Addr, req *stun.Message) {
//notauthrized:
// res.Build(stun.NewTransactionIDSetter(req.TransactionID), stun.BindingError,
// stun.CodeUnauthorised, software, stun.Fingerprint)
// s.sendStunMessageAsync(res, from)
//}
// setupTestServerSock creates two loopback STUN server sockets wired to
// mock callbacks, for exercising request/response round trips in tests.
func setupTestServerSock() (s1, s2 *stunServerSock) {
	var err error
	mybindaddr := "127.0.0.1:8700"
	peerbindaddr := "127.0.0.1:8800"
	m1 := new(mockcb)
	m2 := new(mockcb)
	s1, err = newStunServerSock(mybindaddr, m1, "s1")
	if err != nil {
		log.Crit(fmt.Sprintf("create new sock error %s %s", mybindaddr, err))
	}
	s2, err = newStunServerSock(peerbindaddr, m2, "s2")
	if err != nil {
		log.Crit(fmt.Sprintf("creat new sock error %s %s", peerbindaddr, err))
	}
	// Wire each callback back to its own socket so replies can be sent.
	m1.s = s1
	m2.s = s2
	return s1, s2
}
// TestNewServerSock sends a binding request from s1 to s2 and expects a
// binding success response carrying the mapped address.
func TestNewServerSock(t *testing.T) {
	s1, s2 := setupTestServerSock()
	req, _ := stun.Build(stun.TransactionIDSetter, stun.BindingRequest, software, stun.Fingerprint)
	res, err := s1.sendStunMessageSync(req, s1.Addr, s2.Addr)
	if err != nil {
		t.Error(err)
		return
	}
	if res.Type != stun.BindingSuccess {
		t.Error("should success")
		return
	}
	log.Trace(fmt.Sprintf("s1 received :%s", res.String()))
}
|
/*
Copyright 2021 CodeNotary, Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package immuclienttest
import (
"bytes"
"io"
"log"
"os"
"sync"
"github.com/codenotary/immudb/cmd/helper"
"github.com/codenotary/immudb/cmd/immuclient/immuc"
"github.com/codenotary/immudb/pkg/client"
"github.com/codenotary/immudb/pkg/server/servertest"
"google.golang.org/grpc"
)
// clientTest bundles everything needed to drive an immuclient instance in
// tests: the client, token service, connection options and password reader.
type clientTest struct {
	Imc     immuc.Client
	Ts      client.TokenService
	Options client.Options
	Pr      helper.PasswordReader
}
// HomedirServiceMock keeps the auth token in memory instead of touching the
// real user home directory.
type HomedirServiceMock struct {
	client.HomedirService
	token []byte
}

// FileExistsInUserHomeDir always reports the file as present.
func (h *HomedirServiceMock) FileExistsInUserHomeDir(pathToFile string) (bool, error) {
	return true, nil
}

// WriteFileToUserHomeDir stores content as the in-memory token.
func (h *HomedirServiceMock) WriteFileToUserHomeDir(content []byte, pathToFile string) error {
	h.token = content
	return nil
}

// DeleteFileFromUserHomeDir is a no-op.
func (h *HomedirServiceMock) DeleteFileFromUserHomeDir(pathToFile string) error {
	return nil
}

// ReadFileFromUserHomeDir returns the in-memory token.
func (h *HomedirServiceMock) ReadFileFromUserHomeDir(pathToFile string) (string, error) {
	return string(h.token), nil
}
// NewDefaultClientTest returns an empty client test fixture.
func NewDefaultClientTest() *clientTest {
	return &clientTest{}
}

// NewClientTest returns a fixture using the given password reader and
// token service.
func NewClientTest(pr helper.PasswordReader, tkns client.TokenService) *clientTest {
	return &clientTest{
		Ts: tkns,
		Pr: pr,
	}
}

// WithOptions sets the client options and returns the fixture for chaining.
func (ct *clientTest) WithOptions(opts *client.Options) *clientTest {
	ct.Options = *opts
	return ct
}
// Connect dials the in-process test server through the buffered dialer and
// initializes the immuclient under test.
// NOTE(review): the receiver is named c while sibling methods use ct, and
// log.Fatal aborts the whole test binary on failure.
func (c *clientTest) Connect(dialer servertest.BuffDialer) {
	dialOptions := []grpc.DialOption{
		grpc.WithContextDialer(dialer), grpc.WithInsecure(),
	}
	ic, err := immuc.Init(c.Options.WithDialOptions(&dialOptions).WithPasswordReader(c.Pr).
		WithTokenService(c.Ts))
	if err != nil {
		log.Fatal(err)
	}
	err = ic.Connect([]string{""})
	if err != nil {
		log.Fatal(err)
	}
	c.Imc = ic
}
// Login authenticates username through the client under test, aborting the
// test binary on failure; the password comes from the fixture's reader.
func (c *clientTest) Login(username string) {
	_, err := c.Imc.Login([]string{username})
	if err != nil {
		log.Fatal(err)
	}
}
func CaptureStdout(f func()) string {
custReader, custWriter, err := os.Pipe()
if err != nil {
panic(err)
}
origStdout := os.Stdout
origStderr := os.Stderr
defer func() {
os.Stdout = origStdout
os.Stderr = origStderr
}()
os.Stdout, os.Stderr = custWriter, custWriter
log.SetOutput(custWriter)
out := make(chan string)
wg := new(sync.WaitGroup)
wg.Add(1)
go func() {
var buf bytes.Buffer
wg.Done()
io.Copy(&buf, custReader)
out <- buf.String()
}()
wg.Wait()
f()
custWriter.Close()
return <-out
}
// PasswordReader feeds a scripted sequence of passwords to the client.
type PasswordReader struct {
	Pass       []string // passwords returned in order
	callNumber int      // index of the next password to hand out
}

// Read returns the next scripted password; the msg prompt is ignored.
// It exits fatally when called more times than passwords were supplied.
func (pr *PasswordReader) Read(msg string) ([]byte, error) {
	if len(pr.Pass) <= pr.callNumber {
		log.Fatal("Application requested the password more times than number of passwords supplied")
	}
	pass := []byte(pr.Pass[pr.callNumber])
	pr.callNumber++
	return pass, nil
}
|
/*
Copyright 2020 The Qmgo Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package qmgo
import (
"context"
"fmt"
"net/url"
"strings"
"time"
"github.com/qiniu/qmgo/options"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/mongo"
officialOpts "go.mongodb.org/mongo-driver/mongo/options"
"go.mongodb.org/mongo-driver/mongo/readpref"
)
// Config for initial mongodb instance
type Config struct {
	// URI example: [mongodb://][user:pass@]host1[:port1][,host2[:port2],...][/database][?options]
	// URI Reference: https://docs.mongodb.com/manual/reference/connection-string/
	Uri      string `json:"uri"`
	Database string `json:"database"`
	Coll     string `json:"coll"`
	// ConnectTimeoutMS specifies a timeout that is used for creating connections to the server.
	// If set to 0, no timeout will be used.
	// The default is 30 seconds.
	ConnectTimeoutMS *int64 `json:"connectTimeoutMS"`
	// MaxPoolSize specifies the maximum number of connections allowed in the driver's connection pool to each server.
	// If this is 0, it will be set to math.MaxInt64,
	// The default is 100.
	MaxPoolSize *uint64 `json:"maxPoolSize"`
	// MinPoolSize specifies the minimum number of connections allowed in the driver's connection pool to each server. If
	// this is non-zero, each server's pool will be maintained in the background to ensure that the size does not fall below
	// the minimum. This can also be set through the "minPoolSize" URI option (e.g. "minPoolSize=100"). The default is 0.
	MinPoolSize *uint64 `json:"minPoolSize"`
	// SocketTimeoutMS specifies how long the driver will wait for a socket read or write to return before returning a
	// network error. If this is 0 meaning no timeout is used and socket operations can block indefinitely.
	// The default is 300,000 ms.
	SocketTimeoutMS *int64 `json:"socketTimeoutMS"`
	// ReadPreference determines which servers are considered suitable for read operations.
	// default is PrimaryMode
	ReadPreference *ReadPref `json:"readPreference"`
	// Auth can be used to provide authentication options when configuring a Client.
	Auth *Credential `json:"auth"`
}
// Credential can be used to provide authentication options when configuring a Client.
//
// AuthMechanism: the mechanism to use for authentication. Supported values include "SCRAM-SHA-256", "SCRAM-SHA-1",
// "MONGODB-CR", "PLAIN", "GSSAPI", "MONGODB-X509", and "MONGODB-AWS". This can also be set through the "authMechanism"
// URI option. (e.g. "authMechanism=PLAIN"). For more information, see
// https://docs.mongodb.com/manual/core/authentication-mechanisms/.
// AuthSource: the name of the database to use for authentication. This defaults to "$external" for MONGODB-X509,
// GSSAPI, and PLAIN and "admin" for all other mechanisms. This can also be set through the "authSource" URI option
// (e.g. "authSource=otherDb").
//
// Username: the username for authentication. This can also be set through the URI as a username:password pair before
// the first @ character. For example, a URI for user "user", password "pwd", and host "localhost:27017" would be
// "mongodb://user:pwd@localhost:27017". This is optional for X509 authentication and will be extracted from the
// client certificate if not specified.
//
// Password: the password for authentication. This must not be specified for X509 and is optional for GSSAPI
// authentication.
//
// PasswordSet: For GSSAPI, this must be true if a password is specified, even if the password is the empty string, and
// false if no password is specified, indicating that the password should be taken from the context of the running
// process. For other mechanisms, this field is ignored.
type Credential struct {
	AuthMechanism string `json:"authMechanism"` // authentication mechanism name, e.g. "SCRAM-SHA-256"
	AuthSource    string `json:"authSource"`    // database used for authentication
	Username      string `json:"username"`      // user name; must not contain '/' (see newAuth)
	Password      string `json:"password"`      // password; must not contain ':' or '/' (see newAuth)
	PasswordSet   bool   `json:"passwordSet"`   // GSSAPI only: whether a password was explicitly supplied
}
// ReadPref determines which servers are considered suitable for read operations.
type ReadPref struct {
	// MaxStalenessMS is the maximum amount of time (in milliseconds) to allow
	// a server to be considered eligible for selection.
	// Supported from version 3.4.
	MaxStalenessMS int64 `json:"maxStalenessMS"`
	// Mode indicates the user's preference on reads.
	// PrimaryMode as default (see newReadPref, which maps a zero Mode to PrimaryMode).
	Mode readpref.Mode `json:"mode"`
}
// QmgoClient specifies the instance to operate mongoDB. It embeds a
// Collection, a Database and a Client so a single value can be used at
// any of the three levels.
type QmgoClient struct {
	*Collection
	*Database
	*Client
}
// Open creates a client instance according to config.
// The returned QmgoClient can operate qmgo.Client, qmgo.Database and
// qmgo.Collection at the same time.
func Open(ctx context.Context, conf *Config, o ...options.ClientOptions) (cli *QmgoClient, err error) {
	var client *Client
	client, err = NewClient(ctx, conf, o...)
	if err != nil {
		fmt.Println("new client fail", err)
		return
	}
	database := client.Database(conf.Database)
	cli = &QmgoClient{
		Client:     client,
		Database:   database,
		Collection: database.Collection(conf.Coll),
	}
	return
}
// Client wraps the official mongo.Client together with the Config it
// was built from and the BSON registry in use.
type Client struct {
	client   *mongo.Client       // underlying driver client
	conf     Config              // copy of the config used to create this client
	registry *bsoncodec.Registry // BSON codec registry taken from the connect options
}
// NewClient creates a Qmgo MongoDB client from conf plus any extra
// client options.
func NewClient(ctx context.Context, conf *Config, o ...options.ClientOptions) (cli *Client, err error) {
	opt, err := newConnectOpts(conf, o...)
	if err != nil {
		return nil, err
	}
	var mCli *mongo.Client
	mCli, err = client(ctx, opt)
	if err != nil {
		fmt.Println("new client fail", err)
		return
	}
	cli = &Client{
		client:   mCli,
		conf:     *conf,
		registry: opt.Registry,
	}
	return
}
// client establishes the underlying mongo-driver connection and
// verifies it with a ping against the primary.
func client(ctx context.Context, opt *officialOpts.ClientOptions) (client *mongo.Client, err error) {
	client, err = mongo.Connect(ctx, opt)
	if err != nil {
		fmt.Println(err)
		return
	}
	// Ping with half of the default connect timeout so a dead server
	// is detected promptly.
	pCtx, cancel := context.WithTimeout(ctx, 15*time.Second)
	defer cancel()
	if err = client.Ping(pCtx, readpref.Primary()); err != nil {
		fmt.Println(err)
		// Release the sockets of the unusable client instead of leaking
		// them (the original returned a connected-but-unpingable client
		// alongside the error). Callers only use client when err is nil.
		_ = client.Disconnect(ctx)
		client = nil
		return
	}
	return
}
// newConnectOpts creates client options from conf.
// Qmgo follows the same rules the official mongodb driver does:
//   - the configuration in the uri takes precedence over the configuration in the setter
//   - the validity of the configuration in the uri is checked, while the
//     configuration in the setter is basically not checked
func newConnectOpts(conf *Config, o ...options.ClientOptions) (*officialOpts.ClientOptions, error) {
	option := officialOpts.Client()
	for _, apply := range o {
		// Merge each caller-supplied option set INTO the accumulated
		// options. The original did MergeClientOptions(apply.ClientOptions)
		// alone, which overwrote `option` with only the last element and
		// silently dropped everything merged before it.
		option = officialOpts.MergeClientOptions(option, apply.ClientOptions)
	}
	if conf.ConnectTimeoutMS != nil {
		timeoutDur := time.Duration(*conf.ConnectTimeoutMS) * time.Millisecond
		option.SetConnectTimeout(timeoutDur)
	}
	if conf.SocketTimeoutMS != nil {
		timeoutDur := time.Duration(*conf.SocketTimeoutMS) * time.Millisecond
		option.SetSocketTimeout(timeoutDur)
	} else {
		// Default socket timeout of 300s when none is configured.
		option.SetSocketTimeout(300 * time.Second)
	}
	if conf.MaxPoolSize != nil {
		option.SetMaxPoolSize(*conf.MaxPoolSize)
	}
	if conf.MinPoolSize != nil {
		option.SetMinPoolSize(*conf.MinPoolSize)
	}
	if conf.ReadPreference != nil {
		readPreference, err := newReadPref(*conf.ReadPreference)
		if err != nil {
			return nil, err
		}
		option.SetReadPreference(readPreference)
	}
	if conf.Auth != nil {
		auth, err := newAuth(*conf.Auth)
		if err != nil {
			return nil, err
		}
		option.SetAuth(auth)
	}
	// Applied last, but per driver semantics URI settings take precedence.
	option.ApplyURI(conf.Uri)
	return option, nil
}
// newAuth creates options.Credential from conf.Auth.
// Usernames and passwords are validated for characters that are not
// supported; the raw (un-unescaped) password is what gets used.
func newAuth(auth Credential) (credential officialOpts.Credential, err error) {
	if auth.AuthMechanism != "" {
		credential.AuthMechanism = auth.AuthMechanism
	}
	if auth.AuthSource != "" {
		credential.AuthSource = auth.AuthSource
	}
	if auth.Username != "" {
		// Validate and process the username.
		if strings.Contains(auth.Username, "/") {
			err = ErrNotSupportedUsername
			return
		}
		credential.Username, err = url.QueryUnescape(auth.Username)
		if err != nil {
			err = ErrNotSupportedUsername
			return
		}
	}
	credential.PasswordSet = auth.PasswordSet
	if auth.Password != "" {
		if strings.Contains(auth.Password, ":") {
			err = ErrNotSupportedPassword
			return
		}
		if strings.Contains(auth.Password, "/") {
			err = ErrNotSupportedPassword
			return
		}
		// QueryUnescape is used purely as validation here: the original
		// assigned its result to credential.Password and then immediately
		// overwrote it with the raw password (a dead store). Behavior is
		// preserved — the raw password is used.
		if _, err = url.QueryUnescape(auth.Password); err != nil {
			err = ErrNotSupportedPassword
			return
		}
		credential.Password = auth.Password
	}
	return
}
// newReadPref builds a readpref.ReadPref from the config.
func newReadPref(pref ReadPref) (*readpref.ReadPref, error) {
	var opts []readpref.Option
	if pref.MaxStalenessMS != 0 {
		staleness := time.Duration(pref.MaxStalenessMS) * time.Millisecond
		opts = append(opts, readpref.WithMaxStaleness(staleness))
	}
	// A zero mode means the caller did not choose one; default to primary.
	mode := pref.Mode
	if mode == 0 {
		mode = readpref.PrimaryMode
	}
	return readpref.New(mode, opts...)
}
// Close closes sockets to the topology referenced by this Client.
func (c *Client) Close(ctx context.Context) error {
	return c.client.Disconnect(ctx)
}
// Ping confirms the connection is alive, waiting at most timeout seconds.
func (c *Client) Ping(timeout int64) error {
	ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second)
	defer cancel()
	return c.client.Ping(ctx, readpref.Primary())
}
// Database creates a connection to the named database, merging any
// supplied database options.
func (c *Client) Database(name string, options ...*options.DatabaseOptions) *Database {
	var opts []*officialOpts.DatabaseOptions
	for _, o := range options {
		opts = append(opts, o.DatabaseOptions)
	}
	merged := officialOpts.MergeDatabaseOptions(opts...)
	return &Database{
		database: c.client.Database(name, merged),
		registry: c.registry,
	}
}
// Session creates one session on the client.
// Watch out: close the session after the operation is done.
func (c *Client) Session(opt ...*options.SessionOptions) (*Session, error) {
	var sessionOpts *officialOpts.SessionOptions
	if len(opt) > 0 && opt[0].SessionOptions != nil {
		sessionOpts = opt[0].SessionOptions
	} else {
		sessionOpts = officialOpts.Session()
	}
	s, err := c.client.StartSession(sessionOpts)
	return &Session{session: s}, err
}
// DoTransaction runs the whole transaction in one function.
// Preconditions:
//   - version of mongoDB server >= v4.0
//   - topology of mongoDB server is not Single
//
// At the same time, please pay attention to the following:
//   - make sure all operations in callback use sessCtx as the context parameter;
//   - if the operations in callback take 120s or more, they will not take effect;
//   - if an operation in callback returns qmgo.ErrTransactionRetry, the whole
//     transaction is retried, so the transaction must be idempotent;
//   - if operations in callback return qmgo.ErrTransactionNotSupported, the
//     transaction is not supported by the server;
//   - if the ctx parameter already has a Session attached to it, it will be
//     replaced by this session.
func (c *Client) DoTransaction(ctx context.Context, callback func(sessCtx context.Context) (interface{}, error), opts ...*options.TransactionOptions) (interface{}, error) {
	// Fail fast when the server cannot run transactions at all.
	if !c.transactionAllowed() {
		return nil, ErrTransactionNotSupported
	}
	s, err := c.Session()
	if err != nil {
		return nil, err
	}
	// The session must outlive StartTransaction; end it when done.
	defer s.EndSession(ctx)
	return s.StartTransaction(ctx, callback, opts...)
}
// ServerVersion returns the version of the mongoDB server, like 4.4.0,
// or "" if it cannot be determined.
func (c *Client) ServerVersion() string {
	var info bson.Raw
	cmd := bson.D{{"buildInfo", 1}}
	if err := c.client.Database("admin").RunCommand(context.Background(), cmd).Decode(&info); err != nil {
		fmt.Println("run command err", err)
		return ""
	}
	version, err := info.LookupErr("version")
	if err != nil {
		fmt.Println("look up err", err)
		return ""
	}
	return version.StringValue()
}
// transactionAllowed checks whether the connected server can run
// multi-document transactions. Only the server version (>= 4.0) is
// checked here; the topology check is disabled, see below.
func (c *Client) transactionAllowed() bool {
	// NOTE(review): vr > 0 is treated as "4.0 is newer than the server",
	// i.e. the server is older than 4.0 — confirm against CompareVersions,
	// whose definition is not visible in this file.
	vr, err := CompareVersions("4.0", c.ServerVersion())
	if err != nil {
		return false
	}
	if vr > 0 {
		fmt.Println("transaction is not supported because mongo server version is below 4.0")
		return false
	}
	// TODO dont know why need to do `cli, err := Open(ctx, &c.conf)` in topology() to get topo,
	// Before figure it out, we only use this function in UT
	//topo, err := c.topology()
	//if topo == description.Single {
	//	fmt.Println("transaction is not supported because mongo server topology is single")
	//	return false
	//}
	return true
}
|
package quotetesting
import (
"fmt"
"net/http"
"net/http/httptest"
"strconv"
"time"
)
// NewTestServer creates a new httptest server that returns a response
// built from the request parameters.
// The response is an html page with a table that prints
// key/value pairs of query parameters.
// Special parameters:
// delay: number of msec to sleep before returning the response
// code: returned http status
func NewTestServer() *httptest.Server {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
query := r.URL.Query()
// code
code, _ := strconv.Atoi(query.Get("code"))
if code == 0 {
code = http.StatusOK
}
// delay
delaymsec, _ := strconv.Atoi(query.Get("delay"))
if delaymsec > 0 {
time.Sleep(time.Duration(delaymsec) * time.Millisecond)
}
if code != http.StatusOK {
// set the status code
http.Error(w, http.StatusText(code), code)
return
}
fmt.Fprint(w, `<html>
<head>
<title>Test Server Result</title>
</head>
<body>
<h1>Test Server Result</h1>
<table>
`)
for k, v := range query {
fmt.Fprintf(w, "<tr><th>%s</th><td>%s</td></tr>\n", k, v[0])
}
fmt.Fprint(w, `</table>
</body>
</html>`)
}))
return server
}
|
package middleware
import (
"bytes"
"dappapi/models"
"dappapi/tools"
config2 "dappapi/tools/config"
"encoding/json"
"io"
"io/ioutil"
"net/url"
"strings"
"time"
"github.com/gin-gonic/gin"
log "github.com/sirupsen/logrus"
)
// bodyLogWriter wraps a gin.ResponseWriter and mirrors everything
// written to the response into an in-memory buffer, so the response
// body can be logged after the handler chain has run.
type bodyLogWriter struct {
	gin.ResponseWriter
	body *bytes.Buffer // copy of the response body for logging
}
// Write tees b into the capture buffer and then forwards it to the
// underlying gin.ResponseWriter. Writes to bytes.Buffer cannot fail,
// so only the real writer's result is returned.
func (w bodyLogWriter) Write(b []byte) (int, error) {
	w.body.Write(b)
	return w.ResponseWriter.Write(b)
}
// LoggerToFile returns gin middleware that logs every request to the
// configured logger and, when enabled, persists it to the operation-log
// table via SetDBOperLog.
func LoggerToFile() gin.HandlerFunc {
	return func(c *gin.Context) {
		start := time.Now()

		// Wrap the writer so the response body is captured for logging.
		writer := &bodyLogWriter{body: bytes.NewBufferString(""), ResponseWriter: c.Writer}
		c.Writer = writer

		// Tee the request body so it can be logged while staying
		// readable by downstream handlers.
		var bodyCopy bytes.Buffer
		raw, _ := ioutil.ReadAll(io.TeeReader(c.Request.Body, &bodyCopy))
		c.Request.Body = ioutil.NopCloser(&bodyCopy)
		reqBody, _ := url.QueryUnescape(string(raw[:]))

		// Run the remaining handlers.
		c.Next()

		// Gather request/response details after the handlers finish.
		elapsed := time.Now().Sub(start)
		status := c.Writer.Status()
		ip := c.ClientIP()
		method := c.Request.Method
		uri := c.Request.RequestURI

		log.Infof(" %s %3d %13v %15s %s %s \r\n",
			start.Format("2006-01-02 15:04:05.9999"),
			status,
			elapsed,
			ip,
			method,
			uri,
		)

		// Persist to the operation-log table when enabled, skipping
		// CORS preflight requests.
		if c.Request.Method != "OPTIONS" && config2.LogConfig.Operdb {
			SetDBOperLog(c, ip, status, uri, method, elapsed, writer, reqBody)
		}
	}
}
// SetDBOperLog persists one request's details into the operation-log
// table (Req_log). This function is slated for removal.
func SetDBOperLog(c *gin.Context, clientIP string, statusCode int, reqUri string, reqMethod string, latencyTime time.Duration, blw *bodyLogWriter, body string) {
	entry := &models.Req_log{}
	entry.Clinetip = clientIP
	entry.Location = tools.GetLocation(clientIP)
	entry.Code = statusCode
	// Requests to cpapi routes are attributed to "cpapi"; everything
	// else is attributed to the authenticated user.
	if strings.Contains(reqUri, "cpapi") {
		entry.Aname = "cpapi"
	} else {
		entry.Aname = tools.GetUserName(c)
	}
	entry.Method = c.Request.Method
	entry.Requri, _ = url.QueryUnescape(reqUri)
	headerJSON, _ := json.Marshal(c.Request.Header)
	entry.Reqheader = string(headerJSON)
	entry.Reqparam = body
	entry.Resbody = blw.body.String()
	entry.Ctime = tools.GetCurrntTime().Unix()
	// Microseconds/1000 == latency in whole milliseconds.
	entry.Costtime = latencyTime.Microseconds() / 1000
	entry.Ua = c.Request.UserAgent()
	// NOTE(review): a request without gin errors overwrites the HTTP
	// status with 0 — presumably "0 means success"; confirm intended.
	if c.Err() == nil {
		entry.Code = 0
	}
	_, _ = entry.Create()
}
|
package main
import (
"flag"
"fmt"
"math"
"net/http"
"sort"
"sync"
"time"
)
var (
	url     string // target URL
	m       string // HTTP request method
	c       uint   // concurrency level
	n       uint   // total number of requests
	H       string // request header as a JSON string (parsed but not used by BenchWeb in this file)
	b       string // request body as a JSON string (parsed but not used by BenchWeb in this file)
	timeout int64  // per-request timeout in ms (parsed but not used by BenchWeb in this file)
)
// init registers all command-line flags and their default values.
func init() {
	// Registered for the usage text only; the returned value is discarded.
	flag.String("h", ``, "使用助手")
	flag.StringVar(&url, "url", "http://example.com", "地址")
	flag.StringVar(&m, "m", "GET", "请求方法")
	flag.UintVar(&c, "c", 10, "并发量")
	flag.UintVar(&n, "n", 10, "总请求量")
	flag.StringVar(&H, "H", `{"Content-Type": "application/json"}`, "json格式的Header")
	flag.StringVar(&b, "b", `{"key":"value"}`, "json格式的body")
	flag.Int64Var(&timeout, "timeout", 1000, "单个请求的超时时间")
}
// main parses the flags, runs the benchmark and prints a short summary.
func main() {
	// Bind all flag values to the package-level variables.
	flag.Parse()
	ab := BenchWeb(url, m, n, c)
	fmt.Println("总请求数:", ab.Requests, " 成功数:", ab.CompleteRequests)
	fmt.Println("总耗时:", ab.TimeTotal)
}
// ApacheBench holds the configuration and aggregated results of one
// benchmark run (modelled after the Apache `ab` tool).
type ApacheBench struct {
	Url              string  // target URL
	Concurrency      uint    // concurrency level
	Requests         uint    // total number of requests asked for
	TimeTotal        float64 // total wall-clock time in seconds
	CompleteRequests uint    // number of completed (successful) requests
	FailedRequests   uint    // number of failed requests
	ResponseTimeAvg  float64 // mean response time in seconds
	ResponseTime95   float64 // 95th-percentile response time in seconds
}
// onceReqReport is the outcome of a single HTTP request.
type onceReqReport struct {
	elapsed time.Duration // how long the request took
	success bool          // true when the response status was below 400
}
// sendRequest performs one HTTP request and reports its outcome on
// resultChan; wg is marked done when the request finishes.
//
// Fix over the original: client.Do's error was discarded and resp was
// dereferenced unconditionally, panicking on any transport error.
// Errors now count as failures, and the response body is closed.
func sendRequest(url string, method string, resultChan chan *onceReqReport, wg *sync.WaitGroup) {
	defer wg.Done()
	res := onceReqReport{}
	t0 := time.Now()
	request, err := http.NewRequest(method, url, nil)
	if err != nil {
		res.success = false
		res.elapsed = time.Since(t0)
		resultChan <- &res
		return
	}
	client := http.Client{}
	resp, err := client.Do(request)
	if err != nil {
		res.success = false
	} else {
		resp.Body.Close()
		res.success = resp.StatusCode < 400
	}
	res.elapsed = time.Since(t0)
	resultChan <- &res
}
// concurRequest launches `concurrency` requests in parallel and waits
// for all of them to finish.
func concurRequest(concurrency uint, url string, method string, resultChan chan *onceReqReport) {
	wg := sync.WaitGroup{}
	for i := 0; i < int(concurrency); i++ {
		// Add must happen before the goroutine starts: the original
		// called Add after `go`, so a fast goroutine could reach Done
		// (or Wait could run) before the counter was incremented.
		wg.Add(1)
		go sendRequest(url, method, resultChan, &wg)
	}
	wg.Wait()
}
// collect drains resChan into result and returns the final slice.
//
// Fix over the original: appending to a by-value slice parameter is
// invisible to the caller, so the collected reports were always lost.
// Returning the slice exposes them while keeping existing statement
// call sites (`go collect(ch, s)`) compiling unchanged.
func collect(resChan chan *onceReqReport, result []*onceReqReport) []*onceReqReport {
	for item := range resChan {
		result = append(result, item)
	}
	return result
}
// static aggregates the per-request reports: the number of failures,
// the 95th-percentile response time, and the mean response time (both
// in seconds).
func static(result []*onceReqReport) (failCount uint, time95, avg float64) {
	// Guard empty input: the original divided by zero, producing a NaN
	// average.
	if len(result) == 0 {
		return 0, 0, 0
	}
	elapsed := make([]float64, 0, len(result))
	sum := 0.0 // total time across all requests
	for _, r := range result {
		e := r.elapsed.Seconds()
		elapsed = append(elapsed, e)
		sum += e
		if !r.success {
			failCount++
		}
	}
	sort.Float64s(elapsed)
	// Index of the 95th percentile, clamped to the last element.
	idx := int(math.Floor(float64(len(elapsed)) * 0.95))
	if idx >= len(elapsed) {
		idx = len(elapsed) - 1
	}
	time95 = elapsed[idx]
	avg = sum / float64(len(result))
	return
}
// BenchWeb runs a simple HTTP benchmark: requestNum requests are issued
// in waves of `concurrency`, and the aggregated report is returned.
//
// Fixes over the original:
//   - results are now actually collected: the original handed a slice
//     value to a goroutine which appended to an invisible copy, so the
//     statistics were always computed over an empty slice;
//   - the result channel is buffered for every request so no sender can
//     block, and it is closed and drained inline (the original leaked a
//     goroutine ranging over a never-closed channel).
func BenchWeb(url string, method string, requestNum uint, concurrency uint) *ApacheBench {
	// todo: validate the url
	waves := requestNum / concurrency // number of concurrent waves needed
	if requestNum%concurrency != 0 {
		waves++ // one extra partial wave for the remainder
	}
	total := waves * concurrency // actual number of requests issued
	resultChan := make(chan *onceReqReport, total)

	t0 := time.Now()
	for i := 0; i < int(waves); i++ {
		concurRequest(concurrency, url, method, resultChan)
	}
	elapsed := time.Since(t0)

	// Every sender has finished (concurRequest waits on its WaitGroup),
	// so exactly `total` reports sit buffered in the channel.
	close(resultChan)
	result := make([]*onceReqReport, 0, total)
	for item := range resultChan {
		result = append(result, item)
	}

	failCount, time95, avg := static(result)
	// total may exceed requestNum, so clamp to avoid uint underflow.
	complete := uint(0)
	if requestNum > failCount {
		complete = requestNum - failCount
	}
	return &ApacheBench{
		Url:              url,
		Concurrency:      concurrency,
		Requests:         requestNum,
		TimeTotal:        elapsed.Seconds(),
		CompleteRequests: complete,
		FailedRequests:   failCount,
		ResponseTimeAvg:  avg,
		ResponseTime95:   time95,
	}
}
|
package main
import "fmt"
// main demonstrates that `continue` skips the rest of the loop body:
// "before" is printed three times, "after" never, then "next statement".
func main() {
	for i := 0; i < 3; i++ {
		fmt.Println("before")
		continue
		fmt.Println("after") // unreachable: the continue above always skips it
	}
	fmt.Println("next statement")
}
|
package models
import (
"database/sql"
"log"
_ "github.com/mattn/go-sqlite3"
)
// Store abstracts the data layer; implementations return all users.
type Store interface {
	AllUsers() (interface{}, error)
}
// Repo is a Store implementation backed by an *sql.DB handle.
type Repo struct {
	db *sql.DB
}

// NewDB opens the sqlite3 database at dataSourceName, verifies the
// connection with a ping, and returns a Repo wrapping it.
//
// Fixes over the original: log.Fatalf both exited the process (making
// the subsequent `return nil, err` unreachable) and was passed
// err.Error() against a format string with no verbs (a go vet error).
// Errors are now logged and returned to the caller.
func NewDB(dataSourceName string) (*Repo, error) {
	db, err := sql.Open("sqlite3", dataSourceName)
	if err != nil {
		log.Printf("DB open error: %v", err)
		return nil, err
	}
	if err = db.Ping(); err != nil {
		log.Printf("Ping error: %v", err)
		// Close the handle we will not be using.
		db.Close()
		return nil, err
	}
	return &Repo{db}, nil
}
|
package p9p
import "io"
// Remove sends a Tremove for fid; err reports any message or
// scheduling failure.
func (c *Conn) Remove(fid Fid) (err error) {
	defer func() { logf("Remove: %v", err) }()
	m := &Msg{src: c}
	if !m.writeHeader(KTremove) {
		return m.err
	}
	if !m.writebinary(fid) {
		return m.err
	}
	if !c.schedule(m) {
		return m.err
	}
	return nil
}
// Clunk sends a Tclunk for fid; err reports any message or scheduling
// failure.
func (c *Conn) Clunk(fid Fid) (err error) {
	defer func() { logf("Clunk: %v", err) }()
	m := &Msg{src: c}
	if !m.writeHeader(KTclunk) {
		return m.err
	}
	if !m.writebinary(fid) {
		return m.err
	}
	if !c.schedule(m) {
		return m.err
	}
	return nil
}
// Walk walks fid to newfid through the given path elements and returns
// the qids encountered along the way.
func (c *Conn) Walk(fid, newfid Fid, names ...string) (q []Qid, err error) {
	defer func() { logf("Walk: %v %v %v", fid, newfid, names) }()
	m := &Msg{src: c}
	// Build the request; stop at the first failed write.
	built := m.writeHeader(KTwalk) &&
		m.writebinary(fid) &&
		m.writebinary(newfid) &&
		m.writeNames(names...)
	if !built {
		return q, m.err
	}
	if !c.schedule(m) {
		return q, m.err
	}
	m.readQuids(&q)
	return q, m.err
}
// ReadFid reads up to len(p) bytes from fid starting at offset. The
// returned count comes from the server's reply; err carries the
// message error, if any.
// NOTE(review): on io.ErrUnexpectedEOF the function returns early with
// n == 0; other read failures fall through and still return int(nn) —
// presumably to surface a partial count. Confirm this is intended.
func (c *Conn) ReadFid(fid Fid, offset int64, p []byte) (n int, err error) {
	var nn int32
	// Logged after return so the (possibly filled) buffer is visible.
	defer func() { logf("Read: %v %v %v", fid, offset, p) }()
	m := &Msg{src: c}
	// Request: fid, offset and requested byte count packed as one struct.
	if !m.writeHeader(KTread) || !m.writebinary(&struct {
		Fid Fid
		Ofs int64
		N int32
	}{fid, offset, int32(len(p))}) {
		return n, m.err
	}
	if !c.schedule(m) || !m.readbinary(&nn) || !m.readbytes(p) {
		if m.err == io.ErrUnexpectedEOF {
			return n, m.err
		}
	}
	return int(nn), m.err
}
// WriteFid writes p to fid at the given offset and returns the byte
// count the server reports as written.
func (c *Conn) WriteFid(fid Fid, offset int64, p []byte) (n int, err error) {
	var nn int32
	defer func() { logf("Write: %v %v %v", fid, offset, p) }()
	m := &Msg{src: c}
	// Header: fid, offset and byte count packed as one struct.
	if !m.writeHeader(KTwrite) || !m.writebinary(&struct {
		Fid Fid
		Ofs int64
		N int32
	}{fid, offset, int32(len(p))}) {
		return n, m.err
	}
	// Payload follows the header; nn is the server-reported write count.
	if !m.write(p) || !c.schedule(m) || !m.readbinary(&nn) {
		return n, m.err
	}
	return int(nn), m.err
}
|
package aoc2020
import (
"sort"
"strconv"
aoc "github.com/janreggie/aoc/internal"
)
// Day09 solves the ninth day puzzle "Encoding Error"
//
// Input
//
// A file containing positive integers separated by newlines. For example:
//
// 35
// 20
// 15
// 25
// 47
// 40
// 62
// 55
// 65
// 95
// 102
// 117
// 150
// 182
// 127
//
// It is guaranteed that the input is at least 26 lines long.
func Day09(input string) (answer1, answer2 string, err error) {
	allNumbers, err := aoc.SplitLinesToInts(input)
	if err != nil {
		return
	}
	// checkValidSumOfTwo reports whether two distinct entries of past
	// sum to present, using the classic two-pointer scan over a sorted copy.
	checkValidSumOfTwo := func(past []int, present int) bool {
		pastSorted := make([]int, len(past))
		copy(pastSorted, past)
		sort.Ints(pastSorted)
		ii, jj := 0, len(pastSorted)-1
		for ii != jj {
			if sum := pastSorted[ii] + pastSorted[jj]; sum == present {
				return true
			} else if sum < present {
				ii++
			} else {
				jj--
			}
		}
		return false
	}
	// Part 1: the first number that is not the sum of two of the 25
	// numbers preceding it. The condition ii+25 < len(allNumbers)
	// (i.e. ii <= len-26) also checks the final element; the original
	// `ii < len(allNumbers)-26` stopped one window early and could miss
	// an invalid number at the very end of the input.
	invalidNumber := 0
	for ii := 0; ii+25 < len(allNumbers); ii++ {
		past := allNumbers[ii : ii+25]
		present := allNumbers[ii+25]
		if !checkValidSumOfTwo(past, present) {
			invalidNumber = present
			break
		}
	}
	answer1 = strconv.Itoa(invalidNumber)
	// Part 2 helpers: sum, min and max over a slice.
	sum := func(ss []int) int {
		result := 0
		for _, vv := range ss {
			result += vv
		}
		return result
	}
	min := func(ss []int) int {
		result := ss[0]
		for _, vv := range ss {
			if vv < result {
				result = vv
			}
		}
		return result
	}
	max := func(ss []int) int {
		result := ss[0]
		for _, vv := range ss {
			if vv > result {
				result = vv
			}
		}
		return result
	}
	// Part 2: slide a window [ii, jj) over the numbers — grow jj while
	// the sum is too small, advance ii while it is too large. When the
	// window sums to invalidNumber, the weakness is min+max of the window.
	ii, jj := 0, 25
	encryptionWeakness := 0
	for ii != jj && jj <= len(allNumbers) {
		rr := allNumbers[ii:jj]
		ss := sum(rr)
		if ss == invalidNumber {
			encryptionWeakness = min(rr) + max(rr)
			break
		} else if ss < invalidNumber {
			jj++
		} else {
			ii++
		}
	}
	answer2 = strconv.Itoa(encryptionWeakness)
	return
}
|
package gommon
import (
"errors"
"fmt"
)
// builtInFunc maps a symbol name to the Go implementation of that
// built-in form; it is populated by this package's init function.
var builtInFunc map[string]func(*Cons) (*Cons, error)
// Eval evaluates each element of the list, producing a new list of the
// results. A nested list whose head is a known built-in symbol is
// replaced by the result of calling that built-in on its tail; a plain
// atom is copied through unchanged. A nested list headed by anything
// other than a known symbol is an error.
func (this *Cons) Eval() (*Cons, error) {
	// first is the head of the result list; last is the cell currently
	// being filled. The result is linked together cell by cell.
	first := new(Cons)
	last := first
	p := this
	for {
		if t, ok := p.Car.(*Cons); ok {
			// Sub-list: must be a (symbol args...) form naming a built-in.
			if name, ok := t.Car.(AtomSymbol); ok {
				if fn, ok := builtInFunc[string(name)]; ok {
					var err error
					last.Car, err = fn(t.Cdr)
					if err != nil {
						return nil, err
					}
				} else {
					return nil, fmt.Errorf("%s: Not found", name)
				}
			} else {
				return nil, errors.New("list: can not evaluate")
			}
		} else {
			// Atom: copied into the result as-is.
			last.Car = p.Car
		}
		if p.Cdr == nil {
			// End of input: terminate the result list and return.
			last.Cdr = nil
			return first, nil
		}
		// Advance the input and extend the result with a fresh cell.
		p = p.Cdr
		tmp := new(Cons)
		last.Cdr = tmp
		last = tmp
	}
}
// init registers the built-in special forms by name.
func init() {
	builtInFunc = map[string]func(*Cons) (*Cons, error){
		"print": CmdPrint,
		"quote": CmdQuote,
	}
}
|
package test
import (
"context"
"encoding/base64"
"fmt"
"io/ioutil"
"math"
"os"
"path"
"strings"
"time"
"gopkg.in/op/go-logging.v1"
"build"
"core"
"metrics"
)
// log is the package-wide logger for the test package.
var log = logging.MustGetLogger("test")

// dummyOutput and dummyCoverage are placeholder artifacts cached for
// targets that don't produce real output/coverage files.
const dummyOutput = "=== RUN DummyTest\n--- PASS: DummyTest (0.00s)\nPASS\n"
const dummyCoverage = "<?xml version=\"1.0\" ?><coverage></coverage>"
// Test runs the tests for a single target and records how long the
// whole operation took.
func Test(tid int, state *core.BuildState, label core.BuildLabel) {
	state.LogBuildResult(tid, label, core.TargetTesting, "Testing...")
	start := time.Now()
	target := state.Graph.TargetOrDie(label)
	test(tid, state, label, target)
	metrics.Record(target, time.Since(start))
}
// test runs the tests for one target: it decides whether cached results
// can be reused, otherwise executes the test up to the configured number
// of runs (tolerating flakes), parses results and coverage, caches the
// artifacts on success, and reports the outcome through state.
func test(tid int, state *core.BuildState, label core.BuildLabel, target *core.BuildTarget) {
	startTime := time.Now()
	hash, err := build.RuntimeHash(state, target)
	if err != nil {
		state.LogBuildError(tid, label, core.TargetTestFailed, err, "Failed to calculate target hash")
		return
	}
	// Check the cached output files if the target wasn't rebuilt.
	hash = core.CollapseHash(hash)
	hashStr := base64.RawURLEncoding.EncodeToString(hash)
	resultsFileName := fmt.Sprintf(".test_results_%s_%s", label.Name, hashStr)
	coverageFileName := fmt.Sprintf(".test_coverage_%s_%s", label.Name, hashStr)
	outputFile := path.Join(target.TestDir(), "test.results")
	coverageFile := path.Join(target.TestDir(), "test.coverage")
	cachedOutputFile := path.Join(target.OutDir(), resultsFileName)
	cachedCoverageFile := path.Join(target.OutDir(), coverageFileName)
	needCoverage := state.NeedCoverage && !target.NoTestOutput
	// cachedTest reports previously cached results without re-running.
	cachedTest := func() {
		log.Debug("Not re-running test %s; got cached results.", label)
		coverage := parseCoverageFile(target, cachedCoverageFile)
		results, err := parseTestResults(target, cachedOutputFile, true)
		target.Results.Duration = time.Since(startTime)
		target.Results.Cached = true
		if err != nil {
			state.LogBuildError(tid, label, core.TargetTestFailed, err, "Failed to parse cached test file %s", cachedOutputFile)
		} else if results.Failed > 0 {
			panic("Test results with failures shouldn't be cached.")
		} else {
			logTestSuccess(state, tid, label, &results, &coverage)
		}
	}
	// moveAndCacheOutputFiles moves test artifacts into the output
	// directory and caches them; returns false if anything failed.
	moveAndCacheOutputFiles := func(results *core.TestResults, coverage *core.TestCoverage) bool {
		// Never cache test results when given arguments; the results may be incomplete.
		if len(state.TestArgs) > 0 {
			log.Debug("Not caching results for %s, we passed it arguments", label)
			return true
		}
		if err := moveAndCacheOutputFile(state, target, hash, outputFile, cachedOutputFile, resultsFileName, dummyOutput); err != nil {
			state.LogTestResult(tid, label, core.TargetTestFailed, results, coverage, err, "Failed to move test output file")
			return false
		}
		if needCoverage || core.PathExists(coverageFile) {
			if err := moveAndCacheOutputFile(state, target, hash, coverageFile, cachedCoverageFile, coverageFileName, dummyCoverage); err != nil {
				state.LogTestResult(tid, label, core.TargetTestFailed, results, coverage, err, "Failed to move test coverage file")
				return false
			}
		}
		for _, output := range target.TestOutputs {
			tmpFile := path.Join(target.TestDir(), output)
			outFile := path.Join(target.OutDir(), output)
			if err := moveAndCacheOutputFile(state, target, hash, tmpFile, outFile, output, ""); err != nil {
				state.LogTestResult(tid, label, core.TargetTestFailed, results, coverage, err, "Failed to move test output file")
				return false
			}
		}
		return true
	}
	// needToRun reports whether the test actually has to execute, i.e.
	// no valid cached results/coverage/output artifacts are available.
	needToRun := func() bool {
		if target.State() == core.Unchanged && core.PathExists(cachedOutputFile) {
			// Output file exists already and appears to be valid. We might still need to rerun though
			// if the coverage files aren't available.
			if needCoverage && !core.PathExists(cachedCoverageFile) {
				return true
			}
			return false
		}
		// Check the cache for these artifacts.
		if state.Cache == nil {
			return true
		}
		if !state.Cache.RetrieveExtra(target, hash, resultsFileName) {
			return true
		}
		if needCoverage && !state.Cache.RetrieveExtra(target, hash, coverageFileName) {
			return true
		}
		for _, output := range target.TestOutputs {
			if !state.Cache.RetrieveExtra(target, hash, output) {
				return true
			}
		}
		return false
	}
	// Don't cache when doing multiple runs, presumably the user explicitly wants to check it.
	if state.NumTestRuns <= 1 && !needToRun() {
		cachedTest()
		return
	}
	// Remove any cached test result file.
	if err := RemoveCachedTestFiles(target); err != nil {
		state.LogBuildError(tid, label, core.TargetTestFailed, err, "Failed to remove cached test files")
		return
	}
	numSucceeded := 0
	numFlakes := 0
	numRuns, successesRequired := calcNumRuns(state.NumTestRuns, target.Flakiness)
	var resultErr error
	resultMsg := ""
	var coverage core.TestCoverage
	// Run until we have enough successes or exhaust the allowed runs.
	for i := 0; i < numRuns && numSucceeded < successesRequired; i++ {
		if numRuns > 1 {
			state.LogBuildResult(tid, label, core.TargetTesting, fmt.Sprintf("Testing (%d of %d)...", i+1, numRuns))
		}
		out, err := prepareAndRunTest(tid, state, target)
		duration := time.Since(startTime)
		startTime = time.Now() // reset this for next time
		// This is all pretty involved; there are lots of different possibilities of what could happen.
		// The contract is that the test must return zero on success or non-zero on failure (Unix FTW).
		// If it's successful, it must produce a parseable file named "test.results" in its temp folder.
		// (alternatively, this can be a directory containing parseable files).
		// Tests can opt out of the file requirement individually, in which case they're judged only
		// by their return value.
		// But of course, we still have to consider all the alternatives here and handle them nicely.
		target.Results.Output = string(out)
		if err != nil && target.Results.Output == "" {
			target.Results.Output = err.Error()
		}
		target.Results.TimedOut = err == context.DeadlineExceeded
		coverage = parseCoverageFile(target, coverageFile)
		target.Results.Duration += duration
		if !core.PathExists(outputFile) {
			if err == nil && target.NoTestOutput {
				// No results file expected; a zero exit counts as one pass.
				target.Results.NumTests++
				target.Results.Passed++
				numSucceeded++
			} else if err == nil {
				// Exited zero but didn't write the promised results file.
				target.Results.NumTests++
				target.Results.Failed++
				target.Results.Failures = append(target.Results.Failures, core.TestFailure{
					Name:   "Missing results",
					Stdout: string(out),
				})
				resultErr = fmt.Errorf("Test failed to produce output results file")
				resultMsg = fmt.Sprintf("Test apparently succeeded but failed to produce %s. Output: %s", outputFile, string(out))
				numFlakes++
			} else {
				// Failed outright with no results to parse.
				target.Results.NumTests++
				target.Results.Failed++
				target.Results.Failures = append(target.Results.Failures, core.TestFailure{
					Name:   "Test failed with no results",
					Stdout: string(out),
				})
				numFlakes++
				resultErr = err
				resultMsg = fmt.Sprintf("Test failed with no results. Output: %s", string(out))
			}
		} else {
			results, err2 := parseTestResults(target, outputFile, false)
			if err2 != nil {
				resultErr = err2
				resultMsg = fmt.Sprintf("Couldn't parse test output file: %s. Stdout: %s", err2, string(out))
				numFlakes++
			} else if err != nil && results.Failed == 0 {
				// Add a failure result to the test so it shows up in the final aggregation.
				target.Results.Failed = 1
				target.Results.Failures = append(results.Failures, core.TestFailure{
					Name:   "Return value",
					Type:   fmt.Sprintf("%s", err),
					Stdout: string(out),
				})
				numFlakes++
				resultErr = err
				resultMsg = fmt.Sprintf("Test returned nonzero but reported no errors: %s. Output: %s", err, string(out))
			} else if err == nil && results.Failed != 0 {
				resultErr = fmt.Errorf("Test returned 0 but still reported failures")
				resultMsg = fmt.Sprintf("Test returned 0 but still reported failures. Stdout: %s", string(out))
				numFlakes++
			} else if results.Failed != 0 {
				resultErr = fmt.Errorf("Tests failed")
				resultMsg = fmt.Sprintf("Tests failed. Stdout: %s", string(out))
				numFlakes++
			} else {
				numSucceeded++
				if !state.ShowTestOutput {
					// Save a bit of memory, if we're not printing results on success we will never use them again.
					target.Results.Output = ""
				}
			}
		}
	}
	if numSucceeded >= successesRequired {
		target.Results.Failures = nil // Remove any failures, they don't count
		target.Results.Failed = 0     // (they'll be picked up as flakes below)
		if numSucceeded > 0 && numFlakes > 0 {
			target.Results.Flakes = numFlakes
		}
		// Success, clean things up
		if moveAndCacheOutputFiles(&target.Results, &coverage) {
			logTestSuccess(state, tid, label, &target.Results, &coverage)
		}
		// Clean up the test directory.
		if state.CleanWorkdirs {
			if err := os.RemoveAll(target.TestDir()); err != nil {
				log.Warning("Failed to remove test directory for %s: %s", target.Label, err)
			}
		}
	} else {
		state.LogTestResult(tid, label, core.TargetTestFailed, &target.Results, &coverage, resultErr, resultMsg)
	}
}
// logTestSuccess reports a passing test run to the build state, summarising
// how many tests passed and how many were skipped or expected to fail.
func logTestSuccess(state *core.BuildState, tid int, label core.BuildLabel, results *core.TestResults, coverage *core.TestCoverage) {
	testWord := pluralise("test", results.NumTests)
	description := fmt.Sprintf("%d %s passed.", results.NumTests, testWord)
	if results.Skipped != 0 || results.ExpectedFailures != 0 {
		description = fmt.Sprintf("%d %s passed. %d skipped, %d expected %s",
			results.NumTests, testWord, results.Skipped, results.ExpectedFailures,
			pluralise("failure", results.ExpectedFailures))
	}
	state.LogTestResult(tid, label, core.TargetTested, results, coverage, nil, description)
}
// pluralise returns word unchanged when quantity is exactly 1, and with an
// "s" appended for any other quantity (including zero).
func pluralise(word string, quantity int) string {
	switch quantity {
	case 1:
		return word
	default:
		return word + "s"
	}
}
// prepareTestDir wipes and recreates the target's test directory, then links
// in every runtime file the test needs.
func prepareTestDir(graph *core.BuildGraph, target *core.BuildTarget) error {
	dir := target.TestDir()
	if err := os.RemoveAll(dir); err != nil {
		return err
	}
	if err := os.MkdirAll(dir, core.DirPermissions); err != nil {
		return err
	}
	for pair := range core.IterRuntimeFiles(graph, target, true) {
		if err := core.PrepareSourcePair(pair); err != nil {
			return err
		}
	}
	return nil
}
// testCommandAndEnv returns the test command & environment for a target.
// Any extra test arguments in the build state are appended to the command
// and also exported via the TESTS environment variable.
func testCommandAndEnv(state *core.BuildState, target *core.BuildTarget) (string, []string) {
	cmd := build.ReplaceTestSequences(target, target.GetTestCommand())
	env := core.BuildEnvironment(state, target, true)
	if len(state.TestArgs) == 0 {
		return cmd, env
	}
	args := strings.Join(state.TestArgs, " ")
	return cmd + " " + args, append(env, "TESTS="+args)
}
// runTest executes a single test target in its test directory and returns the
// combined output of the run.
func runTest(state *core.BuildState, target *core.BuildTarget) ([]byte, error) {
	cmd, env := testCommandAndEnv(state, target)
	log.Debug("Running test %s\nENVIRONMENT:\n%s\n%s", target.Label, strings.Join(env, "\n"), cmd)
	_, output, err := core.ExecWithTimeoutShell(target, target.TestDir(), env, target.TestTimeout, state.Config.Test.Timeout, state.ShowAllOutput, cmd, target.TestSandbox)
	return output, err
}
// prepareAndRunTest sets up a test directory and runs the test.
// Preparation failures are logged as build errors before returning.
func prepareAndRunTest(tid int, state *core.BuildState, target *core.BuildTarget) (out []byte, err error) {
	err = prepareTestDir(state.Graph, target)
	if err != nil {
		state.LogBuildError(tid, target.Label, core.TargetTestFailed, err, "Failed to prepare test directory for %s: %s", target.Label, err)
		return []byte{}, err
	}
	return runPossiblyContainerisedTest(state, target)
}
// parseCoverageFile parses the coverage output for a single target.
// A parse failure is logged, and the (possibly empty) coverage value is
// returned either way.
func parseCoverageFile(target *core.BuildTarget, coverageFile string) core.TestCoverage {
	cov, parseErr := parseTestCoverage(target, coverageFile)
	if parseErr != nil {
		log.Errorf("Failed to parse coverage file for %s: %s", target.Label, parseErr)
	}
	return cov
}
// RemoveCachedTestFiles removes any cached test or coverage result files for a
// target, along with any declared test outputs in its output directory.
func RemoveCachedTestFiles(target *core.BuildTarget) error {
	outDir := target.OutDir()
	for _, prefix := range []string{".test_results_", ".test_coverage_"} {
		if err := removeAnyFilesWithPrefix(outDir, prefix+target.Label.Name); err != nil {
			return err
		}
	}
	for _, output := range target.TestOutputs {
		if err := os.RemoveAll(path.Join(outDir, output)); err != nil {
			return err
		}
	}
	return nil
}
// removeAnyFilesWithPrefix deletes any files in a directory matching a given
// prefix. A directory that doesn't exist is not considered an error.
func removeAnyFilesWithPrefix(dir, prefix string) error {
	entries, err := ioutil.ReadDir(dir)
	if os.IsNotExist(err) {
		return nil
	} else if err != nil {
		return err
	}
	for _, entry := range entries {
		name := entry.Name()
		if !strings.HasPrefix(name, prefix) {
			continue
		}
		if err := os.RemoveAll(path.Join(dir, name)); err != nil {
			return err
		}
	}
	return nil
}
// moveAndCacheOutputFile moves a test output file into its final location, or
// writes a dummy placeholder when the file was never produced (so we can
// record that the step happened), then stores the result in the cache if one
// is configured.
func moveAndCacheOutputFile(state *core.BuildState, target *core.BuildTarget, hash []byte, from, to, filename, dummy string) error {
	if core.PathExists(from) {
		if err := os.Rename(from, to); err != nil {
			return err
		}
	} else {
		if dummy == "" {
			return nil
		}
		if err := ioutil.WriteFile(to, []byte(dummy), 0644); err != nil {
			return err
		}
	}
	if state.Cache != nil {
		state.Cache.StoreExtra(target, hash, filename)
	}
	return nil
}
// calcNumRuns works out how many total runs we should have for a test, and how
// many successes are required for it to count as success.
func calcNumRuns(numRuns, flakiness int) (int, int) {
	switch {
	case numRuns > 0 && flakiness > 0:
		// Explicit run count with a flaky test: require a proportionate
		// share of successes.
		return numRuns, int(math.Ceil(float64(numRuns) / float64(flakiness)))
	case numRuns > 0:
		return numRuns, numRuns
	case flakiness > 0:
		// Test is flaky: run it that many times, needing a single success.
		return flakiness, 1
	default:
		return 1, 1
	}
}
|
package main
import (
"bytes"
"errors"
"fmt"
"mime"
"strings"
"github.com/xeipuuv/gojsonschema"
)
// ResponseExpectation is an interface to any validation
// that needs to be performed on response.
type ResponseExpectation interface {
	// check reports whether the response meets the expectation; nil means it passed.
	check(resp *Response) error
	// desc returns a user-friendly description of the expectation.
	desc() string
}
// StatusCodeExpectation validates response HTTP code.
type StatusCodeExpectation struct {
	statusCode int
}

// check compares the actual HTTP status code against the expected one.
func (e StatusCodeExpectation) check(resp *Response) error {
	actual := resp.http.StatusCode
	if actual == e.statusCode {
		return nil
	}
	return fmt.Errorf("unexpected status code. Expected: %d, Actual: %d", e.statusCode, actual)
}

// desc describes the expectation for reporting purposes.
func (e StatusCodeExpectation) desc() string {
	return fmt.Sprintf("Status code is %d", e.statusCode)
}
// BodySchemaExpectation validates response body against schema.
// Content-Type header is used to identify either json schema or xsd is applied.
type BodySchemaExpectation struct {
	schema      []byte
	displayName string
}

// check dispatches schema validation based on the response media type.
// Only JSON is currently supported; any other content type is an error.
func (e BodySchemaExpectation) check(resp *Response) error {
	contentType, _, _ := mime.ParseMediaType(resp.http.Header.Get("content-type"))
	if contentType == "application/json" {
		return e.checkJSON(resp)
	}
	return fmt.Errorf("unsupported content type: %s", contentType)
}

// desc describes the expectation, including the schema's display name when set.
func (e BodySchemaExpectation) desc() string {
	tmpl := "BodyPath matches the schema"
	if e.displayName == "" {
		return tmpl
	}
	return fmt.Sprintf(tmpl+" (%s)", e.displayName)
}

// checkJSON validates the response body against the configured JSON schema.
func (e BodySchemaExpectation) checkJSON(resp *Response) error {
	schemaLoader := gojsonschema.NewBytesLoader(e.schema)
	documentLoader := gojsonschema.NewStringLoader(string(resp.body))

	result, err := gojsonschema.Validate(schemaLoader, documentLoader)
	if err != nil {
		// BUG FIX: gojsonschema.Validate errors when either the schema or the
		// response document cannot be loaded/parsed, so the old message
		// "failed to load schema file" was misleading for document errors.
		return fmt.Errorf("failed to validate body against schema: %s", err)
	}

	if !result.Valid() {
		msg := "Unexpected Body Schema:"
		for _, desc := range result.Errors() {
			msg = fmt.Sprintf(msg+"\n\t%s", desc)
		}
		return errors.New(msg)
	}

	return nil
}
// BodyExpectation validates that expected object is presented in the response.
// The expected body reflect required part of the response object.
type BodyExpectation struct {
	Strict       bool
	ExpectedBody interface{}
}

// check parses the (cached) response body and matches it against the
// expected structure using a body matcher.
func (e BodyExpectation) check(resp *Response) error {
	actualBody, err := resp.Body() // cached
	if err != nil {
		return errors.New("Can't parse response body." + " " + err.Error())
	}
	return NewBodyMatcher(e).check(actualBody)
}

// desc describes the expectation for reporting purposes.
func (e BodyExpectation) desc() string {
	return "Expected body's structure / values"
}
// BodyPathExpectation validates values under a certain path in a body.
// Applies to json and xml.
type BodyPathExpectation struct {
	pathExpectations map[string]interface{}
}

// check verifies every configured path holds its expected value; the first
// mismatch aborts the check.
func (e BodyPathExpectation) check(resp *Response) error {
	for pathStr, expectedValue := range e.pathExpectations {
		item := bodyExpectationItem{Path: pathStr, ExpectedValue: expectedValue}
		if err := responseBodyPathCheck(resp, item, checkExpectedPath); err != nil {
			return err
		}
	}
	return nil
}

// desc describes the expectation, including how many path checks it performs.
func (e BodyPathExpectation) desc() string {
	return fmt.Sprintf("Expected body's structure / values (%d checks)", len(e.pathExpectations))
}
// bodyExpectationItem pairs a body path with the value expected at that path.
type bodyExpectationItem struct {
	Path          string
	ExpectedValue interface{}
}

// checkExpectedPath verifies that the parsed body m holds the expected value
// at the item's path. It returns an error description, or "" on success.
func checkExpectedPath(m interface{}, pathItem interface{}) string {
	expectationItem, ok := pathItem.(bodyExpectationItem)
	if !ok {
		return fmt.Sprintf("Path Item: %v is invalid for expectation check", pathItem)
	}
	if err := SearchByPath(m, expectationItem.ExpectedValue, expectationItem.Path); err != nil {
		return err.Error()
	}
	return ""
}
// HeaderExpectation validates one header in a response.
type HeaderExpectation struct {
	Name        string
	Value       string
	ValueParser func(string) string
}

// check verifies the header is present and, when an expected value is
// configured, that it matches. The optional ValueParser normalises the raw
// header value before comparison.
func (e HeaderExpectation) check(resp *Response) error {
	value := resp.http.Header.Get(e.Name)
	if e.ValueParser != nil {
		value = e.ValueParser(value)
	}
	value = strings.TrimSpace(value)
	switch {
	case value == "":
		return fmt.Errorf("missing header. Expected \"%s: %s\"", e.Name, e.Value)
	case e.Value != "" && e.Value != value:
		return fmt.Errorf("unexpected header. Expected \"%s: %s\". Actual \"%s: %s\"", e.Name, e.Value, e.Name, value)
	}
	return nil
}

// desc describes the expectation for reporting purposes.
// NOTE(review): the format string is missing the closing quote after %s;
// preserved as-is since it is user-visible output.
func (e HeaderExpectation) desc() string {
	return fmt.Sprintf("Header '%s' matches expected value '%s", e.Name, e.Value)
}
// ContentTypeExpectation validates media type returned in the Content-Type header.
// Encoding information is excluded from matching value.
// E.g. "application/json;charset=utf-8" header transformed to "application/json" media type.
type ContentTypeExpectation struct {
	Value string
}

// check delegates to a HeaderExpectation that strips any encoding parameters
// from the Content-Type header before comparing.
func (e ContentTypeExpectation) check(resp *Response) error {
	stripParams := func(value string) string {
		mediaType, _, _ := mime.ParseMediaType(value)
		return mediaType
	}
	return HeaderExpectation{"content-type", e.Value, stripParams}.check(resp)
}

// desc describes the expectation for reporting purposes.
func (e ContentTypeExpectation) desc() string {
	return fmt.Sprintf("Content Type is '%s'", e.Value)
}
// AbsentExpectation validates paths are absent in response body
type AbsentExpectation struct {
	paths []string
}

// check verifies none of the configured paths can be found in the body.
func (e AbsentExpectation) check(resp *Response) error {
	for _, pathStr := range e.paths {
		if err := responseBodyPathCheck(resp, pathStr, checkAbsentPath); err != nil {
			return err
		}
	}
	return nil
}

// desc lists the paths expected to be absent.
func (e AbsentExpectation) desc() string {
	var buf bytes.Buffer
	buf.WriteString("Absent fields:")
	for _, path := range e.paths {
		fmt.Fprintf(&buf, "\n - %s", path)
	}
	return buf.String()
}
// pathCheckFunc inspects a parsed body m for a given path item, returning an
// error description or "" when the check passes.
type pathCheckFunc func(m interface{}, pathItem interface{}) string

// responseBodyPathCheck parses the (cached) response body and applies the
// supplied path check to it, converting a non-empty result into an error.
func responseBodyPathCheck(resp *Response, pathItem interface{}, checkPath pathCheckFunc) error {
	m, err := resp.Body() // cached
	if err != nil {
		// TODO specific message for functions
		return errors.New("Can't parse response body to Map." + " " + err.Error())
	}
	if msg := checkPath(m, pathItem); msg != "" {
		return errors.New(msg)
	}
	return nil
}
// checkAbsentPath reports an error description when the given path is present
// in the parsed body m; it returns "" when the path is absent as expected.
func checkAbsentPath(m interface{}, pathItem interface{}) string {
	pathStr, ok := pathItem.(string)
	if !ok {
		return fmt.Sprintf("Path Item: %v is invalid for absence check", pathItem)
	}
	if searchResult := Search(m, pathStr); len(searchResult) > 0 {
		return fmt.Sprintf("Value expected to be absent was found: %v, path: %v", searchResult, pathStr)
	}
	return ""
}
|
// Package models provides an interface for all models, ensuring all models
// have validation functions of the same format.
package models
import (
"reflect"
"fmt"
"errors"
)
// Model is an interface for the models of the data from incoming requests.
type Model interface {
	// Validate checks the model's fields and returns an error describing any
	// constraint violations, or nil when the model is valid.
	Validate() error
}
// ValidateRanges validates whether the values of the fields of the model
// lie within the specified ranges.
//
// For string fields, colMin/colMax bound the length; for int64 fields they
// bound the value. Fields named in requiredCols must be present (string
// fields must be non-empty). Returns nil when everything validates,
// otherwise a single error aggregating every violation.
func ValidateRanges(p Model, requiredCols map[string]bool, colMin,
	colMax map[string]int64) error {
	var errorMessage string
	const errMsgStrMin = "field %s min length is %d, "
	const errMsgStrMax = "field %s max length is %d, "
	const errMsgIntMin = "field %s min val is %d, "
	const errMsgIntMax = "field %s max val is %d, "
	const errMsgMissing = "field %s required, "

	// Work on a copy so the caller's requiredCols map is not mutated
	// (the original implementation flipped the caller's entries to false).
	missing := make(map[string]bool, len(requiredCols))
	for k, v := range requiredCols {
		missing[k] = v
	}

	pRefl := reflect.ValueOf(p).Elem()
	pType := pRefl.Type()
	for i := 0; i < pRefl.NumField(); i++ {
		key := pType.Field(i).Name
		t := pRefl.Field(i).Type()
		var strval string
		var intval int64
		if t == reflect.TypeOf(strval) {
			strval := pRefl.Field(i).String()
			if strval == "" {
				// Empty strings skip both range checks and the presence
				// marking below, so a required-but-empty string field is
				// still reported as missing.
				continue
			}
			if targ, ok := colMin[key]; ok && int64(len(strval)) < targ {
				errorMessage += fmt.Sprintf(errMsgStrMin, key, targ)
			}
			if targ, ok := colMax[key]; ok && int64(len(strval)) > targ {
				errorMessage += fmt.Sprintf(errMsgStrMax, key, targ)
			}
		} else if t == reflect.TypeOf(intval) {
			intval := pRefl.Field(i).Int()
			if targ, ok := colMin[key]; ok && intval < targ {
				errorMessage += fmt.Sprintf(errMsgIntMin, key, targ)
			}
			if targ, ok := colMax[key]; ok && intval > targ {
				errorMessage += fmt.Sprintf(errMsgIntMax, key, targ)
			}
		}
		if _, ok := missing[key]; ok {
			missing[key] = false
		}
	}
	for k, v := range missing {
		if v {
			// BUG FIX: this previously used fmt.Sprint, which performs no
			// verb substitution — the literal "%s" ended up in the message.
			errorMessage += fmt.Sprintf(errMsgMissing, k)
		}
	}
	if errorMessage == "" {
		return nil
	}
	// Trim the trailing ", " left by the last appended message.
	return errors.New(errorMessage[:len(errorMessage)-2])
}
|
package config
import (
"fmt"
"regexp"
)
// Config api/db shared config object
type Config struct {
	// Role selects the node's mode: 'api' or 'db'.
	Role string
	// DBAddr is the address of the db.
	DBAddr string
	// DBPort is the port of the db.
	DBPort string
	// VRPort is the port of the vr service.
	VRPort string
	// VRPeerURLs is the list of vr peer urls.
	VRPeerURLs []string
	// DBPeerURLs is the list of peer urls of dbs.
	DBPeerURLs []string
}
// DBURL is a convenience getter returning the database endpoint as "addr:port".
func (config Config) DBURL() string {
	return config.DBAddr + ":" + config.DBPort
}
// VRURL is a convenience getter returning the vr endpoint as "addr:port".
// Note it deliberately reuses DBAddr as the host, paired with VRPort.
func (config Config) VRURL() string {
	return config.DBAddr + ":" + config.VRPort
}
// hostPortPattern matches "host:port"-shaped strings. Compiled once at
// package level so Validate doesn't recompile it on every call.
var hostPortPattern = regexp.MustCompile("^.*?:.*?$")

// Validate checks the config is coherent for its role: an 'api' node needs a
// well-formed DB URL; a 'db' node needs a well-formed VR URL that also appears
// in VRPeerURLs. Any other role is rejected.
func (config Config) Validate() error {
	switch config.Role {
	case "api":
		if !hostPortPattern.MatchString(config.DBURL()) {
			return fmt.Errorf("misconfigured DBAddr or DBPort")
		}
		return nil
	case "db":
		if !hostPortPattern.MatchString(config.VRURL()) {
			return fmt.Errorf("misconfigured DBAddr or VRPort")
		}
		if config.VRMe() == -1 {
			return fmt.Errorf("VRURL %s must be in VRPeerURLs", config.VRURL())
		}
		return nil
	default:
		return fmt.Errorf("unknown role %s", config.Role)
	}
}
// VRMe returns the index of the local machine in the vr peer array, or -1
// when the local VR URL is not among the configured peers.
func (config Config) VRMe() int {
	me := config.VRURL()
	for idx, url := range config.VRPeerURLs {
		if url == me {
			return idx
		}
	}
	return -1
}
|
package script
// Float is a floating-point number.
type Float struct {
	Type // embedded base Type (declared elsewhere in this package)
}
|
package vrf
import (
"crypto/sha512"
"fmt"
cmn "github.com/HNB-ECO/HNB-Blockchain/HNB/consensus/algorand/common"
"github.com/HNB-ECO/HNB-Blockchain/HNB/consensus/algorand/types"
)
// CalcBFTGroupMembersByVRF deterministically selects groupSize distinct
// members from candidates, seeded by the VRF value. It returns an error (and
// the unmodified candidate list) when the candidate set is not strictly
// larger than the requested group size.
func CalcBFTGroupMembersByVRF(VRFValue []byte, candidates []*cmn.BftGroupSwitchAdvice, groupSize int) ([]*cmn.BftGroupSwitchAdvice, error) {
	// Candidate set must be strictly larger than groupSize.
	candidNum := len(candidates)
	if candidNum <= groupSize {
		// BUG FIX: the previous error string had an "err:" prefix and a
		// trailing period + newline, which go vet flags for fmt.Errorf.
		return candidates, fmt.Errorf("candidNum %d is equal or smaller than groupSize %d", candidNum, groupSize)
	}
	nodes := make([]*cmn.BftGroupSwitchAdvice, 0, groupSize)
	selected := make(map[uint32]bool, groupSize)
	// Keep drawing indices until enough distinct validators are chosen.
	// NOTE(review): CalcParticipant indexes the 64-byte SHA-512 digest at
	// index/8 without wrapping, so if collisions ever pushed i past 511 this
	// would panic — worth confirming/bounding upstream.
	for i := 0; len(nodes) < groupSize; i++ {
		nodeID := CalcParticipant(VRFValue, candidNum, uint32(i))
		if !selected[nodeID] {
			// Add new validator to list
			nodes = append(nodes, candidates[nodeID])
			selected[nodeID] = true
		}
	}
	return nodes, nil
}
// CalcProposerByVRF deterministically picks the proposer for a given
// height/round from the validator set, seeded by the VRF value.
func CalcProposerByVRF(VRFValue []byte, valSet *types.ValidatorSet, height uint64, round int32) *types.Validator {
	seedIndex := (height + uint64(round)) % uint64(len(VRFValue))
	participant := CalcParticipant(VRFValue, valSet.Size(), uint32(seedIndex))
	_, proposer := valSet.GetByIndex(int(participant))
	return proposer
}
// CalcParticipant maps (vrf, index) to a participant id in [0, candidNum).
// Zc add in 18.08.14 and modified in 18.09.13 to achieve enhanced fairness.
// TODO: The fairness should be analysed.
// SHA512(vrf) is used instead of the raw VRF value: for each index a window
// is assembled from two adjacent digest bytes and reduced modulo candidNum.
func CalcParticipant(vrf []byte, candidNum int, index uint32) uint32 {
	digest := sha512.Sum512(vrf)
	hash := digest[:]
	byteIdx := index / 8
	shift1 := index % 8
	shift2 := 8 + shift1
	lo := uint32(hash[byteIdx]) >> shift1
	var hi uint32
	if byteIdx+1 < uint32(len(hash)) {
		hi = uint32(hash[byteIdx+1])
	} else {
		// Wrap to the first digest byte when reading past the end.
		hi = uint32(hash[0])
	}
	hi &= (1 << shift2) - 1
	return ((hi << (8 - shift1)) + lo) % uint32(candidNum)
}
|
package msgHandler
import (
"fmt"
"github.com/HNB-ECO/HNB-Blockchain/HNB/consensus/algorand/state"
"github.com/HNB-ECO/HNB-Blockchain/HNB/consensus/algorand/types"
"github.com/HNB-ECO/HNB-Blockchain/HNB/ledger"
"time"
)
// rollBack rolls the ledger back to block height begin.
// NOTE(review): the end parameter is unused — confirm whether the ledger API
// was meant to roll back the whole [begin, end] range.
func (h *TDMMsgHandler) rollBack(begin, end uint64) error {
	return ledger.RollBackLedger(begin)
}
// reloadLastBlock re-derives the handler's consensus state from the most
// recent block stored in the ledger: it loads that block, rebuilds the
// last-commit state from it, records its hash as PreviousHash, reloads the
// round state, and finally reconstructs the last-commit vote set.
// Any failing step aborts and returns the error.
func (h *TDMMsgHandler) reloadLastBlock() error {
	height, err := ledger.GetBlockHeight()
	if err != nil {
		return err
	}
	// The newest block is read at height-1 — assumes GetBlockHeight returns
	// the block count / next height; TODO confirm against the ledger API.
	blk, err := ledger.GetBlock(height - 1)
	if err != nil {
		return err
	}
	if blk == nil {
		return fmt.Errorf("blk %d is nil", height-1)
	}
	ConsLog.Infof(LOGTABLE_CONS, "the last blk %d", blk.Header.BlockNum)
	status, err := h.LoadLastCommitStateFromBlkAndMem(blk)
	if err != nil {
		return err
	}
	ConsLog.Infof(LOGTABLE_CONS, "the last blk status height %d", status.LastBlockNum)
	hash, err := ledger.CalcBlockHash(blk)
	if err != nil {
		return err
	}
	status.PreviousHash = hash
	err = h.reloadState(*status)
	if err != nil {
		return err
	}
	ConsLog.Infof(LOGTABLE_CONS, "after update status height %d", h.Height)
	err = h.reconstructLastCommit(*status, blk)
	if err != nil {
		return err
	}
	return nil
}
// reloadState resets the handler's round state to begin consensus at the
// height following state.LastBlockNum: round/step bookkeeping and proposal
// fields are reinitialised, fresh vote sets are created, and the previous
// commit round's precommits are carried over as LastCommit.
func (h *TDMMsgHandler) reloadState(state state.State) error {
	validators := state.Validators
	lastPrecommits := (*types.VoteSet)(nil)
	if h.CommitRound > -1 && h.Votes != nil {
		// NOTE(review): this majority check has an empty body — comparable
		// implementations fail loudly when the commit round lacks a +2/3
		// precommit majority. Confirm whether dropping that guard was
		// intentional.
		if !h.Votes.Precommits(h.CommitRound).HasTwoThirdsMajority() {
		}
		lastPrecommits = h.Votes.Precommits(h.CommitRound)
	}
	height := state.LastBlockNum + 1
	h.updateNextBlkNum(height)
	h.updateRoundStep(0, types.RoundStepNewHeight)
	// Derive the new round's start time from the last commit time when one
	// is available, otherwise from the current time.
	if h.CommitTime.IsZero() {
		h.StartTime = h.Commit(time.Now())
	} else {
		h.StartTime = h.Commit(h.CommitTime)
	}
	h.Validators = validators
	h.Proposal = nil
	h.ProposalBlock = nil
	h.ProposalBlockParts = nil
	h.Votes = types.NewBlkNumVoteSet(height, validators)
	h.CommitRound = -1
	h.LastCommit = lastPrecommits
	h.LastValidators = state.LastValidators
	h.LastCommitState = state
	// Finally, broadcast RoundState (skipped while syncing).
	if !h.isSyncStatus.Get() {
		h.newStep()
	}
	return nil
}
|
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
// vespa document command
// author: bratseth
package cmd
import (
"fmt"
"io"
"strings"
"time"
"github.com/fatih/color"
"github.com/spf13/cobra"
"github.com/vespa-engine/vespa/client/go/util"
"github.com/vespa-engine/vespa/client/go/vespa"
)
// addDocumentFlags registers the flags shared by all document subcommands:
// -v/--verbose prints the equivalent curl command, and -T/--timeout sets the
// request timeout in seconds (default 60).
func addDocumentFlags(cmd *cobra.Command, printCurl *bool, timeoutSecs *int) {
	cmd.PersistentFlags().BoolVarP(printCurl, "verbose", "v", false, "Print the equivalent curl command for the document operation")
	cmd.PersistentFlags().IntVarP(timeoutSecs, "timeout", "T", 60, "Timeout for the document request in seconds")
}
// newDocumentCmd builds the top-level `vespa document` command, which sends a
// JSON file containing any document operation (put/update/remove) to Vespa.
func newDocumentCmd(cli *CLI) *cobra.Command {
	var (
		printCurl   bool
		timeoutSecs int
	)
	cmd := &cobra.Command{
		Use:   "document json-file",
		Short: "Issue a document operation to Vespa",
		Long: `Issue a document operation to Vespa.
The operation must be on the format documented in
https://docs.vespa.ai/en/reference/document-json-format.html#document-operations
When this returns successfully, the document is guaranteed to be visible in any
subsequent get or query operation.
To feed with high throughput, https://docs.vespa.ai/en/vespa-feed-client.html
should be used instead of this.`,
		Example:           `$ vespa document src/test/resources/A-Head-Full-of-Dreams.json`,
		DisableAutoGenTag: true,
		SilenceUsage:      true,
		Args:              cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			service, err := documentService(cli)
			if err != nil {
				return err
			}
			return printResult(cli, vespa.Send(args[0], service, operationOptions(cli.Stderr, printCurl, timeoutSecs)), false)
		},
	}
	addDocumentFlags(cmd, &printCurl, &timeoutSecs)
	return cmd
}
// newDocumentPutCmd builds `vespa document put`, which writes (or fully
// replaces) a single document from a JSON file. An explicit id argument takes
// precedence over the id in the file.
func newDocumentPutCmd(cli *CLI) *cobra.Command {
	var (
		printCurl   bool
		timeoutSecs int
	)
	cmd := &cobra.Command{
		Use:   "put [id] json-file",
		Short: "Writes a document to Vespa",
		Long: `Writes the document in the given file to Vespa.
If the document already exists, all its values will be replaced by this document.
If the document id is specified both as an argument and in the file the argument takes precedence.`,
		Args: cobra.RangeArgs(1, 2),
		Example: `$ vespa document put src/test/resources/A-Head-Full-of-Dreams.json
$ vespa document put id:mynamespace:music::a-head-full-of-dreams src/test/resources/A-Head-Full-of-Dreams.json`,
		DisableAutoGenTag: true,
		SilenceUsage:      true,
		RunE: func(cmd *cobra.Command, args []string) error {
			service, err := documentService(cli)
			if err != nil {
				return err
			}
			// With a single argument the document id comes from the file itself.
			if len(args) == 1 {
				return printResult(cli, vespa.Put("", args[0], service, operationOptions(cli.Stderr, printCurl, timeoutSecs)), false)
			} else {
				return printResult(cli, vespa.Put(args[0], args[1], service, operationOptions(cli.Stderr, printCurl, timeoutSecs)), false)
			}
		},
	}
	addDocumentFlags(cmd, &printCurl, &timeoutSecs)
	return cmd
}
// newDocumentUpdateCmd builds `vespa document update`, which applies a partial
// update to an existing document from a JSON file. An explicit id argument
// takes precedence over the id in the file.
func newDocumentUpdateCmd(cli *CLI) *cobra.Command {
	var (
		printCurl   bool
		timeoutSecs int
	)
	cmd := &cobra.Command{
		Use:   "update [id] json-file",
		Short: "Modifies some fields of an existing document",
		Long: `Updates the values of the fields given in a json file as specified in the file.
If the document id is specified both as an argument and in the file the argument takes precedence.`,
		Args: cobra.RangeArgs(1, 2),
		Example: `$ vespa document update src/test/resources/A-Head-Full-of-Dreams-Update.json
$ vespa document update id:mynamespace:music::a-head-full-of-dreams src/test/resources/A-Head-Full-of-Dreams.json`,
		DisableAutoGenTag: true,
		SilenceUsage:      true,
		RunE: func(cmd *cobra.Command, args []string) error {
			service, err := documentService(cli)
			if err != nil {
				return err
			}
			// With a single argument the document id comes from the file itself.
			if len(args) == 1 {
				return printResult(cli, vespa.Update("", args[0], service, operationOptions(cli.Stderr, printCurl, timeoutSecs)), false)
			} else {
				return printResult(cli, vespa.Update(args[0], args[1], service, operationOptions(cli.Stderr, printCurl, timeoutSecs)), false)
			}
		},
	}
	addDocumentFlags(cmd, &printCurl, &timeoutSecs)
	return cmd
}
// newDocumentRemoveCmd builds `vespa document remove`, which deletes a
// document identified either directly by id or via a JSON operation file.
func newDocumentRemoveCmd(cli *CLI) *cobra.Command {
	var (
		printCurl   bool
		timeoutSecs int
	)
	cmd := &cobra.Command{
		Use:   "remove id | json-file",
		Short: "Removes a document from Vespa",
		Long: `Removes the document specified either as a document id or given in the json file.
If the document id is specified both as an argument and in the file the argument takes precedence.`,
		Args: cobra.ExactArgs(1),
		Example: `$ vespa document remove src/test/resources/A-Head-Full-of-Dreams-Remove.json
$ vespa document remove id:mynamespace:music::a-head-full-of-dreams`,
		DisableAutoGenTag: true,
		SilenceUsage:      true,
		RunE: func(cmd *cobra.Command, args []string) error {
			service, err := documentService(cli)
			if err != nil {
				return err
			}
			// An argument starting with "id:" is a document id; anything else
			// is treated as a JSON operation file.
			if strings.HasPrefix(args[0], "id:") {
				return printResult(cli, vespa.RemoveId(args[0], service, operationOptions(cli.Stderr, printCurl, timeoutSecs)), false)
			} else {
				return printResult(cli, vespa.RemoveOperation(args[0], service, operationOptions(cli.Stderr, printCurl, timeoutSecs)), false)
			}
		},
	}
	addDocumentFlags(cmd, &printCurl, &timeoutSecs)
	return cmd
}
// newDocumentGetCmd builds `vespa document get`, which fetches a document by
// id and prints it.
func newDocumentGetCmd(cli *CLI) *cobra.Command {
	var (
		printCurl   bool
		timeoutSecs int
	)
	cmd := &cobra.Command{
		Use:               "get id",
		Short:             "Gets a document",
		Args:              cobra.ExactArgs(1),
		DisableAutoGenTag: true,
		SilenceUsage:      true,
		Example:           `$ vespa document get id:mynamespace:music::a-head-full-of-dreams`,
		RunE: func(cmd *cobra.Command, args []string) error {
			service, err := documentService(cli)
			if err != nil {
				return err
			}
			// payloadOnlyOnSuccess=true: print just the document payload on success.
			return printResult(cli, vespa.Get(args[0], service, operationOptions(cli.Stderr, printCurl, timeoutSecs)), true)
		},
	}
	addDocumentFlags(cmd, &printCurl, &timeoutSecs)
	return cmd
}
// documentService resolves the Vespa document API service for the CLI's
// configured target.
func documentService(cli *CLI) (*vespa.Service, error) {
	return cli.service(vespa.DocumentService, 0, "")
}
// operationOptions builds the options for a document operation: the
// equivalent curl command is written to stderr only when printCurl is set,
// and the timeout is given in seconds.
func operationOptions(stderr io.Writer, printCurl bool, timeoutSecs int) vespa.OperationOptions {
	var curlOutput io.Writer = io.Discard
	if printCurl {
		curlOutput = stderr
	}
	return vespa.OperationOptions{
		CurlOutput: curlOutput,
		Timeout:    time.Duration(timeoutSecs) * time.Second,
	}
}
// printResult renders an operation result: failures go to stderr, successes
// to stdout. When payloadOnlyOnSuccess is set and a payload is present, the
// success banner is suppressed and only the payload is printed. A failed
// operation returns a quiet hint error.
func printResult(cli *CLI, result util.OperationResult, payloadOnlyOnSuccess bool) error {
	out := cli.Stdout
	if !result.Success {
		out = cli.Stderr
		fmt.Fprintln(out, color.RedString("Error:"), result.Message)
	} else if !(payloadOnlyOnSuccess && result.Payload != "") {
		fmt.Fprintln(out, color.GreenString("Success:"), result.Message)
	}
	if result.Detail != "" {
		fmt.Fprintln(out, color.YellowString(result.Detail))
	}
	if result.Payload != "" {
		if !payloadOnlyOnSuccess {
			fmt.Fprintln(out)
		}
		fmt.Fprintln(out, result.Payload)
	}
	if result.Success {
		return nil
	}
	err := errHint(fmt.Errorf("document operation failed"))
	err.quiet = true
	return err
}
|
package main
import "fmt"
// main demonstrates basic map usage: construction, key lookup, insertion,
// and iteration (iteration order is randomised by the Go runtime).
func main() {
	m := make(map[string]string)
	m["nombre"] = "Rodolfo"
	m["apellido"] = "Guzmán"
	fmt.Printf("El mapa es: %s \n", m)
	fmt.Printf("\n El nombre es: %s \n", m["nombre"])
	m["nickname"] = "Santo"
	fmt.Printf("\n El tipo es: %T \n\n", m)
	for clave, valor := range m {
		fmt.Printf("%s \t %s \n", clave, valor)
	}
}
|
package main
import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"sync"
"github.com/PuerkitoBio/goquery"
"github.com/parnurzeal/gorequest"
)
// Config holds the webhook settings loaded from config.json.
// The webhook fields match a Slack-style incoming webhook — confirm against
// the receiving service.
type Config struct {
	WebhookURL string `json:"webhook_url"`
	Channel    string `json:"channel"`
	BotName    string `json:"bot_name"`
}
// Site describes one page to scan: the URL to fetch, the selector of the
// element to look for, and a text field (not referenced by scanSite).
type Site struct {
	URL     string `json:"url"`
	Element string `json:"element"`
	Text    string `json:"text"`
}
// Payload is the JSON message POSTed to the webhook for each match.
type Payload struct {
	Username string `json:"username"`
	Channel  string `json:"channel"`
	Text     string `json:"text"`
}
// waitGroup tracks one outstanding scan per configured site (see main).
var waitGroup sync.WaitGroup

// config holds the webhook settings loaded at startup by main.
var config Config
// main loads the webhook config and site list, then scans every site
// concurrently, blocking until all scans have completed.
func main() {
	if err := json.Unmarshal(read("./config.json"), &config); err != nil {
		log.Print(err)
	}

	var sites []Site
	if err := json.Unmarshal(read("./urls.json"), &sites); err != nil {
		log.Print(err)
	}

	waitGroup.Add(len(sites))
	for _, site := range sites {
		go scanSite(site)
	}
	waitGroup.Wait()
}
// scanSite fetches a site, looks for the configured element, and posts each
// non-empty match to the webhook. It releases its waitGroup slot exactly once.
func scanSite(site Site) {
	// BUG FIX: register Done up front so every exit path releases the
	// WaitGroup exactly once. The original mixed manual Done() calls with a
	// defer registered at the end of the function, which could call Done
	// twice (negative-counter panic) when a webhook post failed.
	defer waitGroup.Done()

	doc, err := goquery.NewDocument(site.URL)
	if err != nil {
		log.Print(err)
		return
	}

	doc.Find(site.Element).Each(func(i int, s *goquery.Selection) {
		result := s.Text()
		if len(result) == 0 {
			fmt.Printf("No result for %s at %s", site.Element, site.URL)
			return
		}
		fmt.Printf("Found: %s\n", result)

		text := fmt.Sprintf("Found: %s - <%s>", result, site.URL)
		payload := Payload{Username: config.BotName, Channel: config.Channel, Text: text}
		resp, _, errs := gorequest.New().Post(config.WebhookURL).
			Send(payload).
			End()
		// BUG FIX: check errs before touching resp — on a failed request
		// resp may be nil, and the original dereferenced resp.Status first.
		if len(errs) > 0 {
			log.Print(errs)
			return
		}
		fmt.Println(resp.Status)
	})
}
// read returns the contents of the file at path. On failure the error is
// printed and nil is returned.
func read(path string) []byte {
	data, err := ioutil.ReadFile(path)
	if err != nil {
		fmt.Println(err.Error())
		return nil
	}
	return data
}
|
package main
import (
"net/http"
"github.com/gin-gonic/gin"
"os"
"io"
"fmt"
"strings"
"errors"
_ "net/http/pprof"
)
// Stack notes: gin, redis, nginx, gorm, etcd, kafka, es, kibana.
// Person is the JSON payload returned by the /user/:name/:age route.
type Person struct {
	Name string `json:"name"`
	Age  string `json:"age"`
}
// Middleware is a gin middleware performing parameter validation: any request
// whose URL path contains "888" is answered immediately and the rest of the
// handler chain is aborted.
// NOTE(review): the out parameter is currently unused.
func Middleware(out io.Writer) gin.HandlerFunc {
	return func(c *gin.Context) {
		fmt.Println(c.Request.URL, "=====================")
		if !strings.Contains(c.Request.URL.Path, "888") {
			c.Next()
			return
		}
		c.JSON(http.StatusOK, "404 middleware")
		fmt.Println("xxxxxx")
		// Validation failed: skip the remaining handlers.
		c.Abort()
	}
}
// main wires up the gin router with logging, recovery and the demo
// middleware, registers the v1 routes, and serves on :8080.
func main() {
	// BUG FIX: set up the log file and gin.DefaultWriter BEFORE constructing
	// gin.Logger() — the logger middleware captures the writer when it is
	// created, so the original post-hoc assignment left gin.log empty.
	// Also check the os.Create error instead of discarding it.
	f, err := os.Create("gin.log")
	if err != nil {
		fmt.Println("failed to create gin.log:", err)
	} else {
		gin.DefaultWriter = io.MultiWriter(f)
	}

	router := gin.New()
	router.Use(gin.Logger())
	router.Use(gin.Recovery())
	router.Use(Middleware(gin.DefaultErrorWriter))

	v1 := router.Group("v1")
	v1.GET("/user/:name/:age", func(context *gin.Context) {
		name := context.Param("name")
		age := context.Param("age")
		p := Person{name, age}
		context.Error(errors.New(name))
		context.JSON(http.StatusOK, p)
	})
	v1.Handle("GET", "/test/:id", func(context *gin.Context) {
		id := context.Param("id")
		context.JSON(http.StatusOK, id)
	})

	// Surface startup errors (e.g. port already in use) instead of ignoring them.
	if err := router.Run(":8080"); err != nil {
		fmt.Println(err)
	}
}
|
//************************************************************************//
// rsc - RightScale API command line tool
//
// Generated with:
// $ praxisgen -metadata=ss/ssc/restful_doc -output=ss/ssc -pkg=ssc -target=1.0 -client=API
//
// The content of this file is auto-generated, DO NOT MODIFY
//************************************************************************//
package ssc
import (
"regexp"
"github.com/rightscale/rsc/metadata"
)
// Consists of a map of resource name to resource metadata.
var GenMetadata = map[string]*metadata.Resource{
"AccountPreference": &metadata.Resource{
Name: "AccountPreference",
Description: `The AccountPreference resource stores preferences that apply account-wide, such as UI customization settings and other settings.
The Self-Service portal uses some of these preferences in the portal itself, and this resource allows you to extend the settings
to use in your own integration.`,
Identifier: "application/vnd.rightscale.self_service.account_preference",
Attributes: []*metadata.Attribute{
&metadata.Attribute{
Name: "created_by",
FieldName: "CreatedBy",
FieldType: "*User",
},
&metadata.Attribute{
Name: "group_name",
FieldName: "GroupName",
FieldType: "string",
},
&metadata.Attribute{
Name: "href",
FieldName: "Href",
FieldType: "string",
},
&metadata.Attribute{
Name: "kind",
FieldName: "Kind",
FieldType: "string",
},
&metadata.Attribute{
Name: "name",
FieldName: "Name",
FieldType: "string",
},
&metadata.Attribute{
Name: "timestamps",
FieldName: "Timestamps",
FieldType: "*TimestampsStruct",
},
&metadata.Attribute{
Name: "value",
FieldName: "Value",
FieldType: "string",
},
},
Actions: []*metadata.Action{
&metadata.Action{
Name: "index",
Description: `List the AccountPreferences for this account.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/accounts/%s/account_preferences",
Variables: []string{"account_id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/account_preferences`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by group, so that only AccountPreferences belonging to that group are returned`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by group, so that only AccountPreferences belonging to that group are returned`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "show",
Description: `Get details for a particular AccountPreference`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/accounts/%s/account_preferences/%s",
Variables: []string{"account_id", "name"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/account_preferences/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
&metadata.Action{
Name: "create",
Description: `Create a new AccountPreference or update an existing AccountPreference with the new value`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/catalog/accounts/%s/account_preferences",
Variables: []string{"account_id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/account_preferences`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "group_name",
Description: `The group to place this AccountPreference in. Any string value is accepted - the group does not need to exist`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The name for the new AccountPreference or AccountPreference to update (note this is the key for this resource)`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "value",
Description: `The value to set for this AccountPreference`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "group_name",
Description: `The group to place this AccountPreference in. Any string value is accepted - the group does not need to exist`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The name for the new AccountPreference or AccountPreference to update (note this is the key for this resource)`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "value",
Description: `The value to set for this AccountPreference`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "delete",
Description: `Delete an AccountPreference`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "DELETE",
Pattern: "/api/catalog/accounts/%s/account_preferences/%s",
Variables: []string{"account_id", "name"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/account_preferences/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
},
},
"Application": &metadata.Resource{
Name: "Application",
Description: `An Application is an element in the Catalog that can be launched by users. Applications are generally created by uploading CAT
files to the Designer and publishing them to the Catalog, though they can also be created via API calls to the Catalog directly without
going through Designer. If an Application was created from Designer through the publish action, it contains a link back to the Template
resource in Designer.
In the Self-Service portal, an Application is equivalent to an item in the Catalog. Most users have access to these Application resources
and can launch them to create Executions in the Manager application.`,
Identifier: "application/vnd.rightscale.self_service.application",
Attributes: []*metadata.Attribute{
&metadata.Attribute{
Name: "compilation_href",
FieldName: "CompilationHref",
FieldType: "string",
},
&metadata.Attribute{
Name: "compiled_cat",
FieldName: "CompiledCat",
FieldType: "string",
},
&metadata.Attribute{
Name: "compiler_ver",
FieldName: "CompilerVer",
FieldType: "string",
},
&metadata.Attribute{
Name: "created_by",
FieldName: "CreatedBy",
FieldType: "*User",
},
&metadata.Attribute{
Name: "href",
FieldName: "Href",
FieldType: "string",
},
&metadata.Attribute{
Name: "id",
FieldName: "Id",
FieldType: "string",
},
&metadata.Attribute{
Name: "kind",
FieldName: "Kind",
FieldType: "string",
},
&metadata.Attribute{
Name: "long_description",
FieldName: "LongDescription",
FieldType: "string",
},
&metadata.Attribute{
Name: "name",
FieldName: "Name",
FieldType: "string",
},
&metadata.Attribute{
Name: "parameters",
FieldName: "Parameters",
FieldType: "[]*Parameter",
},
&metadata.Attribute{
Name: "required_parameters",
FieldName: "RequiredParameters",
FieldType: "[]string",
},
&metadata.Attribute{
Name: "schedule_required",
FieldName: "ScheduleRequired",
FieldType: "bool",
},
&metadata.Attribute{
Name: "schedules",
FieldName: "Schedules",
FieldType: "[]*Schedule",
},
&metadata.Attribute{
Name: "short_description",
FieldName: "ShortDescription",
FieldType: "string",
},
&metadata.Attribute{
Name: "template_info",
FieldName: "TemplateInfo",
FieldType: "*TemplateInfo",
},
&metadata.Attribute{
Name: "timestamps",
FieldName: "Timestamps",
FieldType: "*TimestampsStruct",
},
},
Actions: []*metadata.Action{
&metadata.Action{
Name: "index",
Description: `List the Applications available in the specified Catalog.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/catalogs/%s/applications",
Variables: []string{"catalog_id"},
Regexp: regexp.MustCompile(`/api/catalog/catalogs/([^/]+)/applications`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `An optional list of Application IDs to retrieve. If not specified, all are returned.`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `An optional list of Application IDs to retrieve. If not specified, all are returned.`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "show",
Description: `Show detailed information about a given Application.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/catalogs/%s/applications/%s",
Variables: []string{"catalog_id", "id"},
Regexp: regexp.MustCompile(`/api/catalog/catalogs/([^/]+)/applications/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded"},
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded"},
},
},
},
&metadata.Action{
Name: "create",
Description: `Create a new Application in the Catalog.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/catalog/catalogs/%s/applications",
Variables: []string{"catalog_id"},
Regexp: regexp.MustCompile(`/api/catalog/catalogs/([^/]+)/applications`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "compiled_cat[cat_parser_gem_version]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[compiler_ver]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[conditions]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[definitions]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[dependency_hashes][]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[imports]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[long_description]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[mappings]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[name]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[dependency_hashes][]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[operations]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[outputs]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[package]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[parameters]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[permissions]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[dependency_hashes][]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[resources]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[rs_ca_ver]",
Description: ``,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[short_description]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[source]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "long_description",
Description: `Long description of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `Name of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedule_required",
Description: `Whether the CloudApp requires a schedule to be provided at launch time. If set to false, allows user to pick from '24/7' schedule when launching in the UI`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][created_from]",
Description: `optional HREF of the Schedule resource used to create this schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][description]",
Description: `An optional description that will help users understand the purpose of the Schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][name]",
Description: `The name of the Schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][hour]",
Description: `The hour of day from 0 to 23.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][minute]",
Description: `The minute from 0 to 59.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][rule]",
Description: `A RRULE string describing the recurrence rule.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][hour]",
Description: `The hour of day from 0 to 23.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][minute]",
Description: `The minute from 0 to 59.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][rule]",
Description: `A RRULE string describing the recurrence rule.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "short_description",
Description: `Short description of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "template_href",
Description: `If created from a Template, the template href can be provided to maintain the relationship between the resources.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "compiled_cat",
Description: `The compiled source of the CAT file. This can be obtained by calling Template.compile or Template.show in the Designer application.`,
Type: "*CompiledCAT",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "long_description",
Description: `Long description of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `Name of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedule_required",
Description: `Whether the CloudApp requires a schedule to be provided at launch time. If set to false, allows user to pick from '24/7' schedule when launching in the UI`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules",
Description: `Schedules available to users when launching the application`,
Type: "[]*Schedule",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "short_description",
Description: `Short description of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "template_href",
Description: `If created from a Template, the template href can be provided to maintain the relationship between the resources.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "update",
Description: `Update the content of an existing Application.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "PUT",
Pattern: "/api/catalog/catalogs/%s/applications/%s",
Variables: []string{"catalog_id", "id"},
Regexp: regexp.MustCompile(`/api/catalog/catalogs/([^/]+)/applications/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "compiled_cat[cat_parser_gem_version]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[compiler_ver]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[conditions]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[definitions]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[dependency_hashes][]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[imports]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[long_description]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[mappings]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[name]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[dependency_hashes][]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[operations]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[outputs]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[package]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[parameters]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[permissions]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[dependency_hashes][]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[resources]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[rs_ca_ver]",
Description: ``,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[short_description]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[source]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "long_description",
Description: `Long description of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `Name of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedule_required",
Description: `Whether the CloudApp requires a schedule to be provided at launch time. If set to false, allows user to pick from '24/7' schedule when launching in the UI`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][created_from]",
Description: `optional HREF of the Schedule resource used to create this schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][description]",
Description: `An optional description that will help users understand the purpose of the Schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][name]",
Description: `The name of the Schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][hour]",
Description: `The hour of day from 0 to 23.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][minute]",
Description: `The minute from 0 to 59.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][rule]",
Description: `A RRULE string describing the recurrence rule.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][hour]",
Description: `The hour of day from 0 to 23.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][minute]",
Description: `The minute from 0 to 59.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][rule]",
Description: `A RRULE string describing the recurrence rule.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "short_description",
Description: `Short description of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "template_href",
Description: `A template href can be provided to maintain the relationship between the resources.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "compiled_cat",
Description: `The compiled source of the CAT file. This can be obtained by calling Template.compile or Template.show in the Designer application.`,
Type: "*CompiledCAT",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "long_description",
Description: `Long description of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `Name of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedule_required",
Description: `Whether the CloudApp requires a schedule to be provided at launch time. If set to false, allows user to pick from '24/7' schedule when launching in the UI`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules",
Description: `Schedules available to users when launching the application`,
Type: "[]*Schedule",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "short_description",
Description: `Short description of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "template_href",
Description: `A template href can be provided to maintain the relationship between the resources.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "multi_update",
Description: `Update the content of multiple Applications.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "PUT",
Pattern: "/api/catalog/catalogs/%s/applications",
Variables: []string{"catalog_id"},
Regexp: regexp.MustCompile(`/api/catalog/catalogs/([^/]+)/applications`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "compiled_cat[cat_parser_gem_version]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[compiler_ver]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[conditions]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[definitions]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[dependency_hashes][]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[imports]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[long_description]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[mappings]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[name]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[dependency_hashes][]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[operations]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[outputs]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[package]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[parameters]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[permissions]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[dependency_hashes][]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[resources]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[rs_ca_ver]",
Description: ``,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[short_description]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[source]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "id",
Description: `The Application ID to update`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "long_description",
Description: `Long description of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `Name of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedule_required",
Description: `Whether the CloudApp requires a schedule to be provided at launch time. If set to false, allows user to pick from '24/7' schedule when launching in the UI`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][created_from]",
Description: `optional HREF of the Schedule resource used to create this schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][description]",
Description: `An optional description that will help users understand the purpose of the Schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][name]",
Description: `The name of the Schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][hour]",
Description: `The hour of day from 0 to 23.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][minute]",
Description: `The minute from 0 to 59.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][rule]",
Description: `A RRULE string describing the recurrence rule.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][hour]",
Description: `The hour of day from 0 to 23.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][minute]",
Description: `The minute from 0 to 59.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][rule]",
Description: `A RRULE string describing the recurrence rule.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "short_description",
Description: `Short description of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "template_href",
Description: `A template href can be provided to maintain the relationship between the resources.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "compiled_cat",
Description: `The compiled source of the CAT file. This can be obtained by calling Template.compile or Template.show in the Designer application.`,
Type: "*CompiledCAT",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "id",
Description: `The Application ID to update`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "long_description",
Description: `Long description of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `Name of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedule_required",
Description: `Whether the CloudApp requires a schedule to be provided at launch time. If set to false, allows user to pick from '24/7' schedule when launching in the UI`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules",
Description: `Schedules available to users when launching the application`,
Type: "[]*Schedule",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "short_description",
Description: `Short description of application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "template_href",
Description: `A template href can be provided to maintain the relationship between the resources.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "delete",
Description: `Delete an Application from the Catalog`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "DELETE",
Pattern: "/api/catalog/catalogs/%s/applications/%s",
Variables: []string{"catalog_id", "id"},
Regexp: regexp.MustCompile(`/api/catalog/catalogs/([^/]+)/applications/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
&metadata.Action{
Name: "multi_delete",
Description: `Delete multiple Applications from the Catalog`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "DELETE",
Pattern: "/api/catalog/catalogs/%s/applications",
Variables: []string{"catalog_id"},
Regexp: regexp.MustCompile(`/api/catalog/catalogs/([^/]+)/applications`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `The Application IDs to delete`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `The Application IDs to delete`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "download",
Description: `Download the underlying CAT source of an Application.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/catalogs/%s/applications/%s/download",
Variables: []string{"catalog_id", "id"},
Regexp: regexp.MustCompile(`/api/catalog/catalogs/([^/]+)/applications/([^/]+)/download`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "api_version",
Description: `The API version (only valid value is currently "1.0")`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "api_version",
Description: `The API version (only valid value is currently "1.0")`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "launch",
Description: `Launches an Application by creating an Execution with ScheduledActions as needed to match the optional Schedule provided.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/catalog/catalogs/%s/applications/%s/actions/launch",
Variables: []string{"catalog_id", "id"},
Regexp: regexp.MustCompile(`/api/catalog/catalogs/([^/]+)/applications/([^/]+)/actions/launch`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "defer_launch",
Description: `Whether or not to defer launching the execution. Setting this value to true will keep the execution in not_started state until it is explicitly launched or the first scheduled start operation occurs.`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "description",
Description: `The description for the execution. The description of the Application will be used if none is provided.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "end_date",
Description: `When the CloudApp should be automatically terminated.`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The name for the Execution. The Application name will be used if none is provided. This will be used as the name of the deployment (appended with a unique ID).`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "options[][name]",
Description: `Name of configuration option`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "options[][type]",
Description: `Type of configuration option.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"string", "number", "list"},
},
&metadata.ActionParam{
Name: "options[][value]",
Description: `Configuration option value, a string, integer or array of strings depending on type`,
Type: "interface{}",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedule_name",
Description: `Name of the Schedule to use when launching. It must match one of the schedules attached to the Application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "defer_launch",
Description: `Whether or not to defer launching the execution. Setting this value to true will keep the execution in not_started state until it is explicitly launched or the first scheduled start operation occurs.`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "description",
Description: `The description for the execution. The description of the Application will be used if none is provided.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "end_date",
Description: `When the CloudApp should be automatically terminated.`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The name for the Execution. The Application name will be used if none is provided. This will be used as the name of the deployment (appended with a unique ID).`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "options",
Description: `The configuration options of the Execution. These are the values provided for the CloudApp parameters.`,
Type: "[]*ConfigurationOption",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedule_name",
Description: `Name of the Schedule to use when launching. It must match one of the schedules attached to the Application`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
},
},
},
"EndUser": &metadata.Resource{
Name: "EndUser",
Description: ``,
Identifier: "application/vnd.rightscale.self_service.end_user",
Attributes: []*metadata.Attribute{
&metadata.Attribute{
Name: "company",
FieldName: "Company",
FieldType: "string",
},
&metadata.Attribute{
Name: "first_name",
FieldName: "FirstName",
FieldType: "string",
},
&metadata.Attribute{
Name: "href",
FieldName: "Href",
FieldType: "string",
},
&metadata.Attribute{
Name: "id",
FieldName: "Id",
FieldType: "string",
},
&metadata.Attribute{
Name: "kind",
FieldName: "Kind",
FieldType: "string",
},
&metadata.Attribute{
Name: "last_name",
FieldName: "LastName",
FieldType: "string",
},
&metadata.Attribute{
Name: "phone",
FieldName: "Phone",
FieldType: "string",
},
&metadata.Attribute{
Name: "timezone_name",
FieldName: "TimezoneName",
FieldType: "string",
},
},
Actions: []*metadata.Action{
&metadata.Action{
Name: "index",
Description: `Show all Self-Service Only End Users that belong to this account.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/accounts/%s/end_users",
Variables: []string{"account_id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/end_users`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by user ID`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by user ID`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "create",
Description: `Grant a user Self-Service Only End User access to this account.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/catalog/accounts/%s/end_users",
Variables: []string{"account_id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/end_users`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "user_ids[]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "user_ids",
Description: `User IDs to add as SS End Users to this account`,
Type: "[]string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "delete",
Description: ``,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "DELETE",
Pattern: "/api/catalog/accounts/%s/end_users",
Variables: []string{"account_id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/end_users`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "user_ids[]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "user_ids",
Description: `User IDs to remove as SS End Users to this account`,
Type: "[]string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "non_ss_users",
Description: ``,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/accounts/%s/end_users/available",
Variables: []string{"account_id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/end_users/available`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
},
},
"NotificationRule": &metadata.Resource{
Name: "NotificationRule",
Description: `A notification rule describes which notification should be created
when events occur in the system. Events may be generated when an
execution status changes or when an operation fails for example.
A rule has a source which can be a specific resource or a group of
resources (described via a link-like syntax), a target which
corresponds to a user (for now) and a minimum severity used to filter
out events with lower severities.`,
Identifier: "application/vnd.rightscale.self_service.notification_rule",
Attributes: []*metadata.Attribute{
&metadata.Attribute{
Name: "account_id",
FieldName: "AccountId",
FieldType: "string",
},
&metadata.Attribute{
Name: "category",
FieldName: "Category",
FieldType: "string",
},
&metadata.Attribute{
Name: "href",
FieldName: "Href",
FieldType: "string",
},
&metadata.Attribute{
Name: "id",
FieldName: "Id",
FieldType: "string",
},
&metadata.Attribute{
Name: "kind",
FieldName: "Kind",
FieldType: "string",
},
&metadata.Attribute{
Name: "min_severity",
FieldName: "MinSeverity",
FieldType: "string",
},
&metadata.Attribute{
Name: "priority",
FieldName: "Priority",
FieldType: "int",
},
&metadata.Attribute{
Name: "source",
FieldName: "Source",
FieldType: "string",
},
&metadata.Attribute{
Name: "target",
FieldName: "Target",
FieldType: "string",
},
&metadata.Attribute{
Name: "timestamps",
FieldName: "Timestamps",
FieldType: "*TimestampsStruct",
},
},
Actions: []*metadata.Action{
&metadata.Action{
Name: "index",
Description: `List all notification rules, potentially filtering by a collection of resources.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/accounts/%s/notification_rules",
Variables: []string{"account_id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/notification_rules`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by category.`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "source",
Description: `List all notification rules where the target is the current user.
The list can be further filtered by notification source: either by
source type or by specific source.
* To retrieve all notification rules that apply to all executions use:
GET nofication_rules?source==/api/projects/1234/executions
* To retrieve all notification rules that apply to a specific execution use:
GET notification_rules?source==/api/projects/1234/executions/5678`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "targets",
Description: `Comma separated list of target ids. Note, currently only "me" is allowed.`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by category.`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "source",
Description: `List all notification rules where the target is the current user.
The list can be further filtered by notification source: either by
source type or by specific source.
* To retrieve all notification rules that apply to all executions use:
GET nofication_rules?source==/api/projects/1234/executions
* To retrieve all notification rules that apply to a specific execution use:
GET notification_rules?source==/api/projects/1234/executions/5678`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "targets",
Description: `Comma separated list of target ids. Note, currently only "me" is allowed.`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "create",
Description: `Create one notification rule for a specific target and source.
The source must be unique in the scope of target and account.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/catalog/accounts/%s/notification_rules",
Variables: []string{"account_id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/notification_rules`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: ``,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "category",
Description: `The type of notification for the resource.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"lifecycle", "scheduled"},
},
&metadata.ActionParam{
Name: "min_severity",
Description: `The lowest level of notifications for the target to receive.
Setting this to "error" will result in only receiving error notifications,
whereas setting it to "info" will result in receiving both info and error notifications,
and setting it to "none" will result in not receiving any notifications.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
ValidValues: []string{"error", "info", "none"},
},
&metadata.ActionParam{
Name: "source",
Description: `The resource (or resource collection) that would trigger the notification.
"/api/manager/projects/1234/executions" refers to ALL executions in the project,
"/api/manager/projects/1234/executions/5678" refers to just one execution, and
"/api/manager/projects/1234/executions?filter[]=created_by==me" refers to executions
created by the submitting user. The source must be unique in the scope of target and account.
Note that at this time, "me" is the only supported target filter.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "target",
Description: `The notification target (user) that the rule applies to.
Note that at this time, "me" is the only supported target.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: ``,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "category",
Description: `The type of notification for the resource.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"lifecycle", "scheduled"},
},
&metadata.ActionParam{
Name: "min_severity",
Description: `The lowest level of notifications for the target to receive.
Setting this to "error" will result in only receiving error notifications,
whereas setting it to "info" will result in receiving both info and error notifications,
and setting it to "none" will result in not receiving any notifications.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
ValidValues: []string{"error", "info", "none"},
},
&metadata.ActionParam{
Name: "source",
Description: `The resource (or resource collection) that would trigger the notification.
"/api/manager/projects/1234/executions" refers to ALL executions in the project,
"/api/manager/projects/1234/executions/5678" refers to just one execution, and
"/api/manager/projects/1234/executions?filter[]=created_by==me" refers to executions
created by the submitting user. The source must be unique in the scope of target and account.
Note that at this time, "me" is the only supported target filter.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "target",
Description: `The notification target (user) that the rule applies to.
Note that at this time, "me" is the only supported target.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "patch",
Description: `Change min severity of existing rule`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "PATCH",
Pattern: "/api/catalog/accounts/%s/notification_rules/%s",
Variables: []string{"account_id", "id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/notification_rules/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "min_severity",
Description: `The lowest level of notifications for the target to receive.
Setting this to "error" will result in only receiving error notifications,
whereas setting it to "info" will result in receiving both info and error notifications,
and setting it to "none" will result in not receiving any notifications.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
ValidValues: []string{"error", "info", "none"},
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "min_severity",
Description: `The lowest level of notifications for the target to receive.
Setting this to "error" will result in only receiving error notifications,
whereas setting it to "info" will result in receiving both info and error notifications,
and setting it to "none" will result in not receiving any notifications.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
ValidValues: []string{"error", "info", "none"},
},
},
},
&metadata.Action{
Name: "show",
Description: `Show one notification rule.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/accounts/%s/notification_rules/%s",
Variables: []string{"account_id", "id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/notification_rules/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
&metadata.Action{
Name: "delete",
Description: `Delete one notification rule.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "DELETE",
Pattern: "/api/catalog/accounts/%s/notification_rules/%s",
Variables: []string{"account_id", "id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/notification_rules/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
&metadata.Action{
Name: "multi_delete",
Description: `Delete one or more notification rules by id or source and target.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "DELETE",
Pattern: "/api/catalog/accounts/%s/notification_rules",
Variables: []string{"account_id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/notification_rules`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "id",
Description: `Notification rule id`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "source",
Description: `The exact source of the rule to be deleted`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "target",
Description: `The notification target (user) that the rule applies to.
Note that at this time, "me" is the only supported target.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "id",
Description: `Notification rule id`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "source",
Description: `The exact source of the rule to be deleted`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "target",
Description: `The notification target (user) that the rule applies to.
Note that at this time, "me" is the only supported target.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
},
},
},
"UserPreference": &metadata.Resource{
Name: "UserPreference",
Description: `The UserPreference resource stores preferences on a per user basis, such as default notification preference.
The Self-Service portal uses these preferences in the portal.`,
Identifier: "application/vnd.rightscale.self_service.user_preference",
Attributes: []*metadata.Attribute{
&metadata.Attribute{
Name: "created_by",
FieldName: "CreatedBy",
FieldType: "*User",
},
&metadata.Attribute{
Name: "href",
FieldName: "Href",
FieldType: "string",
},
&metadata.Attribute{
Name: "id",
FieldName: "Id",
FieldType: "string",
},
&metadata.Attribute{
Name: "kind",
FieldName: "Kind",
FieldType: "string",
},
&metadata.Attribute{
Name: "timestamps",
FieldName: "Timestamps",
FieldType: "*TimestampsStruct",
},
&metadata.Attribute{
Name: "user_id",
FieldName: "UserId",
FieldType: "int",
},
&metadata.Attribute{
Name: "user_preference_info",
FieldName: "UserPreferenceInfo",
FieldType: "*UserPreferenceInfo",
},
&metadata.Attribute{
Name: "value",
FieldName: "Value",
FieldType: "string",
},
},
Actions: []*metadata.Action{
&metadata.Action{
Name: "index",
Description: `List the UserPreference for users in this account.
Only administrators and infrastructure users may request the preferences of other users.
Users who are not members of the admin role need to specify a filter with their ID in order to retrieve their preferences.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/accounts/%s/user_preferences",
Variables: []string{"account_id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/user_preferences`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by user, so that only UserPreference belonging to that user are returned. Use "me" as a shortcut for the current user ID.`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded"},
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by user, so that only UserPreference belonging to that user are returned. Use "me" as a shortcut for the current user ID.`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded"},
},
},
},
&metadata.Action{
Name: "show",
Description: `Get details for a particular UserPreference`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/accounts/%s/user_preferences/%s",
Variables: []string{"account_id", "id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/user_preferences/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded"},
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded"},
},
},
},
&metadata.Action{
Name: "create",
Description: `Create a new UserPreference.
Multiple resources can be created at once with a multipart request.
Values are validated with the corresponding UserPreferenceInfo.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/catalog/accounts/%s/user_preferences",
Variables: []string{"account_id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/user_preferences`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "user_id",
Description: `Administrators can create preferences for other users by providing this value`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "user_preference_info_id",
Description: `The ID for the UserPreferenceInfo defining this UserPreference`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "value",
Description: `The value to set for this UserPreference`,
Type: "interface{}",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "user_id",
Description: `Administrators can create preferences for other users by providing this value`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "user_preference_info_id",
Description: `The ID for the UserPreferenceInfo defining this UserPreference`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "value",
Description: `The value to set for this UserPreference`,
Type: "interface{}",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "update",
Description: `Update the value of a UserPreference.
Multiple values may be updated using a multipart request.
Values are validated with the corresponding UserPreferenceInfo.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "PATCH",
Pattern: "/api/catalog/accounts/%s/user_preferences/%s",
Variables: []string{"account_id", "id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/user_preferences/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "id",
Description: `In a multipart request, the ID of the UserPreference to update`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "value",
Description: `The value to set for this UserPreference`,
Type: "interface{}",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "id",
Description: `In a multipart request, the ID of the UserPreference to update`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "value",
Description: `The value to set for this UserPreference`,
Type: "interface{}",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "delete",
Description: `Delete a UserPreference`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "DELETE",
Pattern: "/api/catalog/accounts/%s/user_preferences/%s",
Variables: []string{"account_id", "id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/user_preferences/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
},
},
"UserPreferenceInfo": &metadata.Resource{
Name: "UserPreferenceInfo",
Description: `The UserPreferenceInfo resource defines the available user preferences supported by the system.
It is also used to validate values saved in UserPreference.`,
Identifier: "application/vnd.rightscale.self_service.user_preference_info",
Attributes: []*metadata.Attribute{
&metadata.Attribute{
Name: "category",
FieldName: "Category",
FieldType: "string",
},
&metadata.Attribute{
Name: "default_value",
FieldName: "DefaultValue",
FieldType: "string",
},
&metadata.Attribute{
Name: "display_name",
FieldName: "DisplayName",
FieldType: "string",
},
&metadata.Attribute{
Name: "help_text",
FieldName: "HelpText",
FieldType: "string",
},
&metadata.Attribute{
Name: "href",
FieldName: "Href",
FieldType: "string",
},
&metadata.Attribute{
Name: "id",
FieldName: "Id",
FieldType: "string",
},
&metadata.Attribute{
Name: "kind",
FieldName: "Kind",
FieldType: "string",
},
&metadata.Attribute{
Name: "name",
FieldName: "Name",
FieldType: "string",
},
&metadata.Attribute{
Name: "value_constraint",
FieldName: "ValueConstraint",
FieldType: "[]string",
},
&metadata.Attribute{
Name: "value_range",
FieldName: "ValueRange",
FieldType: "*ValueRangeStruct",
},
&metadata.Attribute{
Name: "value_type",
FieldName: "ValueType",
FieldType: "string",
},
},
Actions: []*metadata.Action{
&metadata.Action{
Name: "index",
Description: `List the UserPreferenceInfo.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/accounts/%s/user_preference_infos",
Variables: []string{"account_id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/user_preference_infos`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by category and/or name`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by category and/or name`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "show",
Description: `Get details for a particular UserPreferenceInfo`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/catalog/accounts/%s/user_preference_infos/%s",
Variables: []string{"account_id", "id"},
Regexp: regexp.MustCompile(`/api/catalog/accounts/([^/]+)/user_preference_infos/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
},
},
}
|
// Copyright 2023 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package extactorhandler
import (
"context"
"fmt"
"io"
"net/http"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"github.com/pingcap/errors"
"github.com/pingcap/failpoint"
"github.com/pingcap/tidb/domain"
"github.com/pingcap/tidb/server/handler"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/util/logutil"
"go.uber.org/zap"
)
const (
	// extractPlanTaskType is the value of the `type` query parameter that
	// selects a plan-extraction task.
	extractPlanTaskType = "plan"
)
// ExtractTaskServeHandler is the http serve handler for extract task handler.
type ExtractTaskServeHandler struct {
	// ExtractHandler performs the actual extraction; owned by the domain layer.
	ExtractHandler *domain.ExtractHandle
}
// NewExtractTaskServeHandler creates a new extract task serve handler
// wrapping the given domain-level extract handle.
func NewExtractTaskServeHandler(extractHandler *domain.ExtractHandle) *ExtractTaskServeHandler {
	h := &ExtractTaskServeHandler{}
	h.ExtractHandler = extractHandler
	return h
}
// ServeHTTP serves http. It builds an extract task from the request, runs it,
// and either returns the generated task name or, when a dump was requested,
// streams the resulting zip archive back to the client.
func (eh ExtractTaskServeHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
	task, isDump, err := buildExtractTask(req)
	if err != nil {
		logutil.BgLogger().Error("build extract task failed", zap.Error(err))
		handler.WriteError(w, err)
		return
	}
	failpoint.Inject("extractTaskServeHandler", func(val failpoint.Value) {
		if val.(bool) {
			w.WriteHeader(http.StatusOK)
			_, err = w.Write([]byte("mock"))
			if err != nil {
				handler.WriteError(w, err)
			}
			failpoint.Return()
		}
	})
	name, err := eh.ExtractHandler.ExtractTask(context.Background(), task)
	if err != nil {
		logutil.BgLogger().Error("extract task failed", zap.Error(err))
		handler.WriteError(w, err)
		return
	}
	if !isDump {
		w.WriteHeader(http.StatusOK)
		_, err = w.Write([]byte(name))
		if err != nil {
			logutil.BgLogger().Error("extract handler failed", zap.Error(err))
		}
		return
	}
	// Load the archive BEFORE committing a 200 status, so a load failure can
	// still be reported as an error response.
	content, err := loadExtractResponse(name)
	if err != nil {
		logutil.BgLogger().Error("load extract task failed", zap.Error(err))
		handler.WriteError(w, err)
		return
	}
	// Headers must be set before the status/body are written; the original
	// set them after w.Write, which silently discards them.
	w.Header().Set("Content-Type", "application/zip")
	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s.zip\"", name))
	w.WriteHeader(http.StatusOK)
	if _, err = w.Write(content); err != nil {
		logutil.BgLogger().Error("write extract response failed", zap.Error(err))
	}
}
// loadExtractResponse reads the archived extract result with the given name
// from the extract task directory and returns its raw bytes.
func loadExtractResponse(name string) ([]byte, error) {
	path := filepath.Join(domain.GetExtractTaskDirName(), name)
	// os.ReadFile opens, reads and closes the file in one call.
	//nolint: gosec
	return os.ReadFile(path)
}
// buildExtractTask dispatches on the `type` query parameter and builds the
// corresponding extract task. Only the "plan" task type is supported.
func buildExtractTask(req *http.Request) (*domain.ExtractTask, bool, error) {
	switch strings.ToLower(req.URL.Query().Get(handler.Type)) {
	case extractPlanTaskType:
		return buildExtractPlanTask(req)
	default:
		logutil.BgLogger().Error("unknown extract task type")
		return nil, false, errors.New("unknown extract task type")
	}
}
// buildExtractPlanTask builds a plan-extraction task from the request's
// `begin`/`end` query parameters. When both are absent the window defaults
// to the last 30 minutes (begin = now-30m, end = now). It returns the task,
// whether the caller asked for a dumped archive, and any parse error.
func buildExtractPlanTask(req *http.Request) (*domain.ExtractTask, bool, error) {
	beginStr := req.URL.Query().Get(handler.Begin)
	endStr := req.URL.Query().Get(handler.End)
	var begin time.Time
	var err error
	if len(beginStr) < 1 {
		// Default to 30 minutes in the past. The original Add(+30m) placed
		// begin AFTER the default end (now), yielding an empty window.
		begin = time.Now().Add(-30 * time.Minute)
	} else {
		begin, err = time.Parse(types.TimeFormat, beginStr)
		if err != nil {
			logutil.BgLogger().Error("extract task begin time failed", zap.Error(err), zap.String("begin", beginStr))
			return nil, false, err
		}
	}
	var end time.Time
	if len(endStr) < 1 {
		end = time.Now()
	} else {
		end, err = time.Parse(types.TimeFormat, endStr)
		if err != nil {
			logutil.BgLogger().Error("extract task end time failed", zap.Error(err), zap.String("end", endStr))
			return nil, false, err
		}
	}
	isDump := extractBoolParam(handler.IsDump, false, req)
	return &domain.ExtractTask{
		ExtractType:     domain.ExtractPlanType,
		IsBackgroundJob: false,
		Begin:           begin,
		End:             end,
		SkipStats:       extractBoolParam(handler.IsSkipStats, false, req),
		UseHistoryView:  extractBoolParam(handler.IsHistoryView, true, req),
	}, isDump, nil
}
func extractBoolParam(param string, defaultValue bool, req *http.Request) bool {
str := req.URL.Query().Get(param)
if len(str) < 1 {
return defaultValue
}
v, err := strconv.ParseBool(str)
if err != nil {
return defaultValue
}
return v
}
|
package model
// Language identifies a programming language supported by this package.
type Language string

// Known language identifiers. Note that Go is stored as "golang", not "go".
const (
	Javascript Language = "javascript"
	Java Language = "java"
	Go Language = "golang"
	Cpp Language = "cpp"
)
|
package main
import (
"database/sql"
"flag"
"fmt"
"log"
"time"
_ "github.com/go-sql-driver/mysql"
)
// main is a throwaway load generator for a MySQL connection pool: it opens a
// pool with configurable idle/open limits, then fires 1000 concurrent
// single-row queries per second for 100 seconds, printing one dot per row.
func main() {
	var (
		idle = 5
		open = 5
		dsn  = "bench:cygames0@tcp(10.9.10.223:3307)/hayao10_hayao"
	)
	flag.IntVar(&idle, "idle", idle, "SetMaxIdleConns")
	flag.IntVar(&open, "open", open, "SetMaxOpenConns")
	flag.StringVar(&dsn, "dsn", dsn, "dsn")
	flag.Parse()
	cnn, err := sql.Open("mysql", dsn)
	if err != nil {
		log.Fatal(err)
	}
	defer cnn.Close()
	log.Printf("open:%v", open)
	log.Printf("idle:%v", idle)
	cnn.SetMaxIdleConns(idle)
	cnn.SetMaxOpenConns(open)
	for i := 0; i < 100; i++ {
		for j := 0; j < 1000; j++ {
			// Goroutines are deliberately unbounded: the point is to stress
			// the pool. Any query error aborts the whole benchmark (log.Fatal).
			go func() {
				rows, err := cnn.Query("SELECT rand()")
				if err != nil {
					log.Fatal(err)
				}
				defer rows.Close()
				for rows.Next() {
					var id interface{}
					if err := rows.Scan(&id); err != nil {
						log.Fatal(err)
					}
					fmt.Print(".")
				}
				if err := rows.Err(); err != nil {
					log.Fatal(err)
				}
			}()
		}
		time.Sleep(1 * time.Second)
	}
}
|
package main
/*
* @lc app=leetcode id=450 lang=golang
*
* [450] Delete Node in a BST
*/
/**
* Definition for a binary tree node.
* type TreeNode struct {
* Val int
* Left *TreeNode
* Right *TreeNode
* }
*/
/*
This solution feels slightly hacky: once the tree contains duplicate
values it no longer applies.
*/
// deleteNod450 deletes the node with value key from the BST rooted at root
// and returns the (possibly new) root of the subtree.
func deleteNod450(root *TreeNode, key int) *TreeNode {
	// Empty subtree: nothing to delete. (The original used `for`, which
	// behaved like `if` only because every branch returns.)
	if root == nil {
		return nil
	}
	if root.Val < key {
		root.Right = deleteNod450(root.Right, key)
		return root
	}
	if root.Val > key {
		root.Left = deleteNod450(root.Left, key)
		return root
	}
	// root.Val == key: splice this node out.
	if root.Left == nil {
		return root.Right
	}
	if root.Right == nil {
		return root.Left
	}
	// Two children: copy the right subtree's minimum value here, then delete
	// that minimum from the right subtree.
	rightMin := findMin(root.Right)
	root.Val = rightMin.Val
	root.Right = deleteNod450(root.Right, root.Val)
	return root
}
// findMin returns the leftmost (minimum) node of the subtree.
// Precondition: root != nil.
func findMin(root *TreeNode) *TreeNode {
	node := root
	for node.Left != nil {
		node = node.Left
	}
	return node
}
|
package main
import (
"encoding/json"
"net/http"
"os"
"path/filepath"
"strings"
"time"
"github.com/go-chi/chi"
"github.com/go-chi/chi/middleware"
"github.com/sirupsen/logrus"
)
// startServer configures the chi router (request id, real IP, logging,
// panic recovery, 60s timeout), mounts the static web directory, the
// websocket endpoint and the hit-count endpoint, then serves on :4337.
func startServer() {
	r := chi.NewRouter()
	r.Use(middleware.RequestID)
	r.Use(middleware.RealIP)
	r.Use(middleware.Logger)
	r.Use(middleware.Recoverer)
	r.Use(middleware.Timeout(60 * time.Second))
	workDir, _ := os.Getwd()
	fileServer(r, "/", http.Dir(filepath.Join(workDir, "web")))
	r.Get("/ws", func(w http.ResponseWriter, r *http.Request) {
		handleWebsocket(w, r)
	})
	r.Get("/count", func(w http.ResponseWriter, r *http.Request) {
		total, day, hour, minute := redisClient.getCounts()
		count := &CountPayload{
			Total:  total,
			Day:    day,
			Hour:   hour,
			Minute: minute,
		}
		writeJSON(w, http.StatusOK, count)
	})
	// The original discarded ListenAndServe's error; surface it so startup
	// failures (e.g. port already in use) are not silent.
	if err := http.ListenAndServe(":4337", r); err != nil {
		logrus.WithError(err).Fatal("http server exited")
	}
}
// fileServer mounts a static file server for root under path on the router.
// path must not contain chi URL parameters.
func fileServer(r chi.Router, path string, root http.FileSystem) {
	if strings.ContainsAny(path, "{}*") {
		panic("FileServer does not permit URL parameters.")
	}
	h := http.StripPrefix(path, http.FileServer(root))
	if path != "/" && path[len(path)-1] != '/' {
		// Redirect the bare path to its trailing-slash form.
		r.Get(path, http.RedirectHandler(path+"/", 301).ServeHTTP)
		path += "/"
	}
	r.Get(path+"*", func(w http.ResponseWriter, req *http.Request) {
		h.ServeHTTP(w, req)
	})
}
// writeJSON marshals data and writes it as an application/json response with
// the given status code; a marshal failure yields a bare 500.
func writeJSON(w http.ResponseWriter, code int, data interface{}) {
	payload, err := json.Marshal(data)
	if err != nil {
		logrus.WithError(err).Error("Failed to encode json")
		w.WriteHeader(500)
		return
	}
	w.Header().Set("Content-Type", "application/json")
	write(w, code, payload)
}
// writeString writes s as the response body with the given status code.
func writeString(w http.ResponseWriter, code int, s string) {
	write(w, code, []byte(s))
}
// write sends the status code and raw body, logging any write failure at
// debug level. `log` is a package-level logger declared elsewhere in the
// package.
func write(w http.ResponseWriter, code int, bs []byte) {
	w.WriteHeader(code)
	_, err := w.Write(bs)
	if err != nil {
		log.Debug("Failed to write response")
		log.Debug(err)
	}
}
|
package cmd
import (
"fmt"
"strconv"
"github.com/spf13/cobra"
)
// init registers the divide subcommand on the root command.
func init() {
	RootCmd.AddCommand(divideCmd)
}
// divideCmd performs integer division of its two arguments.
// The original ignored Atoi errors, did not check the argument count (index
// out of range) and panicked on division by zero.
var divideCmd = &cobra.Command{
	Use:   "divide",
	Short: "Calculator of division.",
	Long:  "Calculator to perform the division.",
	Run: func(cmd *cobra.Command, args []string) {
		if len(args) < 2 {
			fmt.Println("usage: divide <a> <b>")
			return
		}
		n1, err := strconv.Atoi(args[0])
		if err != nil {
			fmt.Println("invalid number:", args[0])
			return
		}
		n2, err := strconv.Atoi(args[1])
		if err != nil {
			fmt.Println("invalid number:", args[1])
			return
		}
		if n2 == 0 {
			fmt.Println("cannot divide by zero")
			return
		}
		fmt.Println(n1 / n2)
	},
}
|
package main
import "fmt"
import "regexp"
import "time"
// main compares the cost of ad-hoc regexp matching (which recompiles the
// pattern on every call) against matching with a precompiled regexp.
func main() {
	t := time.Now()
	for i := 0; i < 100; i++ {
		// regexp.MatchString compiles the pattern on every call; the match
		// error was previously discarded.
		res, err := regexp.MatchString("a([a-z]+)o", "afdjo")
		if err != nil {
			fmt.Println("match error:", err)
			return
		}
		fmt.Print(res)
	}
	fmt.Println("cost without regexp compiling", time.Since(t).Nanoseconds())
	t = time.Now()
	reg := regexp.MustCompile("a([a-z]+)o")
	for i := 0; i < 100; i++ {
		fmt.Print(reg.MatchString("afdjo"))
	}
	fmt.Println("cost with regexp compiling", time.Since(t).Nanoseconds())
	// Compiling once is roughly an order of magnitude faster overall.
}
|
package main
import (
"fmt"
"os"
"github.com/aymanbagabas/hknui/ui"
tea "github.com/charmbracelet/bubbletea"
"github.com/spf13/cobra"
)
var (
	// rootCmd launches the Hacker News TUI. Errors are returned instead of
	// calling os.Exit so the deferred cleanup (closing the debug log file,
	// leaving the alternate screen) always runs; the original os.Exit calls
	// skipped both defers.
	rootCmd = &cobra.Command{
		Use:   "hknui",
		Short: "Hacker News TUI",
		RunE: func(cmd *cobra.Command, args []string) error {
			f, err := tea.LogToFile("debug.log", "debug")
			if err != nil {
				return fmt.Errorf("opening debug log: %w", err)
			}
			defer f.Close()
			m := ui.NewModel()
			p := tea.NewProgram(m)
			p.EnterAltScreen()
			defer p.ExitAltScreen()
			// p.EnableMouseCellMotion()
			// defer p.DisableMouseCellMotion()
			if err := p.Start(); err != nil {
				return fmt.Errorf("running program: %w", err)
			}
			return cmd.Help()
		},
	}
)
// main runs the root command and exits non-zero on failure.
func main() {
	err := rootCmd.Execute()
	if err == nil {
		return
	}
	fmt.Println(err)
	os.Exit(1)
}
|
package masterkey
import (
"log"
"git.roosoft.com/bitcoin/hd-wallets/lib"
"github.com/tyler-smith/go-bip32"
)
var km *lib.KeyManager
var masterkey *bip32.Key
// init derives the key manager and BIP-32 master key from the configured
// mnemonic and passphrase. Any failure aborts the process: without a master
// key the rest of this package is unusable.
func init() {
	var err error
	km, err = lib.NewKeyManagerFromMnemonic(lib.Mnemonic, lib.Passphrase)
	if err != nil {
		log.Fatal(err)
	}
	masterkey, err = km.GetMasterKey()
	if err != nil {
		log.Fatal(err)
	}
}
// GetPrivateKey returns the master key's raw private key bytes.
func GetPrivateKey() []byte {
	return masterkey.Key
}
// GetPublicKey returns the raw public key bytes derived from the master key.
func GetPublicKey() []byte {
	return masterkey.PublicKey().Key
}
// GetChainCode returns the extended private key's chain code.
func GetChainCode() []byte {
	return masterkey.ChainCode
}
|
// Copyright 2015 Simon HEGE. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
/*
Package geodesic provides direct and inverse geodesic calculations.
It is a Go wrapper around the GeographicLib geodesic C library by Charles Karney.
The original library (MIT licensed) can be found at http://geographiclib.sourceforge.net/
*/
package geodesic
// #cgo LDFLAGS: -lm
// #include "geodesic.h"
import "C"
import (
	"math"

	"github.com/xeonx/geographic"
)
// InvertAzimuth inverts an azimuth in degrees: it adds 180° and normalizes
// the result into the [0, 360) interval.
func InvertAzimuth(azDeg float64) float64 {
	// The original expression `azDeg + 180%360` only ever added 180, because
	// `%` binds tighter than `+` (and 180%360 == 180); the result was never
	// reduced modulo 360 as the doc comment promises.
	inv := math.Mod(azDeg+180, 360)
	if inv < 0 {
		inv += 360
	}
	return inv
}
// Geodesic contains information about the ellipsoid.
type Geodesic struct {
	// g is the underlying C geodesic state, initialized by geod_init.
	g C.struct_geod_geodesic
}
// NewGeodesic initializes a Geodesic with the equatorial radius a (meters)
// and the flattening f (dimensionless).
func NewGeodesic(a float64, f float64) Geodesic {
	var g Geodesic
	C.geod_init(&g.g, C.double(a), C.double(f))
	return g
}
// WGS84 represents the WGS 84 ellipsoid (equatorial radius 6378137 m,
// flattening 1/298.257223563).
var WGS84 = NewGeodesic(6378137, 1/298.257223563)
// Direct computes the point pt2 obtained from pt1 at a distance s12 (in
// meters) in direction of az1 (in degrees, clockwise from north). The
// returned azimuth az2 is the azimuth of the geodesic at pt2 (in degrees).
func (g Geodesic) Direct(pt1 geographic.Point, az1 float64, s12 float64) (pt2 geographic.Point, az2 float64) {
	// geod_direct writes its results through the three out-parameters.
	var resLat, resLon, resAz C.double
	C.geod_direct(
		&g.g,
		C.double(pt1.LatitudeDeg),
		C.double(pt1.LongitudeDeg),
		C.double(az1),
		C.double(s12),
		&resLat,
		&resLon,
		&resAz)
	pt2.LatitudeDeg = float64(resLat)
	pt2.LongitudeDeg = float64(resLon)
	az2 = float64(resAz)
	return
}
// Inverse computes the azimuths (in degrees, clockwise from north) and the
// distance (in meters) between two points. Azimuths are from one point to
// the other.
// NOTE(review): pointer receiver here vs value receiver on Direct — the
// inconsistency is harmless but worth unifying.
func (g *Geodesic) Inverse(pt1 geographic.Point, pt2 geographic.Point) (s12, az1, az2 float64) {
	var resS12, resAz1, resAz2 C.double
	C.geod_inverse(
		&g.g,
		C.double(pt1.LatitudeDeg),
		C.double(pt1.LongitudeDeg),
		C.double(pt2.LatitudeDeg),
		C.double(pt2.LongitudeDeg),
		&resS12,
		&resAz1,
		&resAz2)
	s12 = float64(resS12)
	az1 = float64(resAz1)
	az2 = float64(resAz2)
	return
}
// GeodesicLine contains information about a single geodesic line.
type GeodesicLine struct {
	// gl is the underlying C line state, initialized by geod_lineinit.
	gl C.struct_geod_geodesicline
}
// NewGeodesicLine initializes a GeodesicLine starting at origin with the
// given azimuth (degrees) on ellipsoid g.
// NOTE(review): the trailing 0 is the caps bitmask — presumably selecting
// the library's default capabilities; confirm against the geod_lineinit docs.
func NewGeodesicLine(g Geodesic, origin geographic.Point, azDeg float64) GeodesicLine {
	var l GeodesicLine
	C.geod_lineinit(&l.gl, &g.g, C.double(origin.LatitudeDeg), C.double(origin.LongitudeDeg), C.double(azDeg), 0)
	return l
}
// Position computes the point at s12 (meters) from the GeodesicLine origin.
// The distance can be negative. It is faster than multiple calls to Direct.
// azDeg is the azimuth of the geodesic line at the resulting point; for a
// short positive distance it is approximately the line's azimuth at origin.
func (l GeodesicLine) Position(s12 float64) (pt geographic.Point, azDeg float64) {
	var resLat, resLon, resAzDeg C.double
	C.geod_position(
		&l.gl,
		C.double(s12),
		&resLat,
		&resLon,
		&resAzDeg)
	pt.LatitudeDeg = float64(resLat)
	pt.LongitudeDeg = float64(resLon)
	azDeg = float64(resAzDeg)
	return
}
|
package helpers
import (
//"fmt"
"log"
"gopkg.in/mgo.v2"
"gopkg.in/mgo.v2/bson"
"time"
)
const MONGOHOST string = "localhost"
const MONGODB string = "ghdaily"
const MONGOCOLLECTION string = "repos"
// InsertRepo stores repo in the repos collection, reporting success.
// A failed insert is logged and reported as false instead of aborting the
// whole process: the original log.Fatal exited immediately, making its
// `return false` unreachable and the bool result pointless.
func InsertRepo(repo Repo) bool {
	session, err := mgo.Dial(MONGOHOST)
	if err != nil {
		panic(err)
	}
	defer session.Close()
	session.SetMode(mgo.Monotonic, true)
	c := session.DB(MONGODB).C(MONGOCOLLECTION)
	if err := c.Insert(repo); err != nil {
		log.Println(err)
		return false
	}
	return true
}
// GetRepos returns the repos in language lang created within the 24 hours
// starting at day.
func GetRepos(lang string, day time.Time) []Repo {
	dayEnd := day.Add(24 * time.Hour)
	session, err := mgo.Dial(MONGOHOST)
	if err != nil {
		panic(err)
	}
	defer session.Close()
	session.SetMode(mgo.Monotonic, true)
	coll := session.DB(MONGODB).C(MONGOCOLLECTION)
	query := bson.M{
		"createdat": bson.M{
			"$gt": day,
			"$lt": dayEnd,
		},
		"language": lang,
	}
	var repos []Repo
	coll.Find(query).All(&repos)
	return repos
}
// Exists reports whether a repo named title was already recorded today (UTC).
func Exists(title string) bool {
	year, month, day := time.Now().Date()
	today := time.Date(year, month, day, 0, 0, 0, 0, time.UTC)
	session, err := mgo.Dial(MONGOHOST)
	if err != nil {
		panic(err)
	}
	defer session.Close()
	session.SetMode(mgo.Monotonic, true)
	c := session.DB(MONGODB).C(MONGOCOLLECTION)
	// Count matches instead of materializing every document just to test
	// for existence (the original fetched all matching repos).
	n, err := c.Find(
		bson.M{
			"createdat": bson.M{
				"$gt": today,
			},
			"name": title,
		}).Count()
	if err != nil {
		log.Println(err)
		return false
	}
	return n > 0
}
|
package 数组
var morseCodes = []string{
".-", "-...", "-.-.", "-..", ".", "..-.", "--.", "....", "..", ".---",
"-.-", ".-..", "--", "-.", "---", ".--.", "--.-", ".-.", "...", "-",
"..-", "...-", ".--", "-..-", "-.--", "--..",
}
// uniqueMorseRepresentations counts the distinct Morse transcriptions among
// the given words.
func uniqueMorseRepresentations(words []string) int {
	seen := make(map[string]bool)
	for _, w := range words {
		seen[getMorseSequenceOfWord(w)] = true
	}
	return len(seen)
}
// getMorseSequenceOfWord concatenates the Morse code of each letter in word.
// Bytes are accumulated in a pre-sized slice to avoid the quadratic cost of
// repeated string concatenation.
func getMorseSequenceOfWord(word string) string {
	buf := make([]byte, 0, 4*len(word)) // Morse codes are at most 4 symbols
	for i := 0; i < len(word); i++ {
		buf = append(buf, getMorseCodeOfLetter(word[i])...)
	}
	return string(buf)
}
// getMorseCodeOfLetter maps a lowercase ASCII letter ('a'..'z') to its Morse
// code; any other byte indexes out of range and panics.
func getMorseCodeOfLetter(letter byte) string {
	return morseCodes[letter-'a']
}
|
package model
import (
"time"
"github.com/graphql-go/graphql"
)
// Team represents a F1 scuderia.
type Team struct {
	ID        string       `json:"id"`
	Name      string       `json:"name"`
	CreatedAt *time.Time   `json:"createdAt"`
	// NOTE(review): Seasons is persisted but not declared on TeamType in
	// init below — confirm whether that omission is intentional.
	Seasons []TeamSeason `json:"seasons" bson:"seasons"`
}

// TeamSeason links a season year to the driver the team fielded that year.
type TeamSeason struct {
	Year   string `json:"year" bson:"year"`
	Driver string `json:"driver" bson:"driver"`
}

// TeamType and TeamSeasonType are the GraphQL object types built in init.
var TeamType *graphql.Object
var TeamSeasonType *graphql.Object
// init builds the package's GraphQL object types.
// NOTE(review): TeamSeasonType exposes fields "season"/"driver" while the
// TeamSeason struct has Year/Driver and no resolvers are attached — confirm
// that the default field resolution actually finds these values.
func init() {
	TeamType = graphql.NewObject(graphql.ObjectConfig{
		Name:        "Team",
		Description: "A Formula 1 team",
		Fields: graphql.Fields{
			"id": &graphql.Field{
				Name:        "id",
				Type:        graphql.String,
				Description: "team's id",
			},
			"name": &graphql.Field{
				Name:        "name",
				Type:        graphql.String,
				Description: "team's name",
			},
			"createdAt": &graphql.Field{
				Name:        "createdAt",
				Type:        graphql.DateTime,
				Description: "teams creation date",
			},
		},
	})
	TeamSeasonType = graphql.NewObject(graphql.ObjectConfig{
		Name:        "TeamSeason",
		Description: "Dummy object to join the season type and the driver type inside the TeamType",
		Fields: graphql.Fields{
			"season": &graphql.Field{
				Name:        "season",
				Type:        SeasonType,
				Description: "The season this driver has driven",
			},
			"driver": &graphql.Field{
				Name:        "driver",
				Type:        DriverType,
				Description: "The driver this team had",
			},
		},
	})
}
|
package dao
import (
"bytes"
"database/sql"
"fmt"
"strings"
"varconf-server/core/dao/common"
)
// ReleaseData maps one row of the `release` table; the DB_* tags drive the
// generic struct-mapping helpers in common.Dao.
type ReleaseData struct {
	AppId        int64           `json:"appId" DB_COL:"app_id" DB_PK:"app_id" DB_TABLE:"release"`
	ConfigList   string          `json:"configList" DB_COL:"config_list"`
	ReleaseTime  common.JsonTime `json:"releaseTime" DB_COL:"release_time"`
	ReleaseIndex int             `json:"releaseIndex" DB_COL:"release_index"`
}
// ReleaseDao provides data access for the `release` table.
type ReleaseDao struct {
	common.Dao
}
// NewReleaseDao constructs a ReleaseDao backed by db.
func NewReleaseDao(db *sql.DB) *ReleaseDao {
	return &ReleaseDao{common.Dao{DB: db}}
}
// QueryReleases loads the release rows for the given app ids.
// It returns nil when appIds is empty (the original built an invalid
// `IN ()` clause) or when the query matches nothing.
func (_self *ReleaseDao) QueryReleases(appIds []int64) []*ReleaseData {
	if len(appIds) == 0 {
		return nil
	}
	// Bind the ids as parameters instead of splicing them into the SQL text.
	placeholders := make([]string, len(appIds))
	values := make([]interface{}, len(appIds))
	for i, appId := range appIds {
		placeholders[i] = "?"
		values[i] = appId
	}
	query := fmt.Sprintf("SELECT * FROM `release` WHERE `app_id` in (%s)", strings.Join(placeholders, ", "))
	releases := make([]*ReleaseData, 0)
	success, err := _self.StructSelect(&releases, query, values...)
	if err != nil {
		panic(err)
	}
	if success {
		return releases
	}
	return nil
}
// QueryRelease loads one release row by its primary key (app_id).
// It returns nil when the row does not exist; DB errors panic.
func (_self *ReleaseDao) QueryRelease(appId int64) *ReleaseData {
	var release ReleaseData
	found, err := _self.StructSelectByPK(&release, appId)
	if err != nil {
		panic(err)
	}
	if !found {
		return nil
	}
	return &release
}
// InsertRelease inserts data and returns the affected row count; DB errors
// panic. The second StructInsert argument's meaning is defined by
// common.Dao — presumably a replace/upsert flag; confirm against its docs.
func (_self *ReleaseDao) InsertRelease(data *ReleaseData) int64 {
	rowCnt, err := _self.StructInsert(data, true)
	if err != nil {
		panic(err)
	}
	return rowCnt
}
// UpsertRelease inserts or updates data, returning the affected row count;
// DB errors panic.
func (_self *ReleaseDao) UpsertRelease(data *ReleaseData) int64 {
	rowCnt, err := _self.StructUpsert(data)
	if err != nil {
		panic(err)
	}
	return rowCnt
}
// SelectedUpdateRelease updates only the set (non-zero) fields of data,
// keyed by app_id, and returns the affected row count; DB errors panic.
func (_self *ReleaseDao) SelectedUpdateRelease(data ReleaseData) int64 {
	sql, values := _self.prepareSelectedUpdate(data)
	rowCnt, err := _self.Exec(sql, values...)
	if err != nil {
		panic(err)
	}
	return rowCnt
}
// prepareSelectedUpdate builds an UPDATE statement covering only the fields
// of data that are set (non-zero values), keyed by app_id. The returned
// args slice matches the placeholder order, with app_id last.
// NOTE(review): if no field is set the statement is `UPDATE ... SET  WHERE`,
// which is invalid SQL — confirm callers always set at least one field.
func (_self *ReleaseDao) prepareSelectedUpdate(data ReleaseData) (string, []interface{}) {
	var stmt bytes.Buffer
	stmt.WriteString("UPDATE `release` SET ")
	args := make([]interface{}, 0)
	if data.ConfigList != "" {
		stmt.WriteString("`config_list` = ?,")
		args = append(args, data.ConfigList)
	}
	if !data.ReleaseTime.IsZero() {
		stmt.WriteString("`release_time` = ?,")
		args = append(args, data.ReleaseTime)
	}
	if data.ReleaseIndex != 0 {
		stmt.WriteString("`release_index` = ?,")
		args = append(args, data.ReleaseIndex)
	}
	query := strings.TrimSuffix(stmt.String(), ",") + " WHERE `app_id` = ?"
	args = append(args, data.AppId)
	return query, args
}
|
package kafkamdm
import (
"encoding/binary"
"hash"
"hash/fnv"
"time"
"github.com/Shopify/sarama"
"github.com/raintank/fakemetrics/out"
"github.com/raintank/met"
"github.com/raintank/worldping-api/pkg/log"
"gopkg.in/raintank/schema.v1"
)
// KafkaMdm publishes MetricData batches to a Kafka topic using a
// synchronous sarama producer.
type KafkaMdm struct {
	out.OutStats
	topic    string
	brokers  []string
	config   *sarama.Config
	client   sarama.SyncProducer
	// shardOrg additionally shards partition keys by metric name.
	shardOrg bool
	// hash is reused across Flush calls; not safe for concurrent Flushes.
	hash hash.Hash32
}
// New builds a KafkaMdm that produces to topic via the given brokers, using
// the compression codec named by codec and reporting stats to the backend.
func New(topic string, brokers []string, codec string, stats met.Backend, shardOrg bool) (*KafkaMdm, error) {
	// We are looking for strong consistency semantics.
	// Because we don't change the flush settings, sarama will try to produce messages
	// as fast as possible to keep latency low.
	config := sarama.NewConfig()
	config.Producer.Return.Successes = true
	config.Producer.RequiredAcks = sarama.WaitForAll // Wait for all in-sync replicas to ack the message
	config.Producer.Retry.Max = 10                   // Retry up to 10 times to produce the message
	config.Producer.Compression = out.GetCompression(codec)
	err := config.Validate()
	if err != nil {
		return nil, err
	}
	client, err := sarama.NewSyncProducer(brokers, config)
	if err != nil {
		return nil, err
	}
	return &KafkaMdm{
		OutStats: out.NewStats(stats, "kafka-mdm"),
		topic:    topic,
		brokers:  brokers,
		config:   config,
		client:   client,
		shardOrg: shardOrg,
		hash:     fnv.New32a(),
	}, nil
}
// Close shuts down the underlying Kafka producer.
func (k *KafkaMdm) Close() error {
	return k.client.Close()
}
// Flush marshals each metric into its own Kafka message and sends them
// synchronously as one batch, updating the out stats along the way. It
// returns the first marshal error or the batch send error, if any.
// NOTE(review): k.hash is shared mutable state, so Flush is not safe for
// concurrent use — confirm callers serialize calls.
func (k *KafkaMdm) Flush(metrics []*schema.MetricData) error {
	if len(metrics) == 0 {
		k.FlushDuration.Value(0)
		return nil
	}
	preFlush := time.Now()
	k.MessageMetrics.Value(1)
	payload := make([]*sarama.ProducerMessage, len(metrics))
	for i, metric := range metrics {
		// Each message needs its own buffer: payload retains the slice until
		// SendMessages, so a shared scratch buffer must not be reused. (The
		// original's `data, err := metric.MarshalMsg(data[:])` only worked
		// because `:=` shadowed the outer buffer on every iteration.)
		data, err := metric.MarshalMsg(nil)
		if err != nil {
			return err
		}
		k.MessageBytes.Value(int64(len(data)))
		// partition by organisation: metrics for the same org should go to the same
		// partition/MetricTank (optimize for locality~performance)
		// the extra 4B (now initialized with zeroes) is to later enable a smooth transition
		// to a more fine-grained partitioning scheme where
		// large organisations can go to several partitions instead of just one.
		key := make([]byte, 8)
		binary.LittleEndian.PutUint32(key, uint32(metric.OrgId))
		if k.shardOrg {
			k.hash.Write([]byte(metric.Name))
			binary.LittleEndian.PutUint32(key[4:], k.hash.Sum32())
			k.hash.Reset()
		}
		payload[i] = &sarama.ProducerMessage{
			Key:   sarama.ByteEncoder(key),
			Topic: k.topic,
			Value: sarama.ByteEncoder(data),
		}
	}
	prePub := time.Now()
	err := k.client.SendMessages(payload)
	if err != nil {
		k.PublishErrors.Inc(1)
		if errors, ok := err.(sarama.ProducerErrors); ok {
			for i := 0; i < 10 && i < len(errors); i++ {
				log.Error(4, "ProducerError %d/%d: %s", i, len(errors), errors[i].Error())
			}
		}
		return err
	}
	k.PublishedMessages.Inc(int64(len(metrics)))
	k.PublishDuration.Value(time.Since(prePub))
	k.PublishedMetrics.Inc(int64(len(metrics)))
	k.FlushDuration.Value(time.Since(preFlush))
	return nil
}
|
package sploit
import (
"fmt"
)
// FileFormat represents the type of file under analysis.
type FileFormat uint16

// Architecture represents the processor architecture.
type Architecture uint16

// Endian is an integer type that represents the byte order of a binary.
type Endian int
// File format values.
// NOTE(review): these are untyped int constants, not FileFormat-typed —
// confirm whether they should be declared `PEFile FileFormat = iota`.
const (
	// PEFile represents Microsoft PE file format
	PEFile = iota
	// ELFFile represents Unix ELF file format
	ELFFile
	// UnknownFile indicates that the file format is unsupported
	UnknownFile
)
// Architecture values.
// NOTE(review): like the file format values, these are untyped ints rather
// than Architecture-typed constants.
const (
	// ArchX8664 indicates Intel x86-64 ISA
	ArchX8664 = iota
	// ArchI386 - Intel x86
	ArchI386
	// ArchARM - ARM (32-bit)
	ArchARM
	// ArchAARCH64 - ARM (64-bit)
	ArchAARCH64
	// ArchPPC - PowerPC
	ArchPPC
	// ArchMIPS - MIPS
	ArchMIPS
	// ArchIA64 - Intel Itanium
	ArchIA64
)
// Processor is a struct that represents a binary's machine type.
type Processor struct {
	Architecture Architecture
	Endian       Endian
}
// Byte order values.
const (
	// LittleEndian - little endian byte order
	LittleEndian Endian = iota
	// BigEndian - big endian byte order
	BigEndian Endian = iota // the repeated `= iota` is redundant but harmless
)
// String implements fmt.Stringer, returning "little"/"big" for the known
// byte orders and the numeric value for anything else.
func (e Endian) String() string {
	if e == LittleEndian {
		return "little"
	}
	if e == BigEndian {
		return "big"
	}
	return fmt.Sprintf("%d", int(e))
}
// Mitigations is used to store information on exploit mitigations detected
// while loading the binary.
type Mitigations struct {
	Canary bool // stack canary present
	NX     bool // non-executable stack/heap
}
|
package main
import (
"encoding/json"
"fmt"
"github.com/imroc/req"
"gitlab.com/buddyspencer/chameleon"
"gopkg.in/cheggaaa/pb.v1"
"gopkg.in/yaml.v2"
"io/ioutil"
"log"
"os"
"regexp"
"strings"
)
// Appimage mirrors the JSON-feed structure of the appimage.json catalog.
type Appimage struct {
	Version     int    `json:"version"`
	HomePageURL string `json:"home_page_url"`
	FeedURL     string `json:"feed_url"`
	Description string `json:"description"`
	Icon        string `json:"icon"`
	Favicon     string `json:"favicon"`
	Expired     bool   `json:"expired"`
	Items []struct {
		Name        string   `json:"name"`
		Description string   `json:"description,omitempty"`
		Categories  []string `json:"categories"`
		Authors []struct {
			Name string `json:"name"`
			URL  string `json:"url"`
		} `json:"authors"`
		License interface{} `json:"license"`
		Links []struct {
			Type string `json:"type"`
			URL  string `json:"url"`
		} `json:"links"`
		Icons       []string `json:"icons"`
		Screenshots []string `json:"screenshots"`
	} `json:"items"`
}

// AppImageSave is the on-disk YAML state listing installed AppImages.
type AppImageSave struct {
	Appimage [] AppImageSlot `yaml:"appimage"`
}

// AppImageSlot records which downloaded file backs a given program.
type AppImageSlot struct {
	Program string `yaml:"program"`
	File    string `yaml:"file"`
}
// SearchAppImage scans the appimage.json catalog for apps whose uppercased
// name matches the package-level regexp `re` and appends a Found entry to
// the package-level `found` slice for every "uas" download link.
func SearchAppImage() {
	appimagefile, err := ioutil.ReadFile(fmt.Sprintf("%s/appimage.json", repoPath))
	if err != nil {
		panic(err)
	}
	appimage := Appimage{}
	if err := json.Unmarshal(appimagefile, &appimage); err != nil {
		panic(err)
	}
	for _, app := range appimage.Items {
		if len(re.FindAllString(strings.ToUpper(app.Name), -1)) > 0 {
			for _, link := range app.Links {
				if link.Type == "uas" {
					found = append(found, Found{app.Name, "", app.Name, link.URL, "appimage"})
				}
			}
		}
	}
	// The original ended with `appimage = Appimage{}`, a dead store on a
	// local variable about to go out of scope; removed.
}
// InstallAppImage downloads found[choice] into ~/.appimage, marks it
// executable, and records (or updates) its entry in ~/.appimage/.appimage.yml.
// Any failure prints a message and exits the process.
func InstallAppImage(choice int) {
	home, err := os.UserHomeDir()
	if err != nil {
		panic(err)
	}
	var appimagesave *AppImageSave
	appimage_dir := fmt.Sprintf("%s/.appimage", home)
	appimage_save := fmt.Sprintf("%s/.appimage.yml", appimage_dir)
	fmt.Println("saving to", appimage_dir)
	// Create the target directory on first use.
	if _, err = os.Stat(appimage_dir); os.IsNotExist(err) {
		err = os.MkdirAll(appimage_dir, 0700)
		fmt.Printf("%s not found. Trying to create it.\n", appimage_dir)
		if err != nil {
			fmt.Println("could not create " + appimage_dir + ".")
			os.Exit(1)
		}
		fmt.Println("Created it.")
	}
	// Load existing install state, or start fresh when none exists yet.
	if _, err = os.Stat(appimage_save); os.IsNotExist(err) {
		appimagesave = &AppImageSave{}
	} else {
		appimagesave, _ = ReadAppImageSaveFile(appimage_save)
	}
	// Progress bar is created lazily on the first progress callback, once
	// the total size is known.
	var bar *pb.ProgressBar
	var started bool
	progress := func(current, total int64) {
		if !started {
			bar = pb.New(int(total)).SetUnits(pb.U_BYTES)
			bar.Start()
			started = true
		}
		bar.Set(int(current))
	}
	r, err := req.Get(found[choice].url, req.DownloadProgress(progress))
	if err != nil {
		fmt.Println("could not download", found[choice].url)
		os.Exit(1)
	}
	// The downloaded file keeps the basename of its source URL.
	splittedLink := strings.Split(found[choice].url, "/")
	filename := fmt.Sprintf("%s/%s", appimage_dir, splittedLink[len(splittedLink) - 1])
	err = r.ToFile(filename)
	if err != nil {
		fmt.Println("could not save file.")
		os.Exit(1)
	}
	fmt.Printf("Downloaded %s\n", filename)
	err = os.Chmod(filename, 0775)
	if err != nil {
		fmt.Println("could not set permissions on ", filename)
		os.Exit(1)
	}
	fmt.Printf("Set execute permission on %s\n", filename)
	// Update the existing slot for this program, or append a new one.
	f := false
	for x, slot := range appimagesave.Appimage {
		if found[choice].name == slot.Program {
			appimagesave.Appimage[x].File = splittedLink[len(splittedLink) - 1]
			f = true
			break
		}
	}
	if !f {
		appimagesave.Appimage = append(appimagesave.Appimage, AppImageSlot{found[choice].name, splittedLink[len(splittedLink) - 1]})
	}
	WriteAppImageSaveFile(appimage_save, appimagesave)
}
// ReadAppImageSaveFile loads the YAML state file tracking installed
// AppImages. Read or parse failures panic. The boolean result is always
// true; it is kept only for compatibility with existing callers.
func ReadAppImageSaveFile(configfile string) (*AppImageSave, bool) {
	cfgdata, err := ioutil.ReadFile(configfile)
	if err != nil {
		log.Panic("Cannot open config file from " + configfile)
	}
	t := AppImageSave{}
	// cfgdata is already []byte; the original's []byte(cfgdata) conversion
	// was redundant, and its second log.Panic(err) was unreachable.
	if err := yaml.Unmarshal(cfgdata, &t); err != nil {
		log.Panicf("Cannot map yml config file to interface, possible syntax error: %v", err)
	}
	return &t, true
}
// WriteAppImageSaveFile marshals config to YAML and writes it to configfile
// with mode 0644; marshal or write failures are fatal.
func WriteAppImageSaveFile(configfile string, config *AppImageSave) {
	d, err := yaml.Marshal(&config)
	if err != nil {
		log.Fatalf("error: %v", err)
	}
	err = ioutil.WriteFile(configfile, d, 0644)
	if err != nil {
		log.Fatalf("error: %v", err)
	}
}
// UpdateAppImages re-resolves every installed AppImage against the catalog:
// images whose latest download differs from the local file are reinstalled
// and the stale files deleted. Mutates the package-level `re` and `found`.
func UpdateAppImages() {
	fmt.Println("updating appimages...")
	home, err := os.UserHomeDir()
	if err != nil {
		panic(err)
	}
	appimage_dir := fmt.Sprintf("%s/.appimage", home)
	appimage_save := fmt.Sprintf("%s/.appimage.yml", appimage_dir)
	toDelete := []string{}
	if _, err = os.Stat(appimage_save); !os.IsNotExist(err) {
		appimagesave, _ := ReadAppImageSaveFile(appimage_save)
		files, _ := ioutil.ReadDir(appimage_dir)
		for _, f := range files {
			for _, a := range appimagesave.Appimage {
				if f.Name() == a.File {
					// Search the catalog for this installed program.
					re = regexp.MustCompile(fmt.Sprintf("(?m)%s", strings.ToUpper(a.Program)))
					SearchAppImage()
					dfile := strings.Split(found[len(found)-1].url, "/")
					if dfile[len(dfile)-1] == f.Name() {
						// Already current: drop the search hit again.
						found = found[:len(found)-1]
					} else {
						toDelete = append(toDelete, f.Name())
					}
				}
			}
		}
		// Anything still in `found` is outdated and gets reinstalled.
		for k := range found {
			fmt.Println("updating", chameleon.BLightblue(found[k].name))
			InstallAppImage(k)
		}
		for _, del := range toDelete {
			err := os.Remove(fmt.Sprintf("%s/%s", appimage_dir, del))
			if err != nil {
				fmt.Println("couldn't delete", del)
			}
		}
	}
}
|
package main
import (
// "learn4/splice"
// "learn4/make"
// "learn4/exercise"
"learn4/randomPwd"
)
// main runs the randomPwd exercise; the other lessons are kept commented
// out for quick switching.
func main() {
	// splice.Test()
	// make.Test()
	// exercise.Test()
	randomPwd.Test()
}
|
package main
import(
"time"
"net"
"github.com/dedis/protobuf"
"fmt"
"crypto/rsa"
)
// PuzzleProposal asks a prospective peer to mine a block for the given
// node id and timestamp on top of PreviousHash.
type PuzzleProposal struct{
	Origin string
	NodeID uint64
	Timestamp time.Time
	PreviousHash []byte
}

// PuzzleResponse carries the mined block back to the proposer.
type PuzzleResponse struct{
	Origin string
	Destination string
	CreatedBlock *Block
}

// BlockBroadcast floods a newly accepted block to peers.
type BlockBroadcast struct{
	Origin string
	NewBlock *Block
}

// BlockChainMessage ships a full chain to a newly joined peer.
type BlockChainMessage struct{
	Chain *BlockChain
}

// PuzzlesState holds this node's identity, keys, local chain, peer list and
// the puzzle proposals still awaiting a response.
type PuzzlesState struct{
	MyID uint64
	MyName string
	privKey *rsa.PrivateKey
	PubKey *rsa.PublicKey
	Joined bool
	LocalChain *BlockChain
	conn *net.UDPConn
	waiting map[string]*PuzzleProposal // pending proposals keyed by peer address
	peers []*Gossiper
}
// addNewGossiper registers a peer by UDP address unless it is already known.
func (ps *PuzzlesState) addNewGossiper(address, identifier string){
	for _, g := range ps.peers {
		if g.address.String() == address {
			return
		}
	}
	udpAddr, err := net.ResolveUDPAddr("udp", address)
	if err != nil {
		// Don't register a peer with a nil address: the original discarded
		// this error and would later dereference the nil *UDPAddr.
		fmt.Println("cannot resolve peer address", address, ":", err)
		return
	}
	ps.peers = append(ps.peers, &Gossiper{
		address:    udpAddr,
		identifier: identifier,
	})
}
// handlePuzzleProposal mines a block for the proposed puzzle — only while
// this node has not yet joined — then answers the proposer with a
// PuzzleResponse and records how long joining took.
func (ps *PuzzlesState) handlePuzzleProposal(pp *PuzzleProposal, from *net.UDPAddr){
	if(!ps.Joined){
		start := time.Now()
		fmt.Println("Received puzzle proposal. Start mining.")
		b := mineBlock(pp.NodeID, pp.Timestamp, ps.PubKey, pp.PreviousHash)
		fmt.Println("Done mining. Send puzzle response.")
		ps.MyID = pp.NodeID
		ps.addNewGossiper(from.String(), pp.Origin)
		pr := &PuzzleResponse{ps.MyName, pp.Origin, b}
		ps.send(&GossipPacket{PResponse: pr}, from)
		t := time.Now()
		elapsed := t.Sub(start)
		fmt.Println("Time to join: ",elapsed)
	}
}
// handlePuzzleResponse validates a mined block against the proposal we sent
// to this peer. On success the block joins the local chain, the peer is
// registered and announced on the channel, and the block plus our chain are
// propagated.
func (ps *PuzzlesState) handlePuzzleResponse(pr *PuzzleResponse, from *net.UDPAddr, channel chan* IPMatchNodeID){
	fmt.Println("Received puzzle response.")
	pp,ok := ps.waiting[from.String()]
	if(ok && pr.Destination==ps.MyName){
		// The block must match the node id and timestamp we proposed.
		if(pr.CreatedBlock.NodeID==pp.NodeID && pr.CreatedBlock.Timestamp.Equal(pp.Timestamp)){
			success := ps.LocalChain.addBlock(pr.CreatedBlock) // puts new Block into local Blockchain
			if(success){
				delete(ps.waiting, from.String())
				ps.addNewGossiper(from.String(), pr.Origin)
				fmt.Println("The puzzle response is correct.")
				channel<- &IPMatchNodeID{from.String(), pr.CreatedBlock.NodeID}
				ps.LocalChain.print()
				ps.broadcastBlock(pr.CreatedBlock, from)
				ps.sendBlockChain(from)
			}else{
				fmt.Println("the puzzle response is incorrect.")
			}
		}
	}
}
// broadcastBlock forwards block b to every known peer except the sender.
func (ps *PuzzlesState) broadcastBlock(b *Block, from *net.UDPAddr){
	packet := &GossipPacket{BBroadcast: &BlockBroadcast{ps.MyName, b}}
	sender := from.String()
	for _, peer := range ps.peers {
		if peer.address.String() == sender {
			continue
		}
		fmt.Println("Send block to "+peer.address.String())
		ps.send(packet, peer.address)
	}
}
// sendBlockChain ships our full local chain to dest.
func (ps *PuzzlesState) sendBlockChain(dest *net.UDPAddr){
	bcm := &BlockChainMessage{ps.LocalChain}
	msg := &GossipPacket{BChain : bcm}
	ps.send(msg, dest)
}
// handleBlockChain adopts a received chain (only while not yet joined) after
// verifying its integrity; an invalid chain is a fatal condition.
func (ps *PuzzlesState) handleBlockChain(bcm *BlockChainMessage, from *net.UDPAddr){
	if(!ps.Joined){
		ok := bcm.Chain.checkIntegrity()
		if(ok){
			ps.LocalChain = bcm.Chain
			ps.Joined = true
			fmt.Println("Updated block chain.")
			ps.LocalChain.print()
		}else{
			panic("Block chain integrity is not correct. You should retry or connect to another peer")
		}
	}
}
// handleBlockBroadcast appends a gossiped block to the local chain and
// re-floods it to the other peers. Blocks the chain rejects (duplicates or
// invalid) are dropped silently, which also stops the flood.
func (ps *PuzzlesState) handleBlockBroadcast(bb *BlockBroadcast, from *net.UDPAddr) {
	if !ps.LocalChain.addBlock(bb.NewBlock) {
		return // rejected block: do not propagate it further
	}
	fmt.Println("Received new block and updated block chain.")
	ps.LocalChain.print()
	ps.broadcastBlock(bb.NewBlock, from)
}
// handleJoining challenges a newcomer with a mining puzzle. Only a node
// that has itself joined and owns a non-empty chain may propose puzzles,
// and at most one proposal is kept outstanding per joiner address.
func (ps *PuzzlesState) handleJoining(joiner *net.UDPAddr) {
	if ps.LocalChain.LastBlock == nil || !ps.Joined {
		return // not in a position to admit anyone yet
	}
	if _, pending := ps.waiting[joiner.String()]; !pending {
		ps.sendPuzzleProposal(joiner)
	}
}
// sendPuzzleProposal challenges dest with a fresh mining puzzle built from
// the next free node ID, the current time and the hash of our last block.
// The proposal is remembered in ps.waiting so the matching response can be
// validated later by handlePuzzleResponse.
func (ps *PuzzlesState) sendPuzzleProposal(dest *net.UDPAddr){
	fmt.Println("Send puzzle proposal.")
	pp := &PuzzleProposal{ps.MyName,ps.LocalChain.nextNodeID(),time.Now(),ps.LocalChain.LastBlock.hash()}
	ps.waiting[dest.String()] = pp
	ps.send(&GossipPacket{PProposal: pp}, dest)
}
// send stamps msg with our node ID, serializes it with protobuf and writes
// it to dest_addr over the state's UDP connection. Errors are logged to
// stdout; an encoding failure now aborts the send instead of proceeding to
// write an empty/invalid packet (the original fell through to WriteToUDP
// even when protobuf.Encode failed).
func (ps *PuzzlesState) send(msg *GossipPacket, dest_addr *net.UDPAddr) {
	msg.NodeID = ps.MyID
	packetBytes, err := protobuf.Encode(msg)
	if err != nil {
		fmt.Println(err, " dest : ", dest_addr.String())
		return // nothing valid to transmit
	}
	if _, err := ps.conn.WriteToUDP(packetBytes, dest_addr); err != nil {
		fmt.Println(err, " dest : ", dest_addr.String())
	}
}
// expireJoining starts a background goroutine that clears the Joined flag
// on every tick of `expiration`, forcing the node to re-join periodically.
// The goroutine (and its ticker) lives for the remainder of the process.
// NOTE(review): ps.Joined is written here and read from other goroutines
// without synchronization — confirm with the race detector.
func (ps *PuzzlesState) expireJoining() {
	ticker := time.NewTicker(expiration)
	go func() {
		for range ticker.C {
			ps.Joined = false
		}
	}()
}
|
package main
import (
"fmt"
"github.com/quiz.algorithm-solve/go/scanner"
)
// Solve reduces n modulo 21. Go's % truncates toward zero, so the result
// carries the sign of n.
func Solve(n int) int {
	remainder := n % 21
	return remainder
}
// main reads one integer from the quiz scanner and prints Solve of it,
// with no trailing newline (the judge presumably compares raw output —
// TODO confirm against the quiz harness).
func main() {
	fmt.Printf("%d", Solve(scanner.ScanInt()))
}
|
package main
import (
"encoding/csv"
"fmt"
"os"
"strconv"
)
type Post struct {
Id int
Content string
Author string
}
func main() {
csvFile, err := os.Create("posts.csv")
if err != nil {
panic(err)
}
defer csvFile.Close()
allPosts := []Post{
Post{Id: 1, Content: "Go to school", Author: "Tom"},
Post{Id: 2, Content: "Good morning", Author: "Jimmy"},
Post{Id: 3, Content: "Sit down please", Author: "Sam"},
}
writer := csv.NewWriter(csvFile)
for _, post := range allPosts {
line := []string{strconv.Itoa(post.Id), post.Content, post.Author}
err := writer.Write(line)
if err != nil {
panic(err)
}
}
writer.Flush()
file, err := os.Open("posts.csv")
if err != nil {
panic(err)
}
defer file.Close()
reader := csv.NewReader(file)
reader.FieldsPerRecord = -1
record, err := reader.ReadAll()
if err != nil {
panic(err)
}
var posts []Post
for _, item := range record {
id, _ := strconv.ParseInt(item[0], 0, 0)
post := Post{Id: int(id), Content: item[1], Author: item[2]}
posts = append(posts, post)
}
fmt.Println(posts[0].Id)
fmt.Println(posts[0].Content)
fmt.Println(posts[0].Author)
}
|
package venom
import (
"context"
"encoding/json"
"fmt"
"reflect"
"strings"
"github.com/gosimple/slug"
"github.com/ovh/cds/sdk/interpolate"
"github.com/pkg/errors"
"github.com/rockbears/yaml"
)
// Executor executes a single test step and returns the step's raw result
// (later dumped into assertion variables) or an error.
type Executor interface {
	// Run runs one test step with the given context.
	Run(context.Context, TestStep) (interface{}, error)
}
// ExecutorRunner decorates an Executor with the per-step execution
// attributes parsed from the test file (retries, delay, timeout, info)
// plus the optional lifecycle capabilities (default assertions, zero-value
// result, setup/teardown).
type ExecutorRunner interface {
	Executor
	executorWithDefaultAssertions
	executorWithZeroValueResult
	ExecutorWithSetup
	Name() string // executor name as referenced by test steps
	Retry() int // number of retries when the step fails
	RetryIf() []string // conditions checked before attempting a retry
	Delay() int // delay between two retries
	Timeout() int // timeout on the executor run
	Info() []string // info lines shown after the run, before assertions
	Type() string // builtin, plugin or user
	GetExecutor() Executor // the wrapped implementation
}
// Compile-time proof that *executor satisfies Executor.
var _ Executor = new(executor)
// executor wraps an Executor implementation together with the step
// attributes (retry/delay/timeout/info) parsed from the test file.
type executor struct {
	Executor
	name string
	retry int // nb retry a test case if it is in failure.
	retryIf []string // retry conditions to check before performing any retries
	delay int // delay between two retries
	timeout int // timeout on executor
	info []string // info to display after the run and before the assertion
	stype string // builtin, plugin, user
}
// Name returns the executor name referenced by test steps.
func (e executor) Name() string {
	return e.name
}

// Type returns the executor kind: builtin, plugin or user.
func (e executor) Type() string {
	return e.stype
}

// Retry returns how many times a failing step is retried.
func (e executor) Retry() int {
	return e.retry
}

// RetryIf returns the conditions checked before attempting a retry.
func (e executor) RetryIf() []string {
	return e.retryIf
}

// Delay returns the delay between two retries.
func (e executor) Delay() int {
	return e.delay
}

// Timeout returns the timeout applied to the executor run.
func (e executor) Timeout() int {
	return e.timeout
}

// Info returns the info lines shown after the run, before assertions.
func (e executor) Info() []string {
	return e.info
}

// GetExecutor returns the wrapped Executor implementation.
func (e executor) GetExecutor() Executor {
	return e.Executor
}
// GetDefaultAssertions returns the wrapped executor's default assertions,
// or nil when the executor is absent or does not provide any.
func (e executor) GetDefaultAssertions() *StepAssertions {
	// A type assertion on a nil interface value always fails, so this also
	// covers e.Executor == nil.
	impl, ok := e.Executor.(executorWithDefaultAssertions)
	if !ok {
		return nil
	}
	return impl.GetDefaultAssertions()
}
// ZeroValueResult returns the executor-specific zero value used when a
// step produced no result, or nil when the capability is not implemented.
func (e executor) ZeroValueResult() interface{} {
	// Nil interface values never satisfy the assertion, so the explicit
	// nil check of the original is subsumed here.
	if impl, ok := e.Executor.(executorWithZeroValueResult); ok {
		return impl.ZeroValueResult()
	}
	return nil
}
// Setup delegates to the wrapped executor's Setup when it implements
// ExecutorWithSetup; otherwise the context passes through untouched.
func (e executor) Setup(ctx context.Context, vars H) (context.Context, error) {
	if impl, ok := e.Executor.(ExecutorWithSetup); ok {
		return impl.Setup(ctx, vars)
	}
	return ctx, nil
}
// TearDown delegates to the wrapped executor's TearDown when it implements
// ExecutorWithSetup; otherwise it is a no-op.
func (e executor) TearDown(ctx context.Context) error {
	if impl, ok := e.Executor.(ExecutorWithSetup); ok {
		return impl.TearDown(ctx)
	}
	return nil
}
// Run forwards the step to the wrapped executor. A missing executor is a
// no-op returning (nil, nil); the nil check is required here because Run
// is promoted from the embedded interface and would panic on nil.
func (e executor) Run(ctx context.Context, step TestStep) (interface{}, error) {
	if e.Executor == nil {
		return nil, nil
	}
	return e.Executor.Run(ctx, step)
}
// newExecutorRunner bundles an Executor with its step attributes into the
// concrete runner used by the engine.
func newExecutorRunner(e Executor, name, stype string, retry int, retryIf []string, delay, timeout int, info []string) ExecutorRunner {
	return &executor{
		Executor: e,
		name: name,
		retry: retry,
		retryIf: retryIf,
		delay: delay,
		timeout: timeout,
		info: info,
		stype: stype,
	}
}
// executorWithDefaultAssertions is implemented by executors that provide
// assertions applied when a step declares none of its own.
type executorWithDefaultAssertions interface {
	// GetDefaultAssertions returns the default assertions.
	GetDefaultAssertions() *StepAssertions
}

// executorWithZeroValueResult is implemented by executors that can supply
// a placeholder result when a step produced none.
type executorWithZeroValueResult interface {
	ZeroValueResult() interface{}
}

// ExecutorWithSetup is implemented by executors needing per-step lifecycle
// hooks around Run.
type ExecutorWithSetup interface {
	Setup(ctx context.Context, vars H) (context.Context, error)
	TearDown(ctx context.Context) error
}
// GetExecutorResult flattens an executor result into a map of
// dot-separated keys via Dump. It panics when the value cannot be dumped,
// which is treated as a programmer error.
func GetExecutorResult(r interface{}) map[string]interface{} {
	d, err := Dump(r)
	if err != nil {
		panic(err)
	}
	return d
}
// UserExecutor is an executor defined by the user in YAML: a named set of
// raw test steps with declared inputs and an output template.
type UserExecutor struct {
	Executor string `json:"executor" yaml:"executor"` // executor name
	Input H `json:"input" yaml:"input"` // declared inputs with their default values
	RawTestSteps []json.RawMessage `json:"steps" yaml:"steps"` // steps kept raw for later interpolation
	Output json.RawMessage `json:"output" yaml:"output"` // output template interpolated after the run
	Filename string `json:"-" yaml:"-"` // source file; never (de)serialized
}
// Run is not implemented on user executor: user executors are executed via
// RunUserExecutor, which runs their raw steps as a nested test case.
func (ux UserExecutor) Run(ctx context.Context, step TestStep) (interface{}, error) {
	return nil, errors.New("Run not implemented for user interface, use RunUserExecutor instead")
}
// ZeroValueResult builds the placeholder result for a user executor: the
// declared output template wrapped under a "result" key and round-tripped
// through JSON into a map. Any marshalling failure deliberately degrades
// to an empty string rather than an error.
func (ux UserExecutor) ZeroValueResult() interface{} {
	type Output struct {
		Result interface{} `json:"result"`
	}
	output := &Output{
		Result: ux.Output,
	}
	outputS, err := json.Marshal(output)
	if err != nil {
		return ""
	}
	result := make(map[string]interface{})
	err = JSONUnmarshal(outputS, &result)
	if err != nil {
		return ""
	}
	return result
}
// RunUserExecutor executes a user-defined (YAML) executor as a nested test
// case: it seeds the "input." variable namespace from the step and the
// executor defaults, re-resolves the executor with those vars, runs its raw
// steps, then interpolates the declared output template with the computed
// vars. For every string value in the result that parses as JSON, a
// "<key>json" variant is added so tests can address the structured form.
func (v *Venom) RunUserExecutor(ctx context.Context, runner ExecutorRunner, tcIn *TestCase, tsIn *TestStepResult, step TestStep) (interface{}, error) {
	vrs := tcIn.TestSuiteVars.Clone()
	uxIn := runner.GetExecutor().(UserExecutor)
	// Build the input namespace: step values override executor defaults;
	// venom-prefixed keys are passed through unprefixed.
	for k, va := range uxIn.Input {
		if strings.HasPrefix(k, "input.") {
			// do not reinject input.vars from parent user executor if exists
			continue
		} else if !strings.HasPrefix(k, "venom") {
			if vl, ok := step[k]; ok && vl != "" { // value from step
				vrs.AddWithPrefix("input", k, vl)
			} else { // default value from executor
				vrs.AddWithPrefix("input", k, va)
			}
		} else {
			vrs.Add(k, va)
		}
	}
	// reload the user executor with the interpolated vars
	_, exe, err := v.GetExecutorRunner(ctx, step, vrs)
	if err != nil {
		return nil, errors.Wrapf(err, "unable to reload executor")
	}
	ux := exe.GetExecutor().(UserExecutor)
	// Wrap the executor's raw steps in a synthetic test case.
	tc := &TestCase{
		TestCaseInput: TestCaseInput{
			Name: ux.Executor,
			RawTestSteps: ux.RawTestSteps,
			Vars: vrs,
		},
		TestSuiteVars: tcIn.TestSuiteVars,
		IsExecutor: true,
		TestStepResults: make([]TestStepResult, 0),
	}
	tc.originalName = tc.Name
	tc.Name = slug.Make(tc.Name)
	tc.Vars.Add("venom.testcase", tc.Name)
	tc.Vars.Add("venom.executor.filename", ux.Filename)
	tc.Vars.Add("venom.executor.name", ux.Executor)
	tc.computedVars = H{}
	Debug(ctx, "running user executor %v", tc.Name)
	Debug(ctx, "with vars: %v", vrs)
	v.runTestSteps(ctx, tc, tsIn)
	computedVars, err := DumpString(tc.computedVars)
	if err != nil {
		return nil, errors.Wrapf(err, "unable to dump testcase computedVars")
	}
	type Output struct {
		Result json.RawMessage `json:"result"`
	}
	output := Output{
		Result: ux.Output,
	}
	outputString, err := json.Marshal(output)
	if err != nil {
		return nil, err
	}
	// the value of each var can contains a double-quote -> "
	// if the value is not escaped, it will be used as is, and the json sent to unmarshall will be incorrect.
	// This also avoids injections into the json structure of a user executor
	for i := range computedVars {
		computedVars[i] = strings.ReplaceAll(computedVars[i], "\"", "\\\"")
	}
	outputS, err := interpolate.Do(string(outputString), computedVars)
	if err != nil {
		return nil, err
	}
	var outputResult interface{}
	if err := yaml.Unmarshal([]byte(outputS), &outputResult); err != nil {
		return nil, errors.Wrapf(err, "unable to unmarshal")
	}
	// The interpolated output is still returned alongside the failure so
	// callers can inspect partial results.
	if len(tsIn.Errors) > 0 {
		return outputResult, fmt.Errorf("failed")
	}
	// here, we have the user executor results.
	// and for each key in output, we try to add the json version
	// this will allow user to use json version of output (map, etc...)
	// because, it's not possible to to that:
	// output:
	// therawout: {{.result.systemout}}
	//
	// test is in file user_executor.yml
	result, err := Dump(outputResult)
	if err != nil {
		return nil, errors.Wrapf(err, "unable to compute result")
	}
	for k, v := range result {
		switch z := v.(type) {
		case string:
			var outJSON interface{}
			if err := JSONUnmarshal([]byte(z), &outJSON); err == nil {
				result[k+"json"] = outJSON
				// Now we have to dump this object, but the key will change if this is a array or not
				if reflect.ValueOf(outJSON).Kind() == reflect.Slice {
					prefix := k + "json"
					splitPrefix := strings.Split(prefix, ".")
					// Repeat the last path segment so slice keys keep the
					// same addressing shape as Dump produces elsewhere.
					prefix += "." + splitPrefix[len(splitPrefix)-1]
					outJSONDump, err := Dump(outJSON)
					if err != nil {
						return nil, errors.Wrapf(err, "unable to compute result")
					}
					for ko, vo := range outJSONDump {
						result[prefix+ko] = vo
					}
				} else {
					outJSONDump, err := DumpWithPrefix(outJSON, k+"json")
					if err != nil {
						return nil, errors.Wrapf(err, "unable to compute result")
					}
					for ko, vo := range outJSONDump {
						result[ko] = vo
					}
				}
			}
		}
	}
	return result, nil
}
|
package clients
import (
"time"
"strings"
"github.com/globalsign/mgo"
"github.com/urbn/ordernumbergenerator/app"
"fmt"
)
var (
	// NewMongoSessionProc is an indirection over NewSession so tests can
	// substitute a fake session factory.
	NewMongoSessionProc = NewSession
)
// NewSession dials MongoDB with the given dial info and returns the session.
func NewSession(dialInfo *mgo.DialInfo) (*mgo.Session, error) { return mgo.DialWithInfo(dialInfo) }
// CreateMongoSession connects to the comma-separated list of Mongo hosts in
// mongoServers (10s dial timeout, Monotonic consistency) and wraps the
// session. A connection failure logs the addresses and panics — callers are
// expected to treat an unreachable Mongo as fatal at startup.
func CreateMongoSession(mongoServers string) MongoSession {
	mhosts := strings.Split(mongoServers, ",")
	dialInfo := &mgo.DialInfo{
		Addrs: mhosts,
		Timeout: 10 * time.Second,
	}
	mongoSession, err := NewMongoSessionProc(dialInfo)
	if err != nil {
		fmt.Printf("Unable to connect to Mongo session at address: %s", dialInfo.Addrs)
		panic(err)
	}
	mongoSession.SetMode(mgo.Monotonic, true)
	return MongoSession{mongoSession}
}
// MongoSession wraps *mgo.Session so DB can return the app.DataLayer
// abstraction instead of the concrete mgo type.
type MongoSession struct {
	*mgo.Session
}

// DB returns the named database wrapped as an app.DataLayer.
func (ms MongoSession) DB(name string) app.DataLayer {
	return &MongoDatabase{Database: ms.Session.DB(name)}
}
// MongoDatabase adapts *mgo.Database to the app.DataLayer interface.
type MongoDatabase struct {
	*mgo.Database
}

// C returns the named collection wrapped as an app.Collection.
func (md MongoDatabase) C(name string) app.Collection {
	return MongoCollection{Collection: md.Database.C(name)}
}
// MongoQuery adapts *mgo.Query to the app.Query interface.
type MongoQuery struct {
	*mgo.Query
}

// Apply runs a findAndModify-style change via the underlying mgo query.
func (mq MongoQuery) Apply(change mgo.Change, result interface{}) (*mgo.ChangeInfo, error) {
	return mq.Query.Apply(change, result)
}
// MongoCollection adapts *mgo.Collection to the app.Collection interface.
type MongoCollection struct {
	*mgo.Collection
}

// Find wraps the underlying mgo query in the app.Query abstraction.
func (mc MongoCollection) Find(query interface{}) app.Query {
	return MongoQuery{
		Query: mc.Collection.Find(query),
	}
}
|
package minnow
import (
"log"
"os"
"time"
)
// Start is the minnow entry point. args[1] must be a config file path. It
// wires the ingest/dispatch pipeline together, launches the worker
// goroutines, then ticks forever feeding the ingest directory into the
// pipeline. The return value is a process exit code; the success path
// never actually returns because the tick loop below is infinite.
func Start(args []string) int {
	logger := log.New(os.Stdout, "Minnow: ", 0)
	if len(args) != 2 {
		logger.Print("Must specify a config file")
		return 1
	}
	config, err := ReadConfig(Path(args[1]))
	if err != nil {
		logger.Print(err.Error())
		return 1
	}
	// Buffered channels decouple the ingester and dispatcher during bursts.
	dispatchChan := make(chan DispatchInfo, 1000)
	ingestDirChan := make(chan IngestDirInfo, 1000)
	// These defers only fire on the early-error returns below; once the
	// worker goroutines start, the function never returns in practice.
	defer close(ingestDirChan)
	defer close(dispatchChan)
	processorRegistry, err := NewProcessorRegistry(config.ProcessorDefinitionsPath)
	if err != nil {
		logger.Print(err.Error())
		return 1
	}
	dispatcher, err := NewDispatcher(config.WorkPath, dispatchChan, ingestDirChan, processorRegistry)
	if err != nil {
		logger.Print(err.Error())
		return 1
	}
	directoryIngester := NewDirectoryIngester(config.WorkPath, ingestDirChan, dispatchChan)
	go dispatcher.Run()
	go directoryIngester.Run()
	go processorRegistry.Run()
	// Every IngestMinAge, request a scan of the ingest directory.
	for range time.Tick(config.IngestMinAge) {
		ingestDirChan <- IngestDirInfo{config.IngestPath, config.IngestMinAge, make([]ProcessorId, 0), false}
	}
	return 0
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.