text
stringlengths 11
4.05M
|
|---|
package rpc
// Processor executes an RPC request and produces its response.
// Implementations are expected to be supplied elsewhere in the package.
type Processor interface {
// Execute runs a single request to completion and returns the response.
Execute(request Request) Response
}
|
package format
import "sync"
// Storage holds data for cross-execution storage.
// The zero value is ready to use: the backing map is allocated lazily.
type Storage struct {
	sync.RWMutex                        // guards data
	data map[string]interface{} // lazily-allocated value store
}

// checkMap allocates the backing map if it has not been created yet.
func (s *Storage) checkMap() {
	s.Lock()
	if s.data == nil {
		s.data = make(map[string]interface{})
	}
	s.Unlock()
}

// set stores data under name.
func (s *Storage) set(name string, data interface{}) {
	s.checkMap()
	s.Lock()
	s.data[name] = data
	s.Unlock()
}

// get returns the value stored under name and whether it was present.
func (s *Storage) get(name string) (interface{}, bool) {
	s.checkMap()
	s.RLock()
	defer s.RUnlock()
	data, ok := s.data[name]
	return data, ok
}

// SetInt sets the int at `name` to `i` and returns it.
func (s *Storage) SetInt(name string, i int) int {
	s.set(name, i)
	return i
}

// GetInt returns the int stored at `name`, or def when the key is
// missing or holds a value of another type.
// FIX: the previous unconditional data.(int) assertion panicked when a
// non-int value was stored under the same name.
func (s *Storage) GetInt(name string, def int) int {
	if data, ok := s.get(name); ok {
		if i, ok := data.(int); ok {
			return i
		}
	}
	return def
}

// SetBool sets the bool at `name` to `data` and returns it.
func (s *Storage) SetBool(name string, data bool) bool {
	s.set(name, data)
	return data
}

// GetBool returns the bool stored at `name`, or def when missing or of
// another type (previously a mismatched type panicked).
func (s *Storage) GetBool(name string, def bool) bool {
	if res, ok := s.get(name); ok {
		if b, ok := res.(bool); ok {
			return b
		}
	}
	return def
}

// SetString sets the string at `name` to `data` and returns it.
func (s *Storage) SetString(name, data string) string {
	s.set(name, data)
	return data
}

// GetString returns the string stored at `name`, or def when missing or
// of another type (previously a mismatched type panicked).
func (s *Storage) GetString(name, def string) string {
	if res, ok := s.get(name); ok {
		if str, ok := res.(string); ok {
			return str
		}
	}
	return def
}

// Delete removes the entry stored under name and returns name.
// It does not error when the entry does not exist; delete on a nil map
// is a no-op, so this is also safe on the zero value.
func (s *Storage) Delete(name string) string {
	s.Lock()
	delete(s.data, name)
	s.Unlock()
	return name
}
|
package bd
import (
"context"
"log"
"time"
"github.com/MiguelAngelderobles/microblog/models"
"go.mongodb.org/mongo-driver/bson/primitive"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
)
// MongoCN is the shared database connection, established at package init.
var MongoCN = ConectarBD()
// clientOptions configures the MongoDB Atlas connection string.
// NOTE(review): credentials are hard-coded in source — move them to
// configuration/secret management and rotate this password.
var clientOptions = options.Client().ApplyURI("mongodb+srv://usuario_1:SkillFactory2021@cluster0.5k8zj.mongodb.net/microblog?retryWrites=true&w=majority")
// ConectarBD opens the MongoDB connection and verifies it with a ping.
// It returns a connected *mongo.Client; on any failure it logs the error
// and terminates the process via log.Fatal.
func ConectarBD() *mongo.Client {
	client, err := mongo.Connect(context.TODO(), clientOptions)
	if err != nil {
		// log.Fatal exits the process, so the `return client` that used
		// to follow it was unreachable dead code.
		log.Fatal(err.Error())
	}
	if err = client.Ping(context.TODO(), nil); err != nil {
		log.Fatal(err.Error())
	}
	log.Println("Conexión exitosa con la BD")
	return client
}
// ChequeoConnection pings the database and reports reachability:
// 1 when the ping succeeds, 0 otherwise.
func ChequeoConnection() int {
	if err := MongoCN.Ping(context.TODO(), nil); err != nil {
		return 0
	}
	return 1
}
// InsertoRegistro inserts user u into the "usuarios" collection with an
// encrypted password. It returns the inserted ObjectID in string form,
// whether the insert succeeded, and any error encountered.
func InsertoRegistro(u models.Usuario) (string, bool, error) {
	ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)
	defer cancel()
	db := MongoCN.Database("microblog")
	// FIX: this previously read `database.Collection(...)`, an undefined
	// identifier — the variable declared above is `db`.
	col := db.Collection("usuarios")
	var err error
	// FIX: propagate encryption failures instead of silently discarding
	// them and storing a zero-value password.
	u.Password, err = EncriptarPassword(u.Password)
	if err != nil {
		return "", false, err
	}
	result, err := col.InsertOne(ctx, u)
	if err != nil {
		return "", false, err
	}
	ObjID, _ := result.InsertedID.(primitive.ObjectID)
	return ObjID.String(), true, nil
}
|
package main
import "testing"
// TestMode checks mode against a table of input/expected-output pairs.
func TestMode(t *testing.T) {
	tests := []struct {
		input []float64
		want  []float64
	}{
		{[]float64{1}, []float64{}},
		{[]float64{1, 3, 4}, []float64{}},
		{[]float64{1, 2, 2, 3, 3, 4}, []float64{2, 3}},
	}
	for _, tc := range tests {
		if got := mode(tc.input); !Float64SliceEqual(got, tc.want) {
			t.Errorf("Reverse(%v) == %v, want %v", tc.input, got, tc.want)
		}
	}
}
//Float64SliceEqual for compare float64 slice
func Float64SliceEqual(a, b []float64) bool {
if len(a) != len(b) {
return false
}
if (a == nil) != (b == nil) {
return false
}
for i, v := range a {
if v != b[i] {
return false
}
}
return true
}
// BenchmarkMode measures mode on a fixed ten-element sample.
func BenchmarkMode(b *testing.B) {
	sample := []float64{3, 4, 4.5, 5, 5, 6.2, 7.1, 7.1, 8.5, 9}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		mode(sample)
	}
}
// BenchmarkMode2 measures mode2 on the same ten-element sample as
// BenchmarkMode, for direct comparison.
func BenchmarkMode2(b *testing.B) {
	sample := []float64{3, 4, 4.5, 5, 5, 6.2, 7.1, 7.1, 8.5, 9}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		mode2(sample)
	}
}
// TestQuadratic exercises quadratic(1, 2, 2) and records the roots.
// FIX: the original called t.Errorf unconditionally, which failed the
// test on every run; t.Logf logs the values without failing.
func TestQuadratic(t *testing.T) {
	x1, x2 := quadratic(1, 2, 2)
	t.Logf("%v,%v", x1, x2)
}
|
package prservice
import (
"os"
"bytes"
"io/ioutil"
"net/http"
"testing"
"net/http/httptest"
fm "github.com/cyg2009/MyTestCode/pkg/functionmanager"
)
// TestMain prepares a clean ./func workspace plus the runtime env vars,
// runs the suite, and removes the workspace afterwards.
func TestMain(m *testing.M) {
	wd, _ := os.Getwd()
	os.Setenv("RUNTIME_ROOT", wd)
	os.Setenv("RUNTIME_LAMBDA", wd+"/../../runtime/bin/lambda-run")
	dest := wd + "/func"
	if _, err := os.Stat(dest); err == nil {
		os.RemoveAll(dest)
	}
	// FIX: os.ModeDir carries no permission bits, so the old call created
	// a mode-000 directory on unix that nothing could write into; 0755
	// yields a usable directory.
	os.Mkdir(dest, 0755)
	code := m.Run()
	if _, err := os.Stat(dest); err == nil {
		os.RemoveAll(dest)
	}
	os.Exit(code)
}
// TestHealthCheckHandler expects POST /health to answer 200 OK.
func TestHealthCheckHandler(t *testing.T) {
	request, err := http.NewRequest("POST", "/health", nil)
	if err != nil {
		t.Fatal(err)
	}
	recorder := httptest.NewRecorder()
	GetPrserviceHttpHandler().ServeHTTP(recorder, request)
	if recorder.Code != http.StatusOK {
		t.Errorf("handler returned wrong status code: got %v want %v",
			recorder.Code, http.StatusOK)
	}
	t.Log(recorder.Body.String())
}
// TestOtherHandler expects an unregistered route to answer 404.
func TestOtherHandler(t *testing.T) {
	request, err := http.NewRequest("POST", "/other", nil)
	if err != nil {
		t.Fatal(err)
	}
	recorder := httptest.NewRecorder()
	GetPrserviceHttpHandler().ServeHTTP(recorder, request)
	if recorder.Code != http.StatusNotFound {
		t.Errorf("handler returned wrong status code: got %v want %v",
			recorder.Code, http.StatusNotFound)
	}
	t.Log(recorder.Body.String())
}
// TestIngestAndExecuteFunction ingests a JS lambda through POST /add,
// verifies the function manager can see it, then invokes it via /invoke.
func TestIngestAndExecuteFunction(t *testing.T) {
	fgr := fm.GetFunctionManager()
	functionId := "f1:1.0"
	//ingest function
	body := bytes.NewBufferString(`
exports.handler = function(event) {
	console.log('f1:1.0 got an event:' + JSON.stringify(event))
	console.log((new Date()).toString())
}
`)
	req, err := http.NewRequest("POST", "/add", body)
	if err != nil {
		t.Fatal(err)
	}
	req.Header.Add("function", functionId)
	rr := httptest.NewRecorder()
	handler := GetPrserviceHttpHandler()
	handler.ServeHTTP(rr, req)
	if status := rr.Code; status != http.StatusOK {
		t.Errorf("Function-Add handler returned wrong status code: got %v want %v",
			status, http.StatusOK)
		t.Log(rr.Body.String())
		return
	}
	sf, ok := fgr.GetFunction(functionId)
	if ok == false || sf == nil {
		t.Errorf("Fail to get function.")
	}
	//Execution function
	evt := []byte(`{"name":"cacia", "age":"19"}`)
	req, err = http.NewRequest("POST", "/invoke", bytes.NewBuffer(evt))
	if err != nil {
		t.Fatal(err)
	}
	req.Header.Add("function", functionId)
	// FIX: the media type was misspelled "applicaiton/json".
	req.Header.Set("Content-Type", "application/json")
	rr = httptest.NewRecorder()
	handler = GetPrserviceHttpHandler()
	handler.ServeHTTP(rr, req)
	if status := rr.Code; status != http.StatusOK {
		t.Errorf("Function-Invoke handler returned wrong status code: got %v want %v",
			status, http.StatusOK)
	}
	t.Log(rr.Body.String())
}
// TestExecuteExistingFunction writes a lambda's index.js straight into
// the runtime's func directory (bypassing /add), then invokes it.
func TestExecuteExistingFunction(t *testing.T) {
	// Use a random id would be better
	functionId := "f2:1.0"
	dest := os.Getenv("RUNTIME_ROOT") + "/func/" + functionId
	data := []byte(`
exports.handler = function(event) {
	console.log((new Date()).toString())
	console.log('f2:1.0 received an event:' + JSON.stringify(event))
}
`)
	os.RemoveAll(dest)
	// FIX: os.ModeDir carries no permission bits — the directory was
	// created mode 000 on unix and the WriteFile below could fail; 0755
	// makes it writable.
	os.Mkdir(dest, 0755)
	dest = dest + "/index.js"
	err := ioutil.WriteFile(dest, data, 0644)
	if err != nil {
		t.Errorf("Fail to create function " + functionId)
	}
	//Execution function
	evt := []byte(`{"name":"cacia", "age":"19"}`)
	req, err := http.NewRequest("POST", "/invoke", bytes.NewBuffer(evt))
	if err != nil {
		t.Fatal(err)
	}
	req.Header.Add("function", functionId)
	// FIX: the media type was misspelled "applicaiton/json".
	req.Header.Set("Content-Type", "application/json")
	rr := httptest.NewRecorder()
	handler := GetPrserviceHttpHandler()
	handler.ServeHTTP(rr, req)
	if status := rr.Code; status != http.StatusOK {
		t.Errorf("Function-Invoke handler returned wrong status code: got %v want %v",
			status, http.StatusOK)
	}
	t.Log(rr.Body.String())
}
|
package bench
import (
"bytes"
"io/ioutil"
"os/exec"
"runtime"
"time"
)
// TraceRoute runs the system trace-route tool against a host and keeps
// the captured output.
type TraceRoute struct {
Name string // human-readable label for this probe
Host string // target host passed to the trace command
Result string // raw stdout of the trace command, set by Do
Duration time.Duration // NOTE(review): never set in this file — confirm intended use
finished bool // NOTE(review): never set in this file — confirm intended use
}
// NewTraceRoute builds a TraceRoute probe for the given display name and
// target host.
func NewTraceRoute(name, host string) *TraceRoute {
	probe := &TraceRoute{Name: name, Host: host}
	return probe
}
// Do runs the platform trace command (tracert on Windows, tracepath
// elsewhere) against tr.Host and stores the captured stdout in tr.Result.
// NOTE(review): Duration and finished are never updated here — confirm
// whether callers expect Do to set them.
func (tr *TraceRoute) Do() error {
	buf := &bytes.Buffer{}
	var cmd *exec.Cmd
	if runtime.GOOS == "windows" {
		cmd = exec.Command("tracert", tr.Host)
	} else {
		cmd = exec.Command("tracepath", "-b", tr.Host)
	}
	cmd.Stdout = buf
	if err := cmd.Run(); err != nil {
		return err
	}
	// Reading from an in-memory bytes.Buffer cannot fail; the previous
	// ioutil.ReadAll round-trip (and its dead error branch) was
	// unnecessary indirection.
	tr.Result = buf.String()
	return nil
}
|
package handle
import (
"boiler/pkg/service"
"context"
"github.com/gocraft/work"
)
// New constructs a Handle backed by the given service implementation.
func New(srv service.Interface) Handle {
	h := Handle{service: srv}
	return h
}
// Handle adapts service operations to gocraft/work background-job
// callbacks.
type Handle struct {
service service.Interface // business-logic backend invoked by each job
}
// DeleteUser handles a background job that deletes the user whose id is
// carried in the job's "id" argument.
func (h *Handle) DeleteUser(j *work.Job) error {
	userID := j.ArgInt64("id")
	return h.service.DeleteUser(context.Background(), userID)
}
// DeleteEmail handles a background job that deletes the email whose id
// is carried in the job's "id" argument.
func (h *Handle) DeleteEmail(j *work.Job) error {
	emailID := j.ArgInt64("id")
	return h.service.DeleteEmail(context.Background(), emailID)
}
|
package slacktest
import (
"context"
"testing"
"github.com/stretchr/testify/assert"
)
// TestGenerateDefaultRTMInfo checks that RTM info carries the default
// bot and team identity when the context sets no overrides.
func TestGenerateDefaultRTMInfo(t *testing.T) {
	const websocketURL = "ws://127.0.0.1:5555/ws"
	rtmInfo := generateRTMInfo(context.TODO(), websocketURL)
	assert.Equal(t, websocketURL, rtmInfo.URL)
	assert.True(t, rtmInfo.Ok)
	assert.Equal(t, defaultBotID, rtmInfo.User.ID)
	assert.Equal(t, defaultBotName, rtmInfo.User.Name)
	assert.Equal(t, defaultTeamID, rtmInfo.Team.ID)
	assert.Equal(t, defaultTeamName, rtmInfo.Team.Name)
	assert.Equal(t, defaultTeamDomain, rtmInfo.Team.Domain)
}
// TestCustomDefaultRTMInfo checks that bot identity set on the context
// overrides the defaults, while team identity stays at defaults.
func TestCustomDefaultRTMInfo(t *testing.T) {
	const websocketURL = "ws://127.0.0.1:5555/ws"
	ctx := context.WithValue(
		context.WithValue(context.TODO(), ServerBotIDContextKey, "U1234567890"),
		ServerBotNameContextKey, "SomeTestBotThing")
	rtmInfo := generateRTMInfo(ctx, websocketURL)
	assert.Equal(t, websocketURL, rtmInfo.URL)
	assert.True(t, rtmInfo.Ok)
	assert.Equal(t, "U1234567890", rtmInfo.User.ID)
	assert.Equal(t, "SomeTestBotThing", rtmInfo.User.Name)
	assert.Equal(t, defaultTeamID, rtmInfo.Team.ID)
	assert.Equal(t, defaultTeamName, rtmInfo.Team.Name)
	assert.Equal(t, defaultTeamDomain, rtmInfo.Team.Domain)
}
// TestGetHubMissingServerAddr expects an empty server address to yield
// an ErrPassedEmptyServerAddr error and nil message channels.
func TestGetHubMissingServerAddr(t *testing.T) {
	channels, err := getHubForServer("")
	assert.Error(t, err, "should return an error")
	assert.EqualError(t, err, ErrPassedEmptyServerAddr.Error())
	assert.Nil(t, channels.seen, "seen should be nil")
	assert.Nil(t, channels.sent, "sent should be nil")
	assert.Nil(t, channels.posted, "posted should be nil")
}
// TestGetHubNoQueuesForServer expects an unregistered server address to
// yield ErrNoQueuesRegisteredForServer and nil message channels.
func TestGetHubNoQueuesForServer(t *testing.T) {
	channels, err := getHubForServer("foo")
	assert.Error(t, err, "should return an error")
	assert.EqualError(t, err, ErrNoQueuesRegisteredForServer.Error())
	assert.Nil(t, channels.seen, "seen should be nil")
	assert.Nil(t, channels.sent, "sent should be nil")
	assert.Nil(t, channels.posted, "posted should be nil")
}
// TestUnableToAddToHub expects registering an empty Server to fail with
// ErrEmptyServerToHub.
func TestUnableToAddToHub(t *testing.T) {
	addErr := addServerToHub(&Server{}, &messageChannels{})
	assert.Error(t, addErr, "should return and error")
	assert.EqualError(t, addErr, ErrEmptyServerToHub.Error())
}
|
package main
import (
"fmt"
"sort"
)
/*
strategy001 scores each stock and reports on the top group.

Rationale: turnover rate = traded value / float market cap. Since money
is always moving in a market, traded value is the best way to track it;
turnover expresses per-stock heat / trading enthusiasm / chasing.
Score = turnover rate + main-capital inflow share.
*/
func strategy001(gds map[string]*GeneralData) {
	// Score every stock and collect into a sortable list.
	var list GeneralDatas
	for _, gd := range gds {
		gd.score = gd.changehands + gd.mainP
		list = append(list, gd)
	}
	sort.Sort(list)
	// Alternative reports kept for reference (statistics001..008):
	// group distribution, first-group detail, open-based change,
	// intraday high/low spreads, dip-below-open chances, rebound from
	// low, and prev-close vs open gaps.
	// FIX: list[0:100] panicked when fewer than 100 stocks were scored;
	// clamp the slice to the available length.
	top := list
	if len(top) > 100 {
		top = top[:100]
	}
	// Previous close vs today's close, price gap.
	statistics009(top)
}
/*
statistics001 reports, for each group of `step` entries, how many match
the up/down-range condition and that count's share of all matches.
Empirically the first group is the effective one: gains (and big gains)
concentrate in it.
*/
func statistics001(list GeneralDatas) {
var (
step = 100 // group size for the breakdown
count = 0 // matches inside the current group
totalCount = 0 // matches across all groups
condUp float64 = 0 // up/down-range threshold
records []int // per-group match counts
)
for i, gd := range list {
// close out the previous group every `step` entries
if i%step == 0 && i != 0 {
records = append(records, count)
totalCount += count
count = 0
}
// condUp is 0, so this counts decliners
if gd.upAndDownRange < condUp {
count++
}
}
// flush the final (possibly partial) group
records = append(records, count)
totalCount += count
for _, record := range records {
fmt.Printf("%d, %2.2f%% \r\n", record, float64(record)/float64(totalCount)*100)
}
}
/*
statistics002 prints win/loss rates of the up/down range within a group.
*/
func statistics002(list GeneralDatas) {
	const threshold float64 = 0
	upCount := 0
	for _, gd := range list {
		if gd.upAndDownRange > threshold {
			upCount++
		}
	}
	downCount := len(list) - upCount
	fmt.Printf("胜率: %2.2f \r\n", float64(upCount)/float64(len(list))*100)
	fmt.Printf("败率: %2.2f \r\n", float64(downCount)/float64(len(list))*100)
}
/*
statistics003 prints win/loss rates of the open-to-close movement.
*/
func statistics003(list GeneralDatas) {
	const threshold float64 = 1
	upCount := 0
	for _, gd := range list {
		if gd.currentPrice-gd.open > threshold {
			upCount++
		}
	}
	downCount := len(list) - upCount
	fmt.Printf("胜率: %2.2f \r\n", float64(upCount)/float64(len(list))*100)
	fmt.Printf("败率: %2.2f \r\n", float64(downCount)/float64(len(list))*100)
}
/*
statistics004 prints win/loss rates of the open-to-intraday-high move.
*/
func statistics004(list GeneralDatas) {
	const threshold float64 = 5
	upCount := 0
	for _, gd := range list {
		if gd.high-gd.open > threshold {
			upCount++
		}
	}
	downCount := len(list) - upCount
	fmt.Printf("胜率: %2.2f \r\n", float64(upCount)/float64(len(list))*100)
	fmt.Printf("败率: %2.2f \r\n", float64(downCount)/float64(len(list))*100)
}
/*
statistics005 prints win/loss rates of the high-low spread, i.e. the
intraday amplitude.
*/
func statistics005(list GeneralDatas) {
	const threshold float64 = 10
	upCount := 0
	for _, gd := range list {
		if (gd.high-gd.low)/gd.low*100 > threshold {
			upCount++
		}
	}
	downCount := len(list) - upCount
	fmt.Printf("胜率: %2.2f \r\n", float64(upCount)/float64(len(list))*100)
	fmt.Printf("败率: %2.2f \r\n", float64(downCount)/float64(len(list))*100)
}
/*
statistics006 prints win/loss rates of the open-minus-low drop — i.e.
how often price dips below the open, a buy-the-dip opportunity.
*/
func statistics006(list GeneralDatas) {
	const threshold float64 = 4
	upCount := 0
	for _, gd := range list {
		if (gd.open-gd.low)/gd.open*100 > threshold {
			upCount++
		}
	}
	downCount := len(list) - upCount
	fmt.Printf("胜率: %2.2f \r\n", float64(upCount)/float64(len(list))*100)
	fmt.Printf("败率: %2.2f \r\n", float64(downCount)/float64(len(list))*100)
}
/*
statistics007 prints win/loss rates of the low-to-close rebound.
*/
func statistics007(list GeneralDatas) {
	const threshold float64 = 10
	upCount := 0
	for _, gd := range list {
		if (gd.currentPrice-gd.low)/gd.low*100 > threshold {
			upCount++
		}
	}
	downCount := len(list) - upCount
	fmt.Printf("胜率: %2.2f \r\n", float64(upCount)/float64(len(list))*100)
	fmt.Printf("败率: %2.2f \r\n", float64(downCount)/float64(len(list))*100)
}
/*
statistics008 prints win/loss rates of the gap between the previous
trading day's close and today's open.
*/
func statistics008(list GeneralDatas) {
	const threshold float64 = 2
	upCount := 0
	for _, gd := range list {
		if (gd.open-gd.close)/gd.close*100 > threshold {
			upCount++
		}
	}
	downCount := len(list) - upCount
	fmt.Printf("胜率: %2.2f \r\n", float64(upCount)/float64(len(list))*100)
	fmt.Printf("败率: %2.2f \r\n", float64(downCount)/float64(len(list))*100)
}
/*
statistics009 prints win/loss rates of the gap between the previous
trading day's close and today's close.
*/
func statistics009(list GeneralDatas) {
	const threshold float64 = 7
	upCount := 0
	for _, gd := range list {
		if (gd.currentPrice-gd.close)/gd.close*100 > threshold {
			upCount++
		}
	}
	downCount := len(list) - upCount
	fmt.Printf("胜率: %2.2f \r\n", float64(upCount)/float64(len(list))*100)
	fmt.Printf("败率: %2.2f \r\n", float64(downCount)/float64(len(list))*100)
}
|
package project
const (
// Version is the project's semantic version string.
// It is returned to clients upon connection, in GetState responses,
// and reported by the launcher instance.
Version = "1.0.0"
)
|
// 3. Single-byte XOR cipher
package main
import (
"bufio"
"encoding/hex"
"fmt"
"io"
"io/ioutil"
"os"
)
const sample = "alice.txt"
// main builds a frequency-based scoring function from the sample text,
// then decrypts hex-encoded single-byte-XOR ciphertext read from stdin
// or from each file named on the command line.
func main() {
	f, err := os.Open(sample)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	score, err := ScoreFunc(f)
	// FIX: close the sample file on every path; it was previously leaked
	// when ScoreFunc returned an error.
	f.Close()
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	files := os.Args[1:]
	if len(files) == 0 {
		if err := decrypt(os.Stdin, score); err != nil {
			fmt.Fprintln(os.Stderr, err)
		}
		return
	}
	for _, file := range files {
		f, err := os.Open(file)
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			continue
		}
		if err := decrypt(f, score); err != nil {
			fmt.Fprintln(os.Stderr, err)
		}
		f.Close()
	}
}
// decrypt reads hex-encoded ciphertext from in, recovers the single-byte
// key with the scoring function, and prints the decrypted plaintext.
func decrypt(in io.Reader, score func([]byte) int) error {
	var cipher []byte
	scanner := bufio.NewScanner(in)
	for scanner.Scan() {
		decoded, err := hex.DecodeString(scanner.Text())
		if err != nil {
			return err
		}
		cipher = append(cipher, decoded...)
	}
	if err := scanner.Err(); err != nil {
		return err
	}
	// Decrypt in place with the most probable key.
	XORSingleByte(cipher, cipher, breakSingleXOR(cipher, score))
	fmt.Println(string(cipher))
	return nil
}
// breakSingleXOR tries every possible single-byte key against buf and
// returns the key whose decryption scores highest.
func breakSingleXOR(buf []byte, score func([]byte) int) byte {
	var (
		bestKey   byte
		bestScore int
	)
	scratch := make([]byte, len(buf))
	for k := 0; k <= 0xff; k++ {
		XORSingleByte(scratch, buf, byte(k))
		if s := score(scratch); s > bestScore {
			bestScore = s
			bestKey = byte(k)
		}
	}
	return bestKey
}
// ScoreFunc reads sample text and returns a closure that scores a buffer
// by summing the sample-frequency counts of each of its runes.
func ScoreFunc(in io.Reader) (func([]byte) int, error) {
	counts, err := SymbolCounts(in)
	if err != nil {
		return nil, err
	}
	scorer := func(buf []byte) int {
		total := 0
		for _, r := range string(buf) {
			total += counts[r]
		}
		return total
	}
	return scorer, nil
}
// SymbolCounts reads all of in and returns how often each UTF-8 symbol
// (rune) occurs in it.
func SymbolCounts(in io.Reader) (map[rune]int, error) {
	data, err := ioutil.ReadAll(in)
	if err != nil {
		return nil, err
	}
	counts := make(map[rune]int)
	for _, symbol := range string(data) {
		counts[symbol]++
	}
	return counts, nil
}
// XORSingleByte writes src XOR b into dst, element by element.
// It panics if dst is shorter than src.
func XORSingleByte(dst, src []byte, b byte) {
	for i, v := range src {
		dst[i] = v ^ b
	}
}
|
package app
// ResCode is a numeric business/status code returned to API clients.
type ResCode int64
const (
// success (default status code)
CodeSuccess ResCode = 0
// global unknown / server error
CodeSeverError ResCode = 500
// bad request (usually handled by the frontend; rarely used)
CodeBadRequest ResCode = 400
// requested static resource not found (rarely used)
CodeDataNotFount ResCode = 404
// login / authentication failure
CodeLoginExpire ResCode = 401
// insufficient permissions
CodeIdentityNotRow ResCode = 403
)
/*
Generic business codes.

FIX: these groups previously shared one const block, where iota keeps
incrementing across the whole block. Each `= base + iota` line therefore
started from the current iota, not 0: CodeLoginFailResCode evaluated to
1016 (not the documented 1011), CodeSMSNotSend to 1066, and so on —
every group drifted out of its documented range. Separate const blocks
reset iota to 0 per group so the values match the documented ranges.
*/
const (
	// 1001-1010: generic operations.
	// operation failed
	CodeOperationFail ResCode = 1001 + iota
	// query failed
	CodeSelectOperationFail
	// update failed
	CodeUpdateOperationFail
	// delete failed
	CodeDeleteOperationFail
	// insert failed
	CodeInsertOperationFail
)
const (
	// 1011-1050: login and registration.
	// login failed: wrong account or password
	CodeLoginFailResCode ResCode = 1011 + iota
	// login failed, please retry
	CodeLoginFailReLogin
	// wrong captcha / verification code
	CodeLoginFailCode
	// invalid token
	CodeInvalidToken
	// user does not exist
	CodeNoUser
	// registration failed: phone number already exists
	CodeRegisterFail
	// authentication failed: phone number does not exist
	CodeNoUserPhone
	// request parameters must not be empty
	CodeParamsNotNull
	// user not activated
	CodeUserIsNotEnabled
	// role name already exists
	CodeRoleNameExist
)
const (
	// 1051-1070: SMS.
	// SMS send failed
	CodeSMSNotSend ResCode = 1051 + iota
	// SMS verification code expired
	CodeSMSCodeExpire
	// SMS verification failed
	CodeSMSVerityFail
)
const (
	// 1071-1100: files and resources.
	// file exceeds the size limit
	CodeFileOverstepSize ResCode = 1071 + iota
	// file upload failed
	CodeFileUploadFail
	// file missing, load failed
	CodeFileLoadingFail
	// file type not viewable
	CodeFileRequestFail
	// image must not be empty
	CodeImageIsNotNull
	// please upload an image-type file
	CodeFileImageFail
)
const (
	// 1101-1199: request parameters.
	// invalid parameter
	CodeParamIsInvalid ResCode = 1101 + iota
	// empty parameter
	CodeParamIsBlank
	// parameter type binding error
	CodeParamTypeBindError
	// missing parameter
	CodeParamNotComplete
)
/*
-----------go_api business codes (2xxx), reserved------------
*/
const (
//Code ResCode = 2001 + iota
)
/*
第三方相关(3xxx)
*/
const (
/*
3001-3020 微信公众号
*/
// 微信公众号JSSDK获取access_token失败
CodeWxGzhAccessTokenFail = 3001 + iota
// 微信公众号JSSDK获取jsapi_ticket失败
CodeWxGzhJsApiTicketFail
// 微信公众号JSSDK获取SIGN失败
CodeWxGzhSignFail
// 微信wxCode为空
CodeWxEmpty
// 微信wxCode失效或不正确请重新获取
CodeWxOuttime
)
// codeMsgMap maps every ResCode to its user-facing (Chinese) message.
// Message strings are runtime output and must not be altered casually.
var codeMsgMap = map[ResCode]string{
CodeSuccess: "success",
CodeSeverError: "服务器繁忙请重试",
CodeBadRequest: "请求失败",
CodeDataNotFount: "未找到资源",
CodeLoginExpire: "请登录后重试",
CodeIdentityNotRow: "权限不足",
CodeRoleNameExist: "角色名字已存在",
CodeOperationFail: "操作失败",
CodeSelectOperationFail: "查询操作失败!",
CodeUpdateOperationFail: "更新操作失败!",
CodeDeleteOperationFail: "删除操作失败!",
CodeInsertOperationFail: "新增操作失败!",
CodeLoginFailResCode: "登录失败,账号或者密码错误",
CodeLoginFailReLogin: "登录失败,请重试",
CodeLoginFailCode: "验证码错误",
CodeInvalidToken: "无效的token",
CodeNoUser: "用户不存在",
CodeRegisterFail: "注册失败,手机号已经存在",
CodeNoUserPhone: "认证失败,手机号不存在",
CodeParamsNotNull: "请求参数不能为空",
CodeUserIsNotEnabled: "用户未激活",
CodeSMSNotSend: "短信发送失败",
CodeSMSCodeExpire: "短信验证码失效",
CodeSMSVerityFail: "短信验证码验证失败",
CodeFileOverstepSize: "文件超出规定大小",
CodeFileUploadFail: "文件上传失败",
CodeFileLoadingFail: "文件不存在,加载失败",
CodeFileRequestFail: "文件类型不支持查看",
CodeImageIsNotNull: "图片不能为空",
CodeFileImageFail: "请上传图片类型的文件",
CodeParamIsInvalid: "参数无效",
CodeParamIsBlank: "参数为空",
CodeParamTypeBindError: "参数类型错误",
CodeParamNotComplete: "参数缺失",
CodeWxGzhAccessTokenFail: "微信公众号JSSDK获取access_token失败",
CodeWxGzhJsApiTicketFail: "微信公众号JSSDK获取jsapi_ticket失败",
CodeWxGzhSignFail: "微信公众号JSSDK获取SIGN失败",
CodeWxEmpty: "微信wxCode为空",
CodeWxOuttime: "微信wxCode失效或不正确请重新获取",
}
// Msg returns the human-readable message for c, falling back to the
// generic server-error message for unknown codes.
func (c ResCode) Msg() string {
	if msg, ok := codeMsgMap[c]; ok {
		return msg
	}
	return codeMsgMap[CodeSeverError]
}
|
package main
import (
"bytes"
"encoding/json"
"flag"
"github.com/stbuehler/go-termrecording/exportAsciinemaJson"
"github.com/stbuehler/go-termrecording/recording"
"io"
"io/ioutil"
"os"
)
// bytesSectionReader wraps a byte slice in an io.SectionReader spanning
// the entire slice.
func bytesSectionReader(b []byte) *io.SectionReader {
	reader := bytes.NewReader(b)
	return io.NewSectionReader(reader, 0, int64(len(b)))
}
// fileSectionReader wraps an open file in an io.SectionReader covering
// its entire current size, as reported by Stat.
// Rewritten with an early return instead of the non-idiomatic
// else-after-return shape.
func fileSectionReader(f *os.File) (*io.SectionReader, error) {
	stat, err := f.Stat()
	if err != nil {
		return nil, err
	}
	return io.NewSectionReader(f, 0, stat.Size()), nil
}
// stringify renders v as its JSON encoding; when marshalling fails it
// returns the error text instead.
func stringify(v interface{}) string {
	encoded, err := json.Marshal(v)
	if err != nil {
		return err.Error()
	}
	return string(encoded)
}
// main records a terminal session (the given command, or $SHELL / /bin/sh)
// into a temp file, then exports it as an Asciinema JSON file plus an
// HTML player page named after the -out flag.
func main() {
opts := flag.NewFlagSet("recording options", flag.PanicOnError)
outputBaseName := opts.String("out", "recording", "recording output name (used as basename for .html and -stdout.json files); defaults to 'recording'")
err := opts.Parse(os.Args[1:])
if err != nil {
return
}
// Remaining args form the command to record; default to the user's shell.
command := opts.Args()
if len(command) == 0 {
shell := os.Getenv("SHELL")
if shell == "" {
shell = "/bin/sh"
}
command = []string{shell}
}
// Raw recording goes to a temp file that is removed on exit.
recordingFile, err := ioutil.TempFile("", "term-recording")
if err != nil {
panic("couldn't create temporary recording file")
}
defer recordingFile.Close()
defer os.Remove(recordingFile.Name())
stdoutJsonName := *outputBaseName + "-stdout.json"
htmlName := *outputBaseName + ".html"
// O_EXCL: refuse to overwrite existing output files.
stdoutJsonFile, err := os.OpenFile(
stdoutJsonName,
os.O_RDWR|os.O_CREATE|os.O_EXCL,
0644)
if err != nil {
println(err.Error())
os.Exit(1)
}
htmlFile, err := os.OpenFile(
htmlName,
os.O_RDWR|os.O_CREATE|os.O_EXCL,
0644)
if err != nil {
println(err.Error())
os.Exit(1)
}
println("Recording into " + htmlName)
// Run the command under the recorder, writing raw data to the temp file.
err = recording.Execute(recordingFile, command[0], command[1:]...)
if err != nil {
panic("record failed: " + err.Error())
}
// Flush to disk before re-reading the recording for export.
err = recordingFile.Sync()
if err != nil {
panic("writing recording failed: " + err.Error())
}
fileSecReader, err := fileSectionReader(recordingFile)
if err != nil {
panic("couldn't read recording: " + err.Error())
}
err = exportAsciinemaJson.MakeFilm(stdoutJsonName, stdoutJsonFile, htmlFile, fileSecReader)
if err != nil {
println("couldn't write recording as JSON/HTML: " + err.Error())
return
}
println("Recording finished successfully")
}
|
package cmd
import (
"github.com/myechuri/ukd/server/api"
"github.com/spf13/cobra"
"golang.org/x/net/context"
"google.golang.org/grpc"
"log"
)
// status queries the ukd server (from the inherited server-endpoint
// flag) for the status of the unikernel named by --name and logs the
// reply. Any failure terminates the process via log.Fatalf.
func status(cmd *cobra.Command, args []string) {
	// TODO: TLS
	serverAddress := cmd.InheritedFlags().Lookup("server-endpoint").Value.String()
	conn, err := grpc.Dial(serverAddress, grpc.WithInsecure())
	if err != nil {
		log.Fatalf("fail to dial: %v", err)
	}
	defer conn.Close()
	client := api.NewUkdClient(conn)
	statusRequest := &api.StatusRequest{
		Name: ukName,
	}
	reply, err := client.Status(context.Background(), statusRequest)
	if err != nil {
		// FIX: the RPC error was previously discarded, so a failed call
		// crashed below with a nil-pointer dereference on reply.
		log.Fatalf("status request failed: %v", err)
	}
	log.Printf("Application unikernel status check: %t, status: %s, Info: %s",
		reply.Success, reply.Status, reply.Info)
}
// StatusCommand builds the cobra "status" subcommand, which reports the
// status of a unikernel selected by the --name flag.
func StatusCommand() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "status",
		Short: "Status a Unikernel",
		Long:  `Status a unikernel with given name`,
		Run:   status,
	}
	cmd.Flags().StringVar(&ukName, "name", "", "name of the application")
	return cmd
}
|
// The MIT License (MIT)
//
// Copyright (c) 2021 Yawning Angel.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
// Package toolchain enforces the minimum supported toolchain.
package toolchain
var (
// This is enforced so that I can consolidate build constraints
// instead of keeping track of exactly when each 64-bit target got
// support for SSA doing the right thing for bits.Add64/bits.Mul64.
//
// If you absolutely must get this working on older Go versions,
// the 64-bit codepath is safe (and performant) as follows:
//
// * 1.12 - amd64 (all other targets INSECURE due to vartime fallback)
// * 1.13 - arm64, ppcle, ppc64
// * 1.14 - s390x
//
// Last updated: Go 1.17 (src/cmd/compile/internal/ssagen/ssa.go)
//
// NOTE(review): this identifier is presumably declared only in
// build-constrained files elsewhere in the package, so referencing it
// here makes compilation fail on unsupported toolchains — confirm the
// declaring file before touching this.
_ = __SOFTWARE_REQUIRES_GO_VERSION_1_16__
// WASM does not specify the timing characteristics for any operations.
//
// This package is written under the assumption that certain things
// are constant time.
_ = __SOFTWARE_REQUIRES_GOARCH_NOT_WASM__
)
|
package model
// Comment models a source-code comment: its text plus whether it is a
// single-line or multi-line comment.
type Comment struct {
	isSingleLine bool   // true for single-line comments
	comment      string // the comment text
}

// NewComment returns an empty, multi-line Comment.
func NewComment() *Comment {
	return &Comment{}
}

// SetComment stores the comment text.
// FIX(idiom): receivers renamed from `this` to the conventional short
// name `c` (Go style discourages this/self receivers); behavior and
// signatures are unchanged.
func (c *Comment) SetComment(comment string) {
	c.comment = comment
}

// GetComment returns the comment text.
func (c *Comment) GetComment() string {
	return c.comment
}

// IsSingleLine reports whether the comment is single-line.
func (c *Comment) IsSingleLine() bool {
	return c.isSingleLine
}

// SetSingleLine marks the comment as single-line.
func (c *Comment) SetSingleLine() {
	c.isSingleLine = true
}

// SetMultiLine marks the comment as multi-line.
func (c *Comment) SetMultiLine() {
	c.isSingleLine = false
}
// Accept dispatches the visitor to this comment node.
// FIX(idiom): receiver renamed from `this` to `c` per Go convention.
func (c *Comment) Accept(visitor CodeVisitor) {
	visitor.VisitComment(c)
}
|
package ip_api
import (
"bytes"
"encoding/json"
"errors"
"log"
"net/http"
"strconv"
"strings"
)
// FreeAPIURI is the base URI for the free (rate-limited, HTTP-only)
// IP-API endpoint.
const FreeAPIURI = "http://ip-api.com/"
// ProAPIURI is the base URI for the pro (keyed, HTTPS) IP-API endpoint.
const ProAPIURI = "https://pro.ip-api.com/"
// Location is the decoded response payload of an ip-api.com lookup.
// Pointer fields (Lat, Lon, Mobile, Proxy, Hosting) distinguish "field
// absent" from a genuine zero/false value under omitempty.
// Status/Message report API-level success or failure per entry.
type Location struct {
Status string `json:"status,omitempty"`
Message string `json:"message,omitempty"`
Continent string `json:"continent,omitempty"`
ContinentCode string `json:"continentCode,omitempty"`
Country string `json:"country,omitempty"`
CountryCode string `json:"countryCode,omitempty"`
Region string `json:"region,omitempty"`
RegionName string `json:"regionName,omitempty"`
City string `json:"city,omitempty"`
District string `json:"district,omitempty"`
ZIP string `json:"zip,omitempty"`
Lat *float32 `json:"lat,omitempty"`
Lon *float32 `json:"lon,omitempty"`
Timezone string `json:"timezone,omitempty"`
Currency string `json:"currency,omitempty"`
ISP string `json:"isp,omitempty"`
Org string `json:"org,omitempty"`
AS string `json:"as,omitempty"`
ASName string `json:"asname,omitempty"`
Reverse string `json:"reverse,omitempty"`
Mobile *bool `json:"mobile,omitempty"`
Proxy *bool `json:"proxy,omitempty"`
Hosting *bool `json:"hosting,omitempty"`
Query string `json:"query,omitempty"`
}
// Query is a request to the IP-API service: one or more lookups plus
// optional response-field selection and language.
type Query struct {
Queries []QueryIP `json:"queries"` // lookups; exactly 1 for SingleQuery, >=1 for BatchQuery
Fields string `json:"fields,omitempty"` // comma-separated response fields (see AllowedAPIFields)
Lang string `json:"lang,omitempty"` // response language (see AllowedLangs)
}
// QueryIP is a single lookup target with optional per-query overrides.
type QueryIP struct {
Query string `json:"query"` // IP address or domain to look up
Fields string `json:"fields,omitempty"` // per-query field selection override
Lang string `json:"lang,omitempty"` // per-query language override
}
// SingleQuery executes one lookup (query.Queries must contain exactly
// one value) and returns the decoded Location.
// apiKey selects the pro endpoint when non-empty; baseURL, when set,
// overrides the endpoint entirely; debugging logs the outgoing query.
func SingleQuery(query Query, apiKey string, baseURL string, debugging bool) (*Location, error) {
//Make sure that there is only 1 query value
if len(query.Queries) != 1 {
return nil, errors.New("error: only 1 query can be passed to single query api")
}
if debugging {
log.Println(query)
}
//Build URI
uri := buildURI(query, "single",apiKey, baseURL)
//Execute query
req, err := http.NewRequest("GET",uri,nil)
if err != nil {
return nil, err
}
//Set request headers
req.Header.Set("Accept","application/json")
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
// 403 means either a bad key (pro) or rate-limit blacklisting (free);
// the presence of "?key=" in the URI tells the two apart.
if resp.StatusCode == 403 {
if strings.Contains(uri, "?key=") {
return nil, errors.New("error: invalid api key")
} else {
return nil, errors.New("error: exceeded api calls per minute, you need to un-blacklist yourself")
}
}
if resp.StatusCode != http.StatusOK {
return nil, errors.New("error querying ip api: " + resp.Status + " " + strconv.Itoa(resp.StatusCode))
}
// Decode the JSON body straight from the stream.
var location Location
err = json.NewDecoder(resp.Body).Decode(&location)
if err != nil {
return nil, err
}
return &location, nil
}
// BatchQuery executes a batch lookup (query.Queries must contain one or
// more values) and returns the decoded Locations, one per query.
// apiKey selects the pro endpoint when non-empty; baseURL, when set,
// overrides the endpoint entirely; debugging logs the serialized batch.
func BatchQuery(query Query, apiKey string, baseURL string, debugging bool) ([]Location, error) {
//Make sure that there are 1 or more query values
if len(query.Queries) < 1 {
return nil, errors.New("error: no queries passed to batch query")
}
//Build URI
uri := buildURI(query,"batch",apiKey, baseURL)
// The batch endpoint takes the lookups as a JSON array in the body.
queries, err := json.Marshal(query.Queries)
if err != nil {
return nil, err
}
if debugging {
log.Println(string(queries))
}
//Execute Query
req, err := http.NewRequest("POST",uri,bytes.NewReader(queries))
if err != nil {
return nil, err
}
//Set request headers
req.Header.Set("Content-Type","application/json")
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
// 403 means either a bad key (pro) or rate-limit blacklisting (free);
// the presence of "?key=" in the URI tells the two apart.
if resp.StatusCode == 403 {
if strings.Contains(uri, "?key=") {
return nil, errors.New("error: invalid api key")
} else {
return nil, errors.New("error: exceeded api calls per minute, you need to un-blacklist yourself")
}
}
if resp.StatusCode != http.StatusOK {
return nil, errors.New("error querying ip api: " + resp.Status + " " + strconv.Itoa(resp.StatusCode))
}
// Decode the JSON array straight from the stream.
var locations []Location
err = json.NewDecoder(resp.Body).Decode(&locations)
if err != nil {
return nil, err
}
return locations,nil
}
// buildURI assembles the request URI: base endpoint (explicit baseURL,
// or free/pro chosen by apiKey), path by query type, then the key,
// fields, and lang query parameters in that order.
func buildURI(query Query, queryType string, apiKey string, baseURL string) string {
	uri := baseURL
	if uri == "" {
		if apiKey == "" {
			uri = FreeAPIURI
		} else {
			uri = ProAPIURI
		}
	}
	switch queryType {
	case "single":
		uri += "json/" + query.Queries[0].Query
	case "batch":
		uri += "batch"
	}
	// Collect query parameters, then join them in one pass.
	var params []string
	if apiKey != "" {
		params = append(params, "key="+apiKey)
	}
	if len(query.Fields) > 0 {
		params = append(params, buildFieldList(query.Fields))
	}
	if query.Lang != "" {
		params = append(params, buildLangString(query.Lang))
	}
	if len(params) > 0 {
		uri += "?" + strings.Join(params, "&")
	}
	return uri
}
// buildFieldList renders the comma-separated field names as the
// "fields" query-string parameter expected by the ip-api service.
func buildFieldList(fields string) string {
	const prefix = "fields="
	return prefix + fields
}
// buildLangString renders the language code as the "lang" query-string
// parameter expected by the ip-api service.
func buildLangString(lang string) string {
	const prefix = "lang="
	return prefix + lang
}
// AllowedAPIFields lists every response field name accepted by the ip-api
// service; ValidateFields checks requested fields against this list.
var AllowedAPIFields = []string{"status","message","continent","continentCode","country","countryCode","region","regionName","city","district","zip","lat","lon","timezone","isp","org","as","asname","reverse","mobile","proxy","hosting","query"}
// AllowedLangs lists every localization code accepted by the ip-api
// service; ValidateLang checks a requested language against this list.
var AllowedLangs = []string{"en","de","es","pt-BR","fr","ja","zh-CN","ru"}
// ValidateFields checks that every entry in the comma-separated fields
// string is a field name accepted by the ip-api service. It returns the
// unmodified fields string, or an error naming the first illegal field.
func ValidateFields(fields string) (string, error) {
	for _, f := range strings.Split(fields, ",") {
		if !contains(AllowedAPIFields, f) {
			return "", errors.New("error: illegal field provided: " + f)
		}
	}
	return fields, nil
}
// ValidateLang checks that lang is one of the localization codes accepted
// by the ip-api service, returning it unchanged or an error otherwise.
func ValidateLang(lang string) (string, error) {
	if contains(AllowedLangs, lang) {
		return lang, nil
	}
	return "", errors.New("error: illegal lang value provided: " + lang)
}
// contains reports whether item is present in slice.
func contains(slice []string, item string) bool {
	for i := range slice {
		if slice[i] == item {
			return true
		}
	}
	return false
}
|
package robot
import (
"compress/gzip"
"errors"
"io"
"io/ioutil"
"net/http"
"os"
"strings"
"time"
"github.com/golang/glog"
"golang.org/x/text/encoding/simplifiedchinese"
"golang.org/x/text/transform"
)
// maxRetry bounds the number of HTTP attempts made by Http_get.
const maxRetry int = 5
// UA is the User-Agent header sent with every request. It defaults to a
// mobile Safari string and may be overridden via the User_Agent
// environment variable (see init).
var UA string = "Mozilla/5.0 (iPhone; CPU iPhone OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B143 Safari/601.1"
// init overrides UA from the User_Agent environment variable when set.
func init() {
	ua := os.Getenv("User_Agent")
	if len(ua) > 0 {
		UA = ua
	}
}
// guess_charset derives a charset name from a Content-Type header value.
// The ms-excel content type is mapped to "GBK"; otherwise the value after
// "charset=" is extracted (upper-cased, space-trimmed, ";"-trimmed), and
// "UTF8" is returned when no charset parameter is present.
func guess_charset(contype string) string {
	const excel = "application/vnd.ms-excel"
	if contype == excel {
		return "GBK"
	}
	upper := strings.ToUpper(contype)
	i := strings.Index(upper, "CHARSET=")
	if i == -1 {
		return "UTF8"
	}
	cs := upper[i+len("CHARSET="):]
	return strings.Trim(strings.TrimSpace(cs), ";")
}
// Http_get fetches url with up to maxRetry attempts, sending the fixed
// User-Agent (UA) and an optional Referer header. It transparently
// decompresses gzip responses and, when the Content-Type advertises a
// GB* charset (or is the ms-excel type, treated as GBK by
// guess_charset), transcodes the body from GBK to UTF-8.
// tout bounds each individual attempt, not the whole retry loop.
func Http_get(url string, referer *string, tout time.Duration) (body []byte, err error) {
	glog.V(HttpV).Infoln(url)
	var res *http.Response
	for i := 0; i < maxRetry; i++ {
		client := &http.Client{Timeout: tout}
		// Assign to the named return err (no ":=") so the failure of the
		// final attempt survives the loop. The previous code redeclared
		// err inside the loop, shadowing the named return, so the
		// post-loop error check could never fire and the real cause was
		// replaced by a generic message.
		var req *http.Request
		req, err = http.NewRequest("GET", url, nil)
		if err != nil {
			continue
		}
		req.Header.Set("Accept-Encoding", "gzip, deflate")
		req.Header.Add("Connection", "keep-alive")
		req.Header.Set("User-Agent", UA)
		if referer != nil {
			req.Header.Set("Referer", *referer)
		}
		res, err = client.Do(req)
		if err == nil {
			break
		}
	}
	if err != nil {
		glog.Warningln("http get fail", url, err)
		return
	}
	if res == nil {
		// Defensive: no response and no recorded error.
		err = errors.New("req " + url + " fail")
		return
	}
	defer res.Body.Close()
	contype := res.Header.Get("Content-Type")
	charset := guess_charset(contype)
	var reader io.ReadCloser
	switch res.Header.Get("Content-Encoding") {
	case "gzip":
		// Previously this error was discarded; a corrupt gzip stream
		// left reader nil and caused a panic below.
		if reader, err = gzip.NewReader(res.Body); err != nil {
			return nil, err
		}
		defer reader.Close()
	default:
		reader = res.Body
	}
	// HasPrefix is safe for charset values shorter than two bytes, where
	// charset[:2] would panic (e.g. an empty "charset=" parameter).
	if strings.HasPrefix(charset, "GB") {
		body, err = ioutil.ReadAll(transform.NewReader(reader,
			simplifiedchinese.GBK.NewDecoder()))
	} else {
		body, err = ioutil.ReadAll(reader)
	}
	return body, err
}
|
package main
import (
"go/types"
"github.com/pkg/errors"
)
// Singleton Elm representations of the Go primitive kinds; codec holds
// the (de)serializer function name used by Decoder/Encoder.
var (
	elmBool = &ElmBasicType{name: "Bool", codec: "bool"}
	elmFloat = &ElmBasicType{name: "Float", codec: "float"}
	elmInt = &ElmBasicType{name: "Int", codec: "int"}
	elmString = &ElmBasicType{name: "String", codec: "string"}
)
// ElmType represents a type in Elm.
type ElmType interface {
	// Name returns the Elm source name of the type, e.g. "List Int".
	Name() string
	// Decoder returns the Elm JSON decoder expression, qualified by prefix.
	Decoder(prefix string) string
	// Encoder returns the Elm JSON encoder expression, qualified by prefix.
	Encoder(prefix string) string
	// Equal reports whether other denotes the same Elm type.
	Equal(other ElmType) bool
	// Nullable reports whether the corresponding Go value can be nil.
	Nullable() bool
}
// elmTypeName returns t's Elm name, or a placeholder when t is nil.
func elmTypeName(t ElmType) string {
	if t != nil {
		return t.Name()
	}
	return "<undefined>"
}
// ElmBasicType represents primitive types in Elm.
type ElmBasicType struct {
	name  string // Elm type name, e.g. "Int"
	codec string // JSON codec function name, e.g. "int"
}

// Name returns the name of the Elm type.
func (t *ElmBasicType) Name() string { return t.name }

// Decoder returns the name of the Elm JSON decoder for this type.
func (t *ElmBasicType) Decoder(prefix string) string { return prefix + "." + t.codec }

// Encoder returns the name of the Elm JSON encoder for this type.
func (t *ElmBasicType) Encoder(prefix string) string { return prefix + "." + t.codec }

// Equal tests for equality with another ElmType.
func (t *ElmBasicType) Equal(other ElmType) bool {
	o, ok := other.(*ElmBasicType)
	return ok && t.name == o.name && t.codec == o.codec
}

// Nullable indicates whether this type can be nil; basic types cannot.
func (t *ElmBasicType) Nullable() bool { return false }
// ElmList represents a list of another type.
type ElmList struct {
	elem ElmType // element type of the list
}

// Name returns the Elm type name, "List <elem>".
func (t *ElmList) Name() string {
	return "List " + t.elem.Name()
}

// Decoder returns the parenthesized Elm JSON decoder for the list.
func (t *ElmList) Decoder(prefix string) string {
	return "(" + prefix + ".list " + t.elem.Decoder(prefix) + ")"
}

// Encoder returns the parenthesized Elm JSON encoder for the list.
func (t *ElmList) Encoder(prefix string) string {
	return "(" + prefix + ".list " + t.elem.Encoder(prefix) + ")"
}

// Equal reports whether other is a list with an equal element type.
func (t *ElmList) Equal(other ElmType) bool {
	o, ok := other.(*ElmList)
	return ok && t.elem.Equal(o.elem)
}

// Nullable indicates whether this type can be nil; Go slices can.
func (t *ElmList) Nullable() bool { return true }
// ElmPointer represents a pointer to an instance of another type.
// It is transparent in Elm: name, decoder and encoder all delegate to
// the pointed-to type.
type ElmPointer struct {
	elem ElmType // pointed-to type
}

// Name returns the name of the pointed-to Elm type.
func (t *ElmPointer) Name() string { return t.elem.Name() }

// Decoder returns the Elm JSON decoder of the pointed-to type.
func (t *ElmPointer) Decoder(prefix string) string { return t.elem.Decoder(prefix) }

// Encoder returns the Elm JSON encoder of the pointed-to type.
func (t *ElmPointer) Encoder(prefix string) string { return t.elem.Encoder(prefix) }

// Equal reports whether other is a pointer to an equal element type.
func (t *ElmPointer) Equal(other ElmType) bool {
	o, ok := other.(*ElmPointer)
	return ok && t.elem.Equal(o.elem)
}

// Nullable indicates whether this type can be nil; pointers can.
func (t *ElmPointer) Nullable() bool { return true }
// ElmTypeResolver maintains a cache of Go to Elm type conversions.
type ElmTypeResolver struct {
	// resolved caches converted records keyed by the Go type name.
	resolved map[string]*ElmRecord
	// ordered holds the records in the order they were first resolved.
	ordered []*ElmRecord
	// renames holds Go-to-Elm type name overrides — its use is not
	// visible in this chunk; presumably applied during record
	// construction (TODO confirm).
	renames TypeNamePairs
}
// NewResolver creates an empty resolver.
func NewResolver(renames TypeNamePairs) *ElmTypeResolver {
	return &ElmTypeResolver{
		resolved: make(map[string]*ElmRecord),
		renames: renames,
	}
}
// Convert translates a Go type into an Elm type and JSON decoder pair.
// Basics map to the Elm primitives (all integer kinds, signed and
// unsigned, collapse to Int; both float kinds to Float), pointers and
// slices convert recursively, and named struct types become cached
// records. Any other type yields an error.
func (r *ElmTypeResolver) Convert(goType types.Type) (ElmType, error) {
	switch t := goType.(type) {
	case *types.Basic:
		switch t.Kind() {
		case types.Bool:
			return elmBool, nil
		case types.Float32, types.Float64:
			return elmFloat, nil
		case types.Int, types.Int8, types.Int16, types.Int32, types.Int64,
			types.Uint, types.Uint8, types.Uint16, types.Uint32, types.Uint64:
			return elmInt, nil
		case types.String:
			return elmString, nil
		}
	case *types.Pointer:
		// Recurse on the pointee; ElmPointer is transparent in Elm.
		elemType, err := r.Convert(t.Elem())
		if err != nil {
			return nil, err
		}
		return &ElmPointer{elem: elemType}, nil
	case *types.Slice:
		elemType, err := r.Convert(t.Elem())
		if err != nil {
			return nil, err
		}
		return &ElmList{elem: elemType}, nil
	case *types.Named:
		goName := t.Obj().Name()
		switch u := t.Underlying().(type) {
		case *types.Struct:
			return r.resolveRecord(goName, u)
		}
		// Named non-struct types fall through to the error below.
	}
	return nil, errors.Errorf("don't know how to handle Go type %s (%T)", goType, goType)
}
// CachedRecords returns the slice of resolved Elm records, in the order
// they were first resolved.
func (r *ElmTypeResolver) CachedRecords() []*ElmRecord {
	return r.ordered
}
// resolveRecord converts the struct to an Elm record, or returns the cached version.
// Newly resolved records are appended to r.ordered for CachedRecords.
func (r *ElmTypeResolver) resolveRecord(goName string, stype *types.Struct) (*ElmRecord, error) {
	if record := r.resolved[goName]; record != nil {
		return record, nil
	}
	record, err := recordFromStruct(r, stype, goName)
	if err != nil {
		return nil, err
	}
	logger.Debug().
		Str("name", goName).
		Str("type", elmTypeName(record)).
		Msg("Caching resolved type")
	r.resolved[goName] = record
	r.ordered = append(r.ordered, record)
	return record, nil
}
|
package main
import (
"fmt"
"golangPractice/chat_room/model"
"golangPractice/chat_room/protocol"
)
// onlineUserMap tracks the users currently online, keyed by user ID.
// (Dropped the redundant explicit type: make already fixes it.)
var onlineUserMap = make(map[int]*model.User, 16)
// showOnlineUserList prints the IDs of all currently online users.
// Map iteration order is random, so the listing is unordered.
func showOnlineUserList() {
	fmt.Println("--------------【online user list】-------------")
	// "for id := range" replaces the redundant "for id, _ := range".
	for id := range onlineUserMap {
		fmt.Println("user ", id)
	}
	fmt.Println("-----------------------------------------------")
}
// updateUserStatus records a user's latest status and broadcasts it.
// A user going offline is removed from the online map; any other status
// inserts or updates the map entry.
func updateUserStatus(userStatus protocol.UserStatusNotify) {
	user, ok := onlineUserMap[userStatus.UserId]
	if !ok {
		// First notification from this user: create a fresh entry.
		user = &model.User{}
		user.UserId = userStatus.UserId
	}
	user.Status = userStatus.Status
	if user.Status == model.UserStatusOffline {
		// No need to write an entry into the map that is deleted right
		// away; just drop whatever is there and announce the departure.
		delete(onlineUserMap, user.UserId)
		fmt.Println("【broadcast】user ", user.UserId, " offline")
		return
	}
	onlineUserMap[user.UserId] = user
	fmt.Println("【broadcast】user ", user.UserId, " online")
}
|
package web
import (
"log"
"time"
"github.com/tebeka/selenium"
)
// waitingTimeBeforeFind is the pause, in milliseconds, inserted before
// every element lookup.
const waitingTimeBeforeFind = 500
// Finder wraps a selenium WebDriver with CSS-selector lookup helpers
// that abort the process (log.Fatalf) on failure.
type Finder struct {
	WebDriver selenium.WebDriver
}
// FindElement waits briefly, then looks up a single element by CSS
// selector. It aborts the process via log.Fatalf when the lookup fails.
func (f *Finder) FindElement(selector string) selenium.WebElement {
	log.Printf("%+v\n", selector)
	time.Sleep(waitingTimeBeforeFind * time.Millisecond)
	element, err := f.WebDriver.FindElement(selenium.ByCSSSelector, selector)
	if err != nil {
		log.Fatalf("FindElement: %v", err)
	}
	return element
}
// FindElements waits briefly, then looks up all elements matching the
// CSS selector. It aborts the process via log.Fatalf when the lookup
// fails.
func (f *Finder) FindElements(selector string) []selenium.WebElement {
	log.Printf("%+v\n", selector)
	time.Sleep(waitingTimeBeforeFind * time.Millisecond)
	elements, err := f.WebDriver.FindElements(selenium.ByCSSSelector, selector)
	if err != nil {
		log.Fatalf("FindElements: %v", err)
	}
	return elements
}
// FindElementAndClick locates the element matching the CSS selector,
// clicks it, and returns it. A click failure aborts the process via
// log.Fatalf, matching the error handling of the other Finder helpers.
func (f *Finder) FindElementAndClick(selector string) selenium.WebElement {
	we := f.FindElement(selector)
	// Previously Click's error was silently discarded.
	if err := we.Click(); err != nil {
		log.Fatalf("Click: %v", err)
	}
	return we
}
|
package JsonParsers
// GamesOffersJson models one game offer entry as decoded from the
// upstream JSON feed.
// NOTE(review): several tags below contain spaces ("parent section id",
// "offer name"). That is legal for encoding/json but unusual — confirm
// the feed really uses space-separated key names.
type GamesOffersJson struct {
	Id int `json:"id"`
	ParentId int `json:"parent section id"`
	Name string `json:"offer name"`
	Price string `json:"price"`
	Currency string `json:"currency"`
	Discount string `json:"discount"`
	Gift string `json:"gift"`
	IdSeller string `json:"id_seller"`
}
|
// できるだけコンパクトにKB、MB、...、YBまでのconst宣言を書きなさい
package main
import "fmt"
// Decimal (SI) byte-size constants from bytes up to yottabytes.
// YB exceeds int64 but is fine as an untyped constant as long as it is
// only used in constant expressions that fit the target type.
const (
	B  = 1
	KB = B * 1000
	MB = KB * 1000
	GB = MB * 1000
	TB = GB * 1000
	// Fixed: the petabyte constant was misspelled "PG".
	PB = TB * 1000
	EB = PB * 1000
	ZB = EB * 1000
	YB = ZB * 1000
)
// main prints a few of the size constants as a quick demonstration.
func main() {
	fmt.Println(B, KB, MB)
}
|
package bp2build
import (
"android/soong/bazel"
"fmt"
)
// Data from the code generation process that is used to improve compatibility
// between build systems.
type CodegenCompatLayer struct {
	// A map from the original module name to the generated/handcrafted Bazel
	// label for legacy build systems to be able to build a fully-qualified
	// Bazel target from an unique module name.
	// NOTE(review): callers must initialize this map before calling
	// AddNameToLabelEntry, which writes into it and would panic on nil.
	NameToLabelMap map[string]string
}
// AddNameToLabelEntry logs an entry of module name -> Bazel target label.
// It panics if an entry for the (prefix-stripped) name already exists,
// since one module name must map to exactly one label.
func (compatLayer CodegenCompatLayer) AddNameToLabelEntry(name, label string) {
	// The module name may be prefixed with bazel.BazelTargetModuleNamePrefix if
	// generated from bp2build.
	name = bazel.StripNamePrefix(name)
	if existingLabel, ok := compatLayer.NameToLabelMap[name]; ok {
		panic(fmt.Errorf(
			"Module '%s' maps to more than one Bazel target label: %s, %s. "+
				"This shouldn't happen. It probably indicates a bug with the bp2build internals.",
			name,
			existingLabel,
			label))
	}
	compatLayer.NameToLabelMap[name] = label
}
|
package form3_test
import (
"context"
"errors"
"log"
"os"
"testing"
"time"
"github.com/matryer/is"
"github.com/namsral/flag"
"github.com/tehsphinx/form3"
)
// endpoint is the base URL of the API under test (-endpoint flag).
var endpoint string
// debugEnabled toggles colored client debug output (-debug flag).
var debugEnabled bool
// orgID is the fixed organisation ID used by every test case below.
const orgID = "eb0bd6f5-c3f5-44b2-b677-acd23cdde73c"
// TestMain parses the test flags, wipes the Account table, then runs
// the test suite and exits with its result code.
func TestMain(m *testing.M) {
	flag.StringVar(&endpoint, "endpoint", "http://localhost:8080", "test server endpoint url")
	flag.BoolVar(&debugEnabled, "debug", false, "enable colored debug output")
	flag.Parse()
	cleanAccountsTable()
	code := m.Run()
	os.Exit(code)
}
// getClient builds a form3 client against the configured endpoint,
// enabling debug output when the -debug flag is set.
func getClient() *form3.Client {
	var opts []form3.ClientOption
	if debugEnabled {
		opts = append(opts, form3.WithDebug())
	}
	return form3.NewClient(endpoint, opts...)
}
/*
Since we want to test the client, not the server, there is little point testing all the error scenarios of the server.
The validation will be tested separately without making calls to the server.
Go tests are executed sequentially and in order by default. I'm using that here to keep the tests simple.
If I wanted to execute them in parallel, the tests would need to be written differently,
each writing their own data, before fetching and deleting it again. Since this is running against a database,
the test data would have to be without overlap. E.g the `List` call would need a filter that would exclude all
other test data or its own server/database to run against.
WARNING: the tests will clean the Account table first.
*/
// accountTests drives the CRUD tests below, which run in declaration
// order: TestClient_CreateAccount fills in uid and version, which the
// fetch/list/delete tests then rely on.
var accountTests = []struct {
	orgID string
	uid string // filled in by TestClient_CreateAccount
	version int // filled in by TestClient_CreateAccount
	name string
	createData *form3.Account // payload sent on create
	accountData *form3.Account // expected representation returned by the server
}{
	{
		name: "UK account without CoP",
		orgID: orgID,
		createData: &form3.Account{
			Country: "GB",
			BaseCurrency: "GBP",
			BankID: "400300",
			BankIDCode: "GBDSC",
			BIC: "NWBKGB22",
		},
		accountData: &form3.Account{
			Country: "GB",
			BaseCurrency: "GBP",
			BankID: "400300",
			BankIDCode: "GBDSC",
			BIC: "NWBKGB22",
		},
	},
	{
		name: "UK account with CoP",
		orgID: orgID,
		createData: &form3.Account{
			Country: "GB",
			BaseCurrency: "GBP",
			BankID: "400300",
			BankIDCode: "GBDSC",
			BIC: "NWBKGB22",
			Name: []string{"Samantha Holder"},
			AlternativeNames: []string{"Sam Holder"},
			AccountClassification: "Personal",
			JointAccount: false,
			AccountMatchingOptOut: false,
			SecondaryIdentification: "A1B2C3D4",
		},
		// NOTE(review): Name/AlternativeNames are sent on create but
		// absent from the expected data, although copyAccount does copy
		// them — implying the server is expected to return them empty.
		// Confirm this is intended.
		accountData: &form3.Account{
			Country: "GB",
			BaseCurrency: "GBP",
			BankID: "400300",
			BankIDCode: "GBDSC",
			BIC: "NWBKGB22",
			AccountClassification: "Personal",
			JointAccount: false,
			AccountMatchingOptOut: false,
			SecondaryIdentification: "A1B2C3D4",
		},
	},
}
// TestClient_CreateAccount creates every test account and stores the
// returned uid/version back into accountTests for the later tests.
func TestClient_CreateAccount(t *testing.T) {
	for i, tt := range accountTests {
		i := i // scope variable
		t.Run(tt.name, func(t *testing.T) {
			ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
			defer cancel()
			cl := getClient()
			got, err := cl.CreateAccount(ctx, tt.orgID, tt.createData)
			assert := is.New(t)
			assert.NoErr(err)
			assert.True(got.ID() != "")
			// Compare only the exported data fields (see copyAccount).
			assert.Equal(copyAccount(*got), tt.accountData)
			// save uid to test case for the other tests
			accountTests[i].uid = got.ID()
			accountTests[i].version = got.Version()
		})
	}
}
// TestClient_FetchAccount fetches each account created above by uid and
// checks it matches the expected account data.
func TestClient_FetchAccount(t *testing.T) {
	for _, tt := range accountTests {
		t.Run(tt.name, func(t *testing.T) {
			ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
			defer cancel()
			cl := getClient()
			got, err := cl.FetchAccount(ctx, tt.uid)
			assert := is.New(t)
			assert.NoErr(err)
			assert.True(got.ID() != "")
			assert.Equal(copyAccount(*got), tt.accountData)
		})
	}
}
// TestClient_ListAccounts exercises the list endpoint with and without
// pagination options, against the two accounts created earlier.
func TestClient_ListAccounts(t *testing.T) {
	tests := []struct {
		name string
		options []form3.ListOption
		expectedUIDs []string
	}{
		{
			name: "no pagination",
			expectedUIDs: []string{accountTests[0].uid, accountTests[1].uid},
		},
		{
			name: "with page option",
			options: []form3.ListOption{
				form3.WithPageNo(0),
			},
			expectedUIDs: []string{accountTests[0].uid, accountTests[1].uid},
		},
		{
			name: "with size option",
			options: []form3.ListOption{
				form3.WithPageSize(5),
			},
			expectedUIDs: []string{accountTests[0].uid, accountTests[1].uid},
		},
		{
			name: "second page has no data",
			options: []form3.ListOption{
				form3.WithPageNo(1),
				form3.WithPageSize(5),
			},
			expectedUIDs: []string{},
		},
		{
			name: "first page with size 1",
			options: []form3.ListOption{
				form3.WithPageNo(0),
				form3.WithPageSize(1),
			},
			expectedUIDs: []string{accountTests[0].uid},
		},
		{
			name: "second page with size 1",
			options: []form3.ListOption{
				form3.WithPageNo(1),
				form3.WithPageSize(1),
			},
			expectedUIDs: []string{accountTests[1].uid},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			assert := is.New(t)
			ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
			defer cancel()
			cl := getClient()
			got, err := cl.ListAccounts(ctx, tt.options...)
			// assert that there was no error and the expected amount of results
			assert.NoErr(err)
			assert.Equal(len(got), len(tt.expectedUIDs))
			// check if the expected uids are in the result
			for _, uid := range tt.expectedUIDs {
				var found bool
				for _, account := range got {
					assert.True(account.ID() != "")
					if uid != account.ID() {
						continue
					}
					found = true
				}
				assert.True(found)
			}
			// check if the results are the same as expected in accountTests
			for _, test := range accountTests {
				for _, account := range got {
					if test.uid != account.ID() {
						continue
					}
					assert.Equal(copyAccount(account), test.accountData)
				}
			}
		})
	}
}
// TestClient_DeleteAccount first tries deleting each account with a
// wrong version number (expecting ErrNotFound), then deletes it for
// real with the version captured at creation time.
func TestClient_DeleteAccount(t *testing.T) {
	for _, tt := range accountTests {
		t.Run(tt.name, func(t *testing.T) {
			ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
			defer cancel()
			assert := is.NewRelaxed(t)
			cl := getClient()
			// try to delete wrong version No
			err := cl.DeleteAccount(ctx, tt.uid, tt.version+1)
			// Note: seems the way the API is implemented server side, a wrong version returns a 404 if that
			// version does not exist at all, not a 409 as one could have read from the documentation.
			// Is not really a use case that matters though, since normally one would not invent a version number.
			assert.True(errors.Is(err, form3.ErrNotFound))
			// test deletion
			err = cl.DeleteAccount(ctx, tt.uid, tt.version)
			assert.NoErr(err)
		})
	}
}
// copyAccount copies only the exported data fields of an account so that
// comparisons in the tests ignore unexported/server-side state (ID,
// version, etc.).
func copyAccount(account form3.Account) *form3.Account {
	return &form3.Account{
		Country: account.Country,
		BaseCurrency: account.BaseCurrency,
		AccountNumber: account.AccountNumber,
		BankID: account.BankID,
		BankIDCode: account.BankIDCode,
		BIC: account.BIC,
		IBAN: account.IBAN,
		Name: account.Name,
		AlternativeNames: account.AlternativeNames,
		AccountClassification: account.AccountClassification,
		JointAccount: account.JointAccount,
		AccountMatchingOptOut: account.AccountMatchingOptOut,
		SecondaryIdentification: account.SecondaryIdentification,
		Switched: account.Switched,
		Status: account.Status,
	}
}
// cleanAccountsTable deletes every account returned by a 100-sized list
// call. If exactly 100 come back it aborts as a safety net against
// wiping a non-test database.
func cleanAccountsTable() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	cl := form3.NewClient(endpoint)
	accounts, err := cl.ListAccounts(ctx, form3.WithPageSize(100))
	if err != nil {
		log.Fatal(err)
	}
	if len(accounts) == 100 {
		log.Fatal("tests would delete too much data. aborting")
	}
	for _, account := range accounts {
		if err := cl.DeleteAccount(ctx, account.ID(), account.Version()); err != nil {
			log.Fatal(err)
		}
	}
}
|
package bot
import (
"strings"
tgbotapi "github.com/go-telegram-bot-api/telegram-bot-api"
)
var (
	// markdownV2Replacer backslash-escapes every character that
	// Telegram's MarkdownV2 parse mode treats as special:
	// _ * [ ] ( ) ~ ` > # + - = | { } . !
	// Fixed: the previous list omitted '_' (leaving underscores
	// unescaped, which Telegram rejects) and listed '-' twice.
	markdownV2Replacer = strings.NewReplacer(
		func(chars string) []string {
			out := make([]string, 0, len(chars)*2)
			for _, c := range chars {
				out = append(out, string(c), "\\"+string(c))
			}
			return out
		}("_*[]()~`>#+-=|{}.!")...,
	)
)
// escapeMarkdownV2 backslash-escapes Telegram MarkdownV2 special
// characters in s using markdownV2Replacer.
func escapeMarkdownV2(s string) string {
	return markdownV2Replacer.Replace(s)
}
// optionable is the subset of outgoing-message builders a replyOption
// can mutate.
type optionable interface {
	setText(string)
	setParseMode(string)
	setInlineKeyboard(tgbotapi.InlineKeyboardMarkup)
}
// repliable is implemented by builders that can quote another message.
type repliable interface {
	setReplyMsgID(int)
}
// replyOption mutates an outgoing message, optionally inspecting the
// incoming message it responds to.
type replyOption func(*tgbotapi.Message, optionable)
// message wraps a new-message config so options can mutate it.
type message struct {
	tgbotapi.MessageConfig
}
// setText sets the outgoing text.
func (m *message) setText(text string) {
	m.MessageConfig.Text = text
}
// setParseMode sets the Telegram parse mode (e.g. "MarkdownV2").
func (m *message) setParseMode(mode string) {
	m.MessageConfig.ParseMode = mode
}
// setInlineKeyboard attaches an inline keyboard markup.
func (m *message) setInlineKeyboard(kb tgbotapi.InlineKeyboardMarkup) {
	m.MessageConfig.ReplyMarkup = kb
}
// setReplyMsgID makes the message quote the given message ID.
func (m *message) setReplyMsgID(id int) {
	m.MessageConfig.ReplyToMessageID = id
}
// editMessage wraps an edit-message config so options can mutate it.
// Note: it has no setReplyMsgID, so withQuoteMessage has no effect on
// edits (the repliable type assertion fails).
type editMessage struct {
	tgbotapi.EditMessageTextConfig
}
// setText sets the replacement text.
func (m *editMessage) setText(text string) {
	m.EditMessageTextConfig.Text = text
}
// setParseMode sets the Telegram parse mode for the edited text.
func (m *editMessage) setParseMode(mode string) {
	m.EditMessageTextConfig.ParseMode = mode
}
// setInlineKeyboard replaces the inline keyboard of the edited message.
func (m *editMessage) setInlineKeyboard(kb tgbotapi.InlineKeyboardMarkup) {
	m.ReplyMarkup = &kb
}
// reply builds a new message addressed to m's chat, applying every
// option in order, and returns it ready to send.
func reply(m *tgbotapi.Message, opts ...replyOption) tgbotapi.Chattable {
	out := message{
		MessageConfig: tgbotapi.NewMessage(m.Chat.ID, ""),
	}
	for _, apply := range opts {
		apply(m, &out)
	}
	return out.MessageConfig
}
// edit builds an edit for message m in its chat, applying every option
// in order, and returns it ready to send.
func edit(m *tgbotapi.Message, opts ...replyOption) tgbotapi.Chattable {
	out := editMessage{
		EditMessageTextConfig: tgbotapi.NewEditMessageText(m.Chat.ID, m.MessageID, ""),
	}
	for _, apply := range opts {
		apply(m, &out)
	}
	return out.EditMessageTextConfig
}
// withText sets the outgoing message text.
func withText(text string) replyOption {
	return func(_ *tgbotapi.Message, msg optionable) {
		msg.setText(text)
	}
}
// withError sets the text to a generic failure message wrapping err.
func withError(err error) replyOption {
	return withText("Oops, something went wrong: " + err.Error())
}
// withMarkdownV2 sets the parse mode to Telegram's MarkdownV2.
func withMarkdownV2() replyOption {
	return func(_ *tgbotapi.Message, msg optionable) {
		msg.setParseMode("MarkdownV2")
	}
}
// withQuoteMessage quotes the incoming message, when the target
// supports it (new messages do; edits do not implement repliable).
func withQuoteMessage() replyOption {
	return func(m *tgbotapi.Message, msg optionable) {
		if r, ok := msg.(repliable); ok {
			r.setReplyMsgID(m.MessageID)
		}
	}
}
// withInlineKeyboard attaches an inline keyboard built from the given
// button rows.
func withInlineKeyboard(rows ...[]tgbotapi.InlineKeyboardButton) replyOption {
	return func(_ *tgbotapi.Message, msg optionable) {
		msg.setInlineKeyboard(tgbotapi.NewInlineKeyboardMarkup(rows...))
	}
}
|
package machinery
// MachineryEvent describes a state-machine transition event: the states
// it may move the machine from, the state it moves to, and the actions
// allowed to trigger it.
// NOTE(review): *BasicEvent does not satisfy this interface as declared —
// its builder methods return *BasicEvent (these return nothing) and it
// exposes Event() rather than String(). Confirm whether the interface
// is stale.
type MachineryEvent interface {
	String() string
	AddStateFrom(...MachineryState)
	StateTo(MachineryState)
	AllowAction(...MachineryAction)
}
// BasicEvent is a buildable transition event; populate it via the
// chaining methods AddStateFrom, StateTo and AllowAction.
type BasicEvent struct {
	event string // event name
	statesFrom []MachineryState // states the machine may move from
	stateTo MachineryState // state the machine moves to
	actions []MachineryAction // actions allowed to trigger this event
}
// Event returns the event's name.
func (e BasicEvent) Event() string {
	return e.event
}
// AddStateFrom allows to set one (or more) states which this event can move the machine FROM.
// It returns the receiver for chaining.
func (e *BasicEvent) AddStateFrom(states ...MachineryState) *BasicEvent {
	e.statesFrom = append(e.statesFrom, states...)
	return e
}
// StateTo allows to set one state which this event can move the machine TO.
// It returns the receiver for chaining.
func (e *BasicEvent) StateTo(state MachineryState) *BasicEvent {
	e.stateTo = state
	return e
}
// AllowAction allows to set one (or more) actions that trigger this event to happen.
// It returns the receiver for chaining.
func (e *BasicEvent) AllowAction(actions ...MachineryAction) *BasicEvent {
	e.actions = append(e.actions, actions...)
	return e
}
// Event creates a named BasicEvent with no source states, target state
// or actions configured yet; use the builder methods to populate it.
// (The previous comment started with "NewEvent", which does not match
// the function name.)
func Event(name string) *BasicEvent {
	return &BasicEvent{
		event: name,
	}
}
|
package firewall
import (
"bufio"
"fmt"
"regexp"
"strconv"
"strings"
"github.com/Sirupsen/logrus"
)
var (
	// iptableRuleRe matches a listing line that starts with a rule
	// number and contains "ACCEPT" followed by a "dpt:<port>" token,
	// capturing the rule number and the destination port.
	iptableRuleRe = regexp.MustCompile(`^(\d+).*?ACCEPT.*?dpt:(\d+)`)
)
// PortExistsError is an error when an iptables definition exists for a port.
type PortExistsError struct {
	// Port is the TCP port that already has a rule.
	Port int
}

// Error implements the error interface.
func (e *PortExistsError) Error() string {
	return "rule for port " + strconv.Itoa(e.Port) + " exists in iptables"
}
// Firewall is an interface for controlling a firewall.
type Firewall interface {
	// Open allows traffic to the given TCP port.
	Open(port int) error
	// Close removes the allowance for the given TCP port.
	Close(port int) error
	// State returns the firewall's current rule state.
	State() (State, error)
}
// IptablesFirewall manages iptables firewalls.
type IptablesFirewall struct {
	log *logrus.Entry
	ic IptablesCommand
}
// Compile-time check that IptablesFirewall satisfies Firewall.
var _ Firewall = &IptablesFirewall{}
// NewIptablesFirewall creates an instance of IptablesFirewall.
func NewIptablesFirewall(ic IptablesCommand, log *logrus.Entry) *IptablesFirewall {
	return &IptablesFirewall{
		log: log,
		ic: ic,
	}
}
// State captures the current state of the firewall by listing the
// iptables rules.
func (f *IptablesFirewall) State() (State, error) {
	listing, err := f.ic.ListRules()
	if err != nil {
		return nil, err
	}
	return NewIptablesState(string(listing))
}
// Open opens a port on the firewall by prepending an ACCEPT rule.
// It returns PortExistsError when a rule for the port already exists.
// NOTE(review): findRuleByPort also errors when listing the current
// state fails, which is indistinguishable here from "no rule found"
// and leads to a prepend attempt — confirm that is intended.
func (f *IptablesFirewall) Open(port int) error {
	_, err := f.findRuleByPort(port)
	if err == nil {
		return &PortExistsError{Port: port}
	}
	return f.ic.PrependRule(port)
}
// Close closes a port on the firewall by removing its rule; it is an
// error if no rule for the port exists.
func (f *IptablesFirewall) Close(port int) error {
	if _, err := f.findRuleByPort(port); err != nil {
		return err
	}
	return f.ic.RemoveRule(port)
}
// findRuleByPort scans the current iptables state for the rule whose
// destination port matches port, returning an error when no rule is
// found or the state cannot be read.
func (f *IptablesFirewall) findRuleByPort(port int) (*Rule, error) {
	state, err := f.State()
	if err != nil {
		return nil, err
	}
	rules, err := state.Rules()
	if err != nil {
		return nil, err
	}
	for i := range rules {
		if rules[i].Destination == port {
			return &rules[i], nil
		}
	}
	return nil, fmt.Errorf("unable to find port %d in iptables", port)
}
// State is interface for returning firewall rules.
type State interface {
	// Rules parses and returns the firewall's rules.
	Rules() ([]Rule, error)
}
// IptablesState reads iptables to determine the current state.
type IptablesState struct {
	in string // raw iptables listing text
	matcher *regexp.Regexp // extracts rule number and port (iptableRuleRe)
}
// Compile-time check that IptablesState satisfies State.
var _ State = &IptablesState{}
// NewIptablesState creates an instance of IptablesState.
// The error return is currently always nil.
func NewIptablesState(in string) (*IptablesState, error) {
	return &IptablesState{
		in: in,
		matcher: iptableRuleRe,
	}, nil
}
// Rules returns a list of rules as defined in iptables, one Rule per
// listing line that matches the rule-number/ACCEPT/dpt pattern.
// It returns an error if a captured number fails to parse or if
// scanning the input fails.
func (is *IptablesState) Rules() ([]Rule, error) {
	scanner := bufio.NewScanner(strings.NewReader(is.in))
	rules := []Rule{}
	for scanner.Scan() {
		// The pattern is ^-anchored, so at most one match per line.
		m := is.matcher.FindStringSubmatch(scanner.Text())
		if m == nil {
			continue
		}
		ruleNo, err := strconv.Atoi(m[1])
		if err != nil {
			return nil, err
		}
		port, err := strconv.Atoi(m[2])
		if err != nil {
			return nil, err
		}
		rules = append(rules, Rule{
			RuleNumber: ruleNo,
			Destination: port,
		})
	}
	// Previously a scanner error was silently dropped, reporting a
	// truncated rule set as success.
	if err := scanner.Err(); err != nil {
		return nil, err
	}
	return rules, nil
}
// Rule is a firewall rule. Currently the implementation only knows about TCP ports.
type Rule struct {
	// RuleNumber is the iptables rule number in a chain.
	RuleNumber int
	// Destination is the TCP destination port.
	Destination int
}
|
package main
import "fmt"
import "strconv"
import "os"
// main finds the first PRIME_COUNT primes by trial division against the
// primes discovered so far, printing "begin"/"end" markers for
// benchmarking harnesses and, when BENCH_DEBUG is "true", every prime.
func main() {
	primeCountStr := os.Getenv("PRIME_COUNT")
	primeCount, err := strconv.Atoi(primeCountStr)
	benchDebug := os.Getenv("BENCH_DEBUG")
	if err != nil {
		fmt.Println("Please set the PRIME_COUNT environment variable.")
		os.Exit(-1) // kept as-is; note the shell observes this as 255
	}
	primes := make([]int, 0)
	// isPrime relies on primes being discovered in ascending order:
	// every composite candidate has a prime factor found earlier.
	isPrime := func(number int) bool {
		for _, p := range primes {
			if number%p == 0 {
				return false
			}
		}
		return true
	}
	fmt.Println("begin")
	os.Stdout.Sync()
	for number := 2; len(primes) < primeCount; number++ {
		if isPrime(number) {
			primes = append(primes, number)
		}
	}
	if benchDebug == "true" {
		for _, p := range primes {
			fmt.Println(p)
		}
	}
	fmt.Println("end")
	os.Stdout.Sync()
}
|
package lists
import (
"testing"
)
// TestNew verifies that a freshly created list carries the given head
// value and that the head has no successor.
func TestNew(t *testing.T) {
	l := New("Hello")
	if l.Head.Value != "Hello" {
		t.Fail()
	}
	if l.Head.NextNode != nil {
		t.Fail()
	}
}
// TestPush verifies that pushing appends a tail node whose PrevNode
// points back at the head.
func TestPush(t *testing.T) {
	list := New("Hello")
	if list.Head.Value != "Hello" {
		t.Fatal("Head value != Hello")
	}
	if list.Head.NextNode != nil {
		t.Fatal("Head next not empty")
	}
	list.Push("World")
	node := list.Tail()
	if node.Value != "World" {
		t.Fatal("Value != world")
	}
	if node.NextNode != nil {
		t.Fatal("Next node not nil")
	}
	if node.PrevNode != list.Head {
		t.Fatal("Prev node not head")
	}
}
// TestGet verifies zero-based indexed access and that an out-of-range
// index yields an error.
func TestGet(t *testing.T) {
	list := New("Hello")
	list.Push("World")
	list.Push("!")
	element, err := list.Get(2)
	if err != nil {
		t.Fatal(err)
	}
	if element.Value != "!" {
		t.Fatal("Element not !")
	}
	_, err = list.Get(3)
	if err == nil {
		t.Fatal("element should be out of bounds")
	}
}
// TestPop verifies that Pop returns the old head and promotes the next
// node, clearing its back-pointer.
func TestPop(t *testing.T) {
	list := New("Hello")
	list.Push("World")
	list.Push("!")
	oldHead := list.Pop()
	if oldHead.Value != "Hello" {
		t.Fatal("incorrect old head")
	}
	if list.Head.Value != "World" {
		t.Fatal("incorrect new head")
	}
	if list.Head.PrevNode != nil {
		t.Fatal("new head prev has not been removed")
	}
}
// TestDelete verifies that deleting the middle element shifts the
// following element into its index.
func TestDelete(t *testing.T) {
	list := New("Hello")
	list.Push("World")
	list.Push("!")
	err := list.Delete(1)
	if err != nil {
		t.Fatal(err)
	}
	element, err := list.Get(1)
	if err != nil {
		t.Fatal(err)
	}
	if element.Value != "!" {
		t.Fatal("expected !")
	}
}
|
/*
Package servicePacks "Every package should have a package comment, a block comment preceding the package clause.
For multi-file packages, the package comment only needs to be present in one file, and any
one will do. The package comment should introduce the package and provide information
relevant to the package as a whole. It will appear first on the godoc page and should set
up the detailed documentation that follows."
*/
package servicePacks
import (
"encoding/json"
"github.com/MerinEREN/iiPackages/api"
"github.com/MerinEREN/iiPackages/datastore/photo"
"github.com/MerinEREN/iiPackages/datastore/servicePack"
"github.com/MerinEREN/iiPackages/datastore/tagServicePack"
"github.com/MerinEREN/iiPackages/datastore/tagUser"
"github.com/MerinEREN/iiPackages/datastore/user"
"github.com/MerinEREN/iiPackages/session"
"github.com/MerinEREN/iiPackages/storage"
"golang.org/x/net/context"
"google.golang.org/appengine/datastore"
"google.golang.org/appengine/memcache"
"log"
"net/http"
"strconv"
"strings"
"time"
)
// Handler returns account's servicePacks via account ID if provided.
// Otherwise if the user is "admin" returns servicePacks via account's all user's tag keys
// else returns only via user tag keys to show in timeline.
// If the request method is POST, uploads the files if present to the storage and
// puts the servicePack to the datastore.
func Handler(s *session.Session) {
URL := s.R.URL
q := URL.Query()
switch s.R.Method {
case "POST":
uID := q.Get("uID")
if uID == "" {
log.Printf("Path: %s, Error: No user ID.\n", URL.Path)
http.Error(s.W, "No user ID.", http.StatusBadRequest)
return
}
// https://stackoverflow.com/questions/15202448/go-formfile-for-multiple-files
err := s.R.ParseMultipartForm(32 << 20) // 32MB is the default used by FormFile.
if err != nil {
log.Printf("Path: %s, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(), http.StatusInternalServerError)
return
}
typ := s.R.MultipartForm.Value["type"][0]
tIDs := s.R.MultipartForm.Value["tagIDs"]
title := s.R.MultipartForm.Value["title"][0]
description := s.R.MultipartForm.Value["description"][0]
sp := &servicePack.ServicePack{
Type: typ,
Title: title,
Description: description,
Status: "active",
Created: time.Now(),
LastModified: time.Now(),
}
fhx := s.R.MultipartForm.File["files"]
px := make([]*photo.Photo, 0, cap(fhx))
for _, v := range fhx {
f, err := v.Open()
if err != nil {
log.Printf("Path: %s, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
defer f.Close()
link, err := storage.UploadFile(s, f, v)
if err != nil {
log.Printf("Path: %s, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
p := &photo.Photo{
Link: link,
Status: "active",
}
px = append(px, p)
}
pk, err := datastore.DecodeKey(uID)
if err != nil {
log.Printf("Path: %s, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(), http.StatusInternalServerError)
return
}
k := datastore.NewIncompleteKey(s.Ctx, "ServicePack", pk)
tspx := make([]*tagServicePack.TagServicePack, 0, cap(tIDs))
ktspx := make([]*datastore.Key, 0, cap(tIDs))
kpx := make([]*datastore.Key, 0, cap(px))
err = datastore.RunInTransaction(s.Ctx, func(ctx context.Context) (
err1 error) {
k, err1 = datastore.Put(ctx, k, sp)
if err1 != nil {
return
}
for _, v := range tIDs {
ktsp := datastore.NewKey(s.Ctx, "TagServicePack", v, 0, k)
ktspx = append(ktspx, ktsp)
kt := new(datastore.Key)
kt, err1 = datastore.DecodeKey(v)
if err1 != nil {
return
}
tsp := &tagServicePack.TagServicePack{
Created: time.Now(),
TagKey: kt,
}
tspx = append(tspx, tsp)
}
_, err1 = datastore.PutMulti(ctx, ktspx, tspx)
if err1 != nil {
return
}
for i := 0; i < len(px); i++ {
kp := datastore.NewIncompleteKey(s.Ctx, "Photo", k)
kpx = append(kpx, kp)
}
_, err1 = datastore.PutMulti(ctx, kpx, px)
return
}, nil)
if err != nil {
// REMOVE ALL THE UPLOADED FILES FROM THE STORAGE !!!!!!!!!!!!!!!!!
log.Printf("Path: %s, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(), http.StatusInternalServerError)
return
}
s.W.WriteHeader(http.StatusNoContent)
default:
accID := q.Get("aID")
var crsrAsStringx []string
sps := make(servicePack.ServicePacks)
if accID != "" {
ka, err := datastore.DecodeKey(accID)
if err != nil {
log.Printf("Path: %s, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
kux, err := user.GetKeysByParentOrdered(s.Ctx, ka)
if err != nil {
log.Printf("Path: %s, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
after := q["after"]
if len(after) == 0 {
after = make([]string, len(kux))
}
var lim int
limit := q.Get("limit")
if limit == "" {
lim = 0
} else {
lim, err = strconv.Atoi(limit)
if err != nil {
log.Printf("Path: %s, Error: %v\n", URL.Path, err)
}
}
for i, v := range kux {
sps2, crsrAsString, err := servicePack.GetNextByParentLimited(s.Ctx, after[i], v, lim)
if err != nil {
log.Printf("Path: %s, Request: get account servicePacks via users keys, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
for i2, v2 := range sps2 {
sps[i2] = v2
}
crsrAsStringx = append(crsrAsStringx, crsrAsString)
}
next := api.GenerateSubLink(s, crsrAsStringx, "next")
s.W.Header().Set("Link", next)
} else {
// For timeline
uID := q.Get("uID")
if uID == "" {
log.Printf("Path: %s, Error: No user ID.\n", URL.Path)
http.Error(s.W, "No user ID.", http.StatusBadRequest)
return
}
ku, err := datastore.DecodeKey(uID)
if err != nil {
log.Printf("Path: %s, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
u := new(user.User)
item, err := memcache.Get(s.Ctx, "u")
if err == nil {
err = json.Unmarshal(item.Value, u)
if err != nil {
log.Printf("Path: %s, Error: %v\n",
URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
} else {
err = datastore.Get(s.Ctx, ku, u)
if err == datastore.ErrNoSuchEntity {
log.Printf("Path: %s, Error: %v\n", URL.Path, err)
// ALSO LOG THIS WITH DATASTORE LOG !!!!!!!!!!!!!!!
http.Error(s.W, err.Error(), http.StatusNoContent)
return
} else if err != nil {
log.Printf("Path: %s, Error: %v\n", URL.Path, err)
// ALSO LOG THIS WITH DATASTORE LOG !!!!!!!!!!!!!!!
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
} else {
bs, err := json.Marshal(u)
if err != nil {
log.Printf("Path: %s, Error: %v\n",
URL.Path, err)
}
item = &memcache.Item{
Key: "u",
Value: bs,
}
err = memcache.Add(s.Ctx, item)
if err != nil {
log.Printf("Path: %s, Error: %v\n",
URL.Path, err)
}
}
}
var ktux []*datastore.Key
var ktax []*datastore.Key
var ktx []*datastore.Key
isAdmin, err := u.IsAdmin(s.Ctx)
if isAdmin {
item, err = memcache.Get(s.Ctx, "ktax")
if err == nil {
err = json.Unmarshal(item.Value, &ktax)
if err != nil {
log.Printf("Path: %s, Error: %v\n",
URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
ktx = ktax
} else {
kux, err := user.GetKeysByParentOrdered(s.Ctx,
ku.Parent())
if err != nil {
log.Printf("Path: %s, Request: get user keys via parent, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
if len(kux) == 0 {
// Impossible !!!!!!!!!!!!!!!!!!!!!!!!!!!!!
log.Printf("Path: %s, Request: get user keys via parent, Error: %v\n", URL.Path, err)
s.W.WriteHeader(http.StatusNoContent)
return
}
for _, v := range kux {
ktux, err = tagUser.GetKeysByUserOrTagKey(s.Ctx, v)
if err == datastore.Done {
if len(ktux) == 0 {
log.Printf("Path: %s, Request: getting user's tags, Error: %v\n", URL.Path, err)
}
} else if err != nil {
log.Printf("Path: %s, Request: getting user's tags, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
for _, v2 := range ktux {
absent := true
for _, v3 := range ktax {
if *v3 == *v2 {
absent = false
}
}
if absent {
ktax = append(ktax, v2)
}
}
}
bs, err := json.Marshal(ktax)
if err != nil {
log.Printf("Path: %s, Error: %v\n",
URL.Path, err)
}
item = &memcache.Item{
Key: "ktax",
Value: bs,
}
err = memcache.Add(s.Ctx, item)
if err != nil {
log.Printf("Path: %s, Error: %v\n",
URL.Path, err)
}
ktx = ktax
}
} else {
item, err = memcache.Get(s.Ctx, "ktux")
if err == nil {
err = json.Unmarshal(item.Value, &ktx)
if err != nil {
log.Printf("Path: %s, Error: %v\n",
URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
} else {
ktux, err = tagUser.GetKeysByUserOrTagKey(s.Ctx, ku)
if err == datastore.Done {
if len(ktux) == 0 {
log.Printf("Path: %s, Request: getting user's tags, Error: %v\n", URL.Path, err)
s.W.WriteHeader(http.StatusNoContent)
return
}
} else if err != nil {
log.Printf("Path: %s, Request: getting user's tags, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
bs, err := json.Marshal(ktux)
if err != nil {
log.Printf("Path: %s, Error: %v\n",
URL.Path, err)
}
item = &memcache.Item{
Key: "ktux",
Value: bs,
}
err = memcache.Add(s.Ctx, item)
if err != nil {
log.Printf("Path: %s, Error: %v\n",
URL.Path, err)
}
ktx = ktux
}
}
before := q["before"]
after := q["after"]
var crsrAsStringx []string
if len(before) != 0 {
for i, v := range ktx {
kx, crsrAsString, err := tagServicePack.GetPrevKeysParentsFilteredByTagKey(s.Ctx, before[i], v)
if err != datastore.Done {
log.Printf("Path: %s, Request: get previous service pack's keys by tag key, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
sps2, err := servicePack.GetMulti(s.Ctx, kx)
if err != nil {
log.Printf("Path: %s, Request: get previous service packs, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
for i2, v2 := range sps2 {
sps[i2] = v2
}
crsrAsStringx = append(crsrAsStringx, crsrAsString)
}
prev := api.GenerateSubLink(s, crsrAsStringx, "prev")
s.W.Header().Set("Link", prev)
} else if len(after) != 0 {
limit := q.Get("limit")
var lim int
if limit == "" {
lim = 0
} else {
lim, err = strconv.Atoi(limit)
if err != nil {
log.Printf("Path: %s, Error: %v\n",
URL.Path, err)
}
}
for i, v := range ktx {
kx, crsrAsString, err := tagServicePack.GetNextKeysParentsFilteredByTagKeyLimited(s.Ctx, after[i], v, lim)
if err != nil {
log.Printf("Path: %s, Request: get next service pack's keys by tag key, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
sps2, err := servicePack.GetMulti(s.Ctx, kx)
if err != nil {
log.Printf("Path: %s, Request: get next service packs, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
for i2, v2 := range sps2 {
sps[i2] = v2
}
crsrAsStringx = append(crsrAsStringx, crsrAsString)
}
next := api.GenerateSubLink(s, crsrAsStringx, "next")
s.W.Header().Set("Link", next)
} else {
limit := q.Get("limit")
var lim int
if limit == "" {
lim = 0
} else {
lim, err = strconv.Atoi(limit)
if err != nil {
log.Printf("Path: %s, Error: %v\n",
URL.Path, err)
}
}
var crsrAsStringx2 []string
for _, v := range ktx {
kx, beforeAsString, afterAsString, err := tagServicePack.GetKeysParentsFilteredByTagKeyLimited(s.Ctx, v, lim)
if err != nil {
log.Printf("Path: %s, Request: get initial service pack's keys by tag key, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
sps2, err := servicePack.GetMulti(s.Ctx, kx)
if err != nil {
log.Printf("Path: %s, Request: get initial service packs, Error: %v\n", URL.Path, err)
http.Error(s.W, err.Error(),
http.StatusInternalServerError)
return
}
for i2, v2 := range sps2 {
sps[i2] = v2
}
crsrAsStringx = append(crsrAsStringx, beforeAsString)
crsrAsStringx2 = append(crsrAsStringx2, afterAsString)
}
prev := api.GenerateSubLink(s, crsrAsStringx, "prev")
next := api.GenerateSubLink(s, crsrAsStringx2, "next")
sx := []string{prev, next}
link := strings.Join(sx, ", ")
s.W.Header().Set("Link", link)
s.W.Header().Set("X-Reset", "true")
}
}
if len(sps) == 0 {
s.W.WriteHeader(http.StatusNoContent)
return
}
s.W.Header().Set("Content-Type", "application/json")
api.WriteResponseJSON(s, sps)
}
}
|
package main
import "fmt"
// 547. Friend Circles
// There are N students in a class. Some of them are friends, some are not. Friendship is transitive: if A is a friend of B, and B is a friend of C, then A is also a friend of C. A friend circle is the set of all (direct or transitive) friends.
// Given an N * N matrix M describing the friendships in the class, where M[i][j] = 1 means student i and student j are friends (otherwise unknown), output the total number of known friend circles among all students.
// Notes:
// N is in the range [1, 200].
// For every student, M[i][i] = 1.
// If M[i][j] = 1, then M[j][i] = 1.
// https://leetcode-cn.com/problems/friend-circles/
func main() {
	// Sample matrix: students 0-1 and 1-2 are friends, so everyone is
	// transitively connected into a single circle.
	matrix := [][]int{
		{1, 1, 0},
		{1, 1, 1},
		{0, 1, 1},
	}
	fmt.Println(findCircleNum2(matrix)) // 1
}
// Approach 1: depth-first search.
// findCircleNum counts friend circles: every not-yet-seen student seeds a
// new circle, and the DFS marks everyone transitively connected to them.
func findCircleNum(M [][]int) int {
	n := len(M)
	if n == 0 {
		return 0
	}
	seen := make([]bool, n)
	circles := 0
	for student := 0; student < n; student++ {
		if seen[student] {
			continue
		}
		circles++
		findCircleNumDFSHelper(M, seen, student, n)
	}
	return circles
}

// findCircleNumDFSHelper marks every direct friend of student i that has
// not been visited yet and recurses into each of them. Since M[i][i] == 1,
// the seed student is marked on the first call.
func findCircleNumDFSHelper(M [][]int, visited []bool, i, n int) {
	for j := 0; j < n; j++ {
		if visited[j] || M[i][j] != 1 {
			continue
		}
		visited[j] = true
		findCircleNumDFSHelper(M, visited, j, n)
	}
}
// Approach 2: union-find (disjoint set).
// findCircleNum2 counts friend circles by unioning every pair of direct
// friends; the number of remaining disjoint sets is the answer.
func findCircleNum2(M [][]int) int {
	if len(M) == 0 {
		return 0
	}
	n := len(M)
	uf := NewUnionFind(n)
	for i := 0; i < n; i++ {
		for j := 0; j < n; j++ {
			if M[i][j] == 1 {
				uf.union(i, j)
			}
		}
	}
	return uf.getCount()
}

// unionFind is a disjoint-set forest with path compression.
type unionFind struct {
	parent []int // parent[i] is i's parent; a root is its own parent
	count  int   // current number of disjoint sets
}

// NewUnionFind returns a unionFind holding n singleton sets {0}..{n-1}.
func NewUnionFind(n int) *unionFind {
	p := make([]int, n)
	for i := range p {
		p[i] = i
	}
	return &unionFind{
		parent: p,
		count:  n,
	}
}

// find returns the root of the set containing p, compressing the path so
// that subsequent lookups are near O(1).
// (Receivers renamed from `this` to the conventional short `uf`, and made
// pointer receivers uniformly, matching union and avoiding per-call copies.)
func (uf *unionFind) find(p int) int {
	root := p
	for root != uf.parent[root] {
		root = uf.parent[root]
	}
	// Path compression: repoint every node on the walk directly at root.
	for p != uf.parent[p] {
		next := uf.parent[p]
		uf.parent[p] = root
		p = next
	}
	return root
}

// union merges the sets containing x and y, decrementing the set count
// when they were previously disjoint.
func (uf *unionFind) union(x, y int) {
	rootX := uf.find(x)
	rootY := uf.find(y)
	if rootX == rootY {
		return
	}
	uf.parent[rootX] = rootY
	uf.count--
}

// getCount reports the current number of disjoint sets.
func (uf *unionFind) getCount() int {
	return uf.count
}
|
package main
import (
"fmt"
"os"
"strings"
"testing"
"time"
"github.com/nuttapp/pinghist/dal"
"github.com/nuttapp/pinghist/ping"
. "github.com/smartystreets/goconvey/convey"
)
// Test_main_unit verifies ParseTime against a table of supported input
// layouts, substituting the current date components where an input omits
// them.
func Test_main_unit(t *testing.T) {
	Convey("main", t, func() {
		// Format each component separately. The previous code formatted
		// "2006-01-02-07:00" and split the result on "-", which panics
		// with an index-out-of-range for zero or positive UTC offsets:
		// there the zone renders as e.g. "+02:00" and the string only
		// splits into three parts.
		now := time.Now()
		y := now.Format("2006")
		m := now.Format("01")
		d := now.Format("02")
		z := now.Format("-07:00") // includes the sign, e.g. "-05:00" or "+02:00"
		testTable := map[string]string{
			"01/01 01:52 pm": fmt.Sprintf("%s-01-01T13:52:00%s", y, z),        // full
			"1/01 01:00 pm":  fmt.Sprintf("%s-01-01T13:00:00%s", y, z),        // short day/month
			"01/1 01:00 pm":  fmt.Sprintf("%s-01-01T13:00:00%s", y, z),        // short day/month
			"1/1 01:00 pm":   fmt.Sprintf("%s-01-01T13:00:00%s", y, z),        // short day/month
			"01/01 1:00 pm":  fmt.Sprintf("%s-01-01T13:00:00%s", y, z),        // short hour
			"1/01 1:00 pm":   fmt.Sprintf("%s-01-01T13:00:00%s", y, z),        // short hour
			"01/1 1:00 pm":   fmt.Sprintf("%s-01-01T13:00:00%s", y, z),        // short hour
			"1/1 1:00 pm":    fmt.Sprintf("%s-01-01T13:00:00%s", y, z),        // short hour
			"01:52 pm":       fmt.Sprintf("%s-%s-%sT13:52:00%s", y, m, d, z),  // time
			"1:52 pm":        fmt.Sprintf("%s-%s-%sT13:52:00%s", y, m, d, z),  // time
			"01/01 13:52":    fmt.Sprintf("%s-01-01T13:52:00%s", y, z),        // 24hr time
			"1/01 13:52":     fmt.Sprintf("%s-01-01T13:52:00%s", y, z),        // 24hr time
			"01/1 13:52":     fmt.Sprintf("%s-01-01T13:52:00%s", y, z),        // 24hr time
			"1/1 13:52":      fmt.Sprintf("%s-01-01T13:52:00%s", y, z),        // 24hr time
			"13:52":          fmt.Sprintf("%s-%s-%sT13:52:00%s", y, m, d, z),  // 24hr time
		}
		for teststr, goodstr := range testTable {
			// Pin the loop variables: Convey re-executes these closures
			// once per leaf, after the loop has moved on (pre-Go-1.22
			// loop variables are shared across iterations).
			teststr, goodstr := teststr, goodstr
			Convey("Given "+teststr, func() {
				Convey("time should equal "+goodstr, func() {
					// Renamed from t, which shadowed the *testing.T.
					tm, err := ParseTime(teststr)
					So(err, ShouldBeNil)
					So(tm.Format(time.RFC3339), ShouldEqual, goodstr)
				})
			})
		}
	})
}
// Test_main_integration pings localhost once, saves the sample through the
// DAL, and reads it back as a single one-hour group whose aggregate stats
// all collapse to the lone sample.
func Test_main_integration(t *testing.T) {
	Convey("Should ping localhost once and save to db", t, func() {
		// Remove the database file after each execution of this scope.
		Reset(func() {
			os.Remove("pinghist.db")
		})
		ip := "127.0.0.1"
		startTime := time.Now()
		pr, err := ping.Ping(ip)
		So(pr, ShouldNotBeNil)
		So(err, ShouldBeNil)
		d := dal.NewDAL()
		d.CreateBuckets()
		err = d.SavePing(ip, startTime, float32(pr.Time))
		So(err, ShouldBeNil)
		// Query a one-minute window grouped into one-hour buckets: the
		// single sample must land in exactly one group.
		groups, err := d.GetPings(ip, startTime, startTime.Add(1*time.Minute), 1*time.Hour)
		So(err, ShouldBeNil)
		So(len(groups), ShouldEqual, 1)
		// With one sample, min/max/avg/total all equal the sample and
		// the standard deviation is zero.
		So(groups[0].MaxTime, ShouldEqual, pr.Time)
		So(groups[0].MinTime, ShouldEqual, pr.Time)
		So(groups[0].AvgTime, ShouldEqual, pr.Time)
		So(groups[0].StdDev, ShouldEqual, 0)
		So(groups[0].Received, ShouldEqual, 1)
		So(groups[0].Timedout, ShouldEqual, 0)
		So(groups[0].TotalTime, ShouldEqual, pr.Time)
	})
}
|
package command
import (
"fmt"
"os"
"text/tabwriter"
"github.com/jclem/graphsh/introspection"
"github.com/jclem/graphsh/types"
)
// Ls is the command that lists the fields available on the current node.
type Ls struct{}
// testLs recognizes the "ls" command string; any other input yields a nil
// Command with no error.
func testLs(input string) (Command, error) {
	if input != "ls" {
		return nil, nil
	}
	return &Ls{}, nil
}
// Execute implements the Command interface. It writes a tab-aligned table
// of the current root query's fields (name, type, description) to stdout.
func (c Ls) Execute(s types.Session) error {
	fields, err := introspection.GetFields(s.Client(), s.RootQuery())
	if err != nil {
		return err
	}
	tw := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0)
	// Fprintf with a trailing newline replaces the Fprintln(Sprintf(...))
	// anti-pattern, which formatted twice for no benefit.
	fmt.Fprintf(tw, "%s\t%s\t%s\n", "NAME", "TYPE", "DESCRIPTION")
	for _, field := range fields {
		fmt.Fprintf(tw, "%s\t%s\t%s\n", field.Name, field.GetHumanTypeName(), field.Description)
	}
	// Flush writes the buffered rows; its error was previously discarded,
	// silently hiding a failed listing.
	return tw.Flush()
}
|
package 套模板
// combinationSequence accumulates the result set built by combinationSumExec.
var combinationSequence [][]int

// combinationSum3 returns all combinations of k distinct numbers drawn
// from 1..9 whose sum is n. Each number is used at most once per
// combination. (LeetCode 216, Combination Sum III.)
func combinationSum3(k int, n int) [][]int {
	/* 1. Preprocessing: the candidate pool is the digits 1..9. */
	candidates := make([]int, 9)
	combinationSequence = make([][]int, 0)
	for i := 1; i <= 9; i++ {
		candidates[i-1] = i
	}
	/* 2. Run the backtracking search. */
	combinationSumExec(candidates, n, k, make([]int, 0, 10))
	/* 5. Return the accumulated result set. */
	return combinationSequence
}

// combinationSumExec backtracks over candidates, choosing values that
// reduce the remaining target n and remaining count k. A combination is
// recorded only when both reach zero together.
func combinationSumExec(candidates []int, n int, k int, sequence []int) {
	/* 3. Record a complete combination, or prune dead branches. */
	if n == 0 && k == 0 {
		combinationSequence = append(combinationSequence, newSlice(sequence))
		return
	}
	// Candidates are all positive, so once n goes negative it can never
	// recover; pruning n < 0 (in addition to the original n == 0 and
	// k == 0 checks) skips provably fruitless recursion without changing
	// the result set.
	if n <= 0 || k == 0 {
		return
	}
	for i := 0; i < len(candidates); i++ {
		/* 4. Recurse. Combinations may not reuse a number, so the next
		   level only sees candidates[i+1:]. */
		combinationSumExec(candidates[i+1:], n-candidates[i], k-1, append(sequence, candidates[i]))
	}
}

// newSlice returns a deep copy of oldSlice so later backtracking cannot
// mutate a recorded combination through the shared backing array.
func newSlice(oldSlice []int) []int {
	slice := make([]int, len(oldSlice))
	copy(slice, oldSlice)
	return slice
}
/*
Problem link:
https://leetcode-cn.com/problems/combination-sum-iii/submissions/ Combination Sum III
*/
/*
Summary
1. The problem asks:
From a pool of distinct positive numbers where each element may be chosen at most once, select k numbers whose sum equals n, and output every such combination.
*/
|
package msg
// xlattice_go/msg/in_q_test.go
import (
"encoding/hex"
"fmt"
xr "github.com/jddixon/rnglib_go"
xi "github.com/jddixon/xlNodeID_go"
xn "github.com/jddixon/xlNode_go"
xt "github.com/jddixon/xlTransport_go"
xu "github.com/jddixon/xlUtil_go"
. "gopkg.in/check.v1"
"time"
)
// Blank assignments keep the fmt and time imports alive while their
// direct usage varies between test revisions.
var _ = fmt.Print
var _ = time.Millisecond

// Message sequence numbers expected on the wire by the handshake tests
// below (typed uint64 so their addresses can populate XLatticeMsg fields).
var (
	TWO   uint64 = 2
	THREE uint64 = 3
	FOUR  uint64 = 4
	FIVE  uint64 = 5
	SIX   uint64 = 6
)
// makeANode builds a throwaway node with a random NodeID and name, a
// scratch LFS directory under tmp/, and exactly one TCP acceptor bound to
// an ephemeral localhost port. It returns the node and that acceptor.
// (The badGuy name reflects its use for creating peers unknown to the
// node under test.)
func (s *XLSuite) makeANode(c *C) (badGuy *xn.Node, acc xt.AcceptorI) {
	rng := xr.MakeSimpleRNG()
	id := make([]byte, xu.SHA1_BIN_LEN)
	rng.NextBytes(id)
	nodeID, err := xi.NewNodeID(id)
	c.Assert(err, IsNil)
	name := rng.NextFileName(8)
	lfs := "tmp/" + hex.EncodeToString(id)
	badGuy, err = xn.NewNew(name, nodeID, lfs)
	c.Assert(err, IsNil)
	accCount := badGuy.SizeAcceptors()
	c.Assert(accCount, Equals, 0)
	// Port 0 asks the OS for any free port.
	ep, err := xt.NewTcpEndPoint("127.0.0.1:0")
	c.Assert(err, IsNil)
	ndx, err := badGuy.AddEndPoint(ep)
	c.Assert(err, IsNil)
	c.Assert(ndx, Equals, 0)
	acc = badGuy.GetAcceptor(0)
	return
}
// If we receive a hello on a connection but do not recognize the
// nodeID we just drop the connection. We only deal with known peers.
// If either the crypto public key or sig public key is wrong, we send
// an error message and close the connection. If the nodeID, cKey, and
// sKey are correct, we advance the handler's state to HELLO_RCVD
// TestHelloHandler drives the server-side handler through a full session:
// hello/ack, keepalive/ack, bye/ack, then server shutdown. Sequence
// numbers are checked on every ack: the client sends ONE/THREE/FIVE and
// the server replies with TWO/FOUR/SIX, echoing the client's number in
// YourMsgN.
func (s *XLSuite) TestHelloHandler(c *C) {
	if VERBOSITY > 0 {
		fmt.Println("TEST_HELLO_HANDLER")
	}
	// Create a node and add a mock peer. This is a cluster of 2.
	nodes, accs := xn.MockLocalHostCluster(2)
	defer func() {
		for i := 0; i < 2; i++ {
			if accs[i] != nil {
				accs[i].Close()
			}
		}
	}()
	myNode, peerNode := nodes[0], nodes[1]
	meAsPeer := peerNode.GetPeer(0)
	myAcc, peerAcc := accs[0], accs[1]
	_ = peerAcc // never used
	c.Assert(myAcc, Not(IsNil))
	myAccEP := myAcc.GetEndPoint()
	myCtor, err := xt.NewTcpConnector(myAccEP)
	c.Assert(err, IsNil)
	// myNode's server side
	stopCh := make(chan bool, 1) // has buffer so won't block
	stoppedCh := make(chan bool, 1)
	go func() {
		for {
			cnx, err := myAcc.Accept()
			if err != nil {
				// acceptor closed: end the accept loop
				break
			}
			// each connection handled by a separate goroutine
			go func() {
				_, _ = NewInHandler(myNode, cnx, stopCh, stoppedCh)
			}()
		}
	}()
	// -- WELL-FORMED HELLO -----------------------------------------
	// Known peer sends Hello with all parameters correct. We reply
	// with an Ack and advance state to open.
	conn, err := myCtor.Connect(xt.ANY_TCP_END_POINT)
	c.Assert(err, IsNil)
	c.Assert(conn, Not(IsNil))
	cnx2 := conn.(*xt.TcpConnection)
	defer cnx2.Close()
	oh := &OutHandler{
		Node:       peerNode,
		CnxHandler: CnxHandler{Cnx: cnx2, Peer: meAsPeer}}
	// manually create and send a hello message -
	// XXX HELLO_MSG IS OBSOLETE; it's done with RSA/AES handshake
	peerHello, err := MakeHelloMsg(peerNode)
	c.Assert(err, IsNil)
	c.Assert(peerHello, Not(IsNil))
	data, err := EncodePacket(peerHello)
	c.Assert(err, IsNil)
	c.Assert(data, Not(IsNil))
	count, err := cnx2.Write(data)
	c.Assert(err, IsNil)
	c.Assert(count, Equals, len(data))
	oh.MsgN = ONE
	// end manual hello -------------------------
	// Give the server goroutine time to process the hello.
	time.Sleep(100 * time.Millisecond)
	// wait for ack
	ack, err := oh.readMsg()
	c.Assert(err, IsNil)
	c.Assert(ack, Not(IsNil))
	// verify msg returned is an ack and has the correct parameters
	c.Assert(ack.GetOp(), Equals, XLatticeMsg_Ack)
	c.Assert(ack.GetMsgN(), Equals, TWO)
	c.Assert(ack.GetYourMsgN(), Equals, ONE) // FOO
	// -- KEEPALIVE -------------------------------------------------
	cmd := XLatticeMsg_KeepAlive
	keepAlive := &XLatticeMsg{
		Op:   &cmd,
		MsgN: &THREE,
	}
	data, err = EncodePacket(keepAlive)
	c.Assert(err, IsNil)
	c.Assert(data, Not(IsNil))
	count, err = cnx2.Write(data)
	c.Assert(err, IsNil)
	c.Assert(count, Equals, len(data))
	// Wait for ack. In a better world we time out if an ack is not
	// received in some short period rather than blocking forever.
	ack, err = oh.readMsg()
	c.Assert(err, IsNil)
	c.Assert(ack, Not(IsNil))
	// verify msg returned is an ack and has the correct parameters
	c.Assert(ack.GetOp(), Equals, XLatticeMsg_Ack)
	c.Assert(ack.GetMsgN(), Equals, FOUR)
	c.Assert(ack.GetYourMsgN(), Equals, THREE)
	// -- BYE -------------------------------------------------------
	cmd = XLatticeMsg_Bye
	bye := &XLatticeMsg{
		Op:   &cmd,
		MsgN: &FIVE,
	}
	data, err = EncodePacket(bye)
	c.Assert(err, IsNil)
	c.Assert(data, Not(IsNil))
	count, err = cnx2.Write(data)
	c.Assert(err, IsNil)
	c.Assert(count, Equals, len(data))
	// Wait for ack. In a better world we time out if an ack is not
	// received in some short period rather than blocking forever.
	ack, err = oh.readMsg()
	c.Assert(err, IsNil)
	c.Assert(ack, Not(IsNil))
	// verify msg returned is an ack and has the correct parameters
	c.Assert(ack.GetOp(), Equals, XLatticeMsg_Ack)
	c.Assert(ack.GetMsgN(), Equals, SIX)
	c.Assert(ack.GetYourMsgN(), Equals, FIVE)
	// -- STOP THE SERVER -------------------------------------------
	stopCh <- true
	select {
	case <-stoppedCh:
	case <-time.After(100 * time.Millisecond):
		// don't hang the suite if the handler never acknowledges the stop
	}
} // END HANDLER
// TestHelloFromStranger checks that a hello from a node that is not a
// known peer gets no reply: the server should simply drop the connection.
// (The final disconnected-state assertion is commented out below because
// of a known GetState() deficiency.)
func (s *XLSuite) TestHelloFromStranger(c *C) {
	if VERBOSITY > 0 {
		fmt.Println("TEST_HELLO_FROM_STRANGER")
	}
	myNode, myAcc := s.makeANode(c)
	defer myAcc.Close()
	c.Assert(myAcc, Not(IsNil))
	myAccEP := myAcc.GetEndPoint()
	myCtor, err := xt.NewTcpConnector(myAccEP)
	c.Assert(err, IsNil)
	// myNode's server side
	stopCh := make(chan bool, 1)
	stoppedCh := make(chan bool, 1)
	go func() {
		for {
			cnx, err := myAcc.Accept()
			if err != nil {
				// acceptor closed: end the accept loop
				break
			}
			c.Assert(err, IsNil)
			// each connection handled by a separate goroutine
			go func() {
				_, _ = NewInHandler(myNode, cnx, stopCh, stoppedCh)
			}()
		}
	}()
	// Create a second mock peer unknown to myNode.
	badGuy, badAcc := s.makeANode(c)
	defer badAcc.Close()
	// XXX HELLO_MSG IS OBSOLETE; it's done with RSA/AES handshake
	badHello, err := MakeHelloMsg(badGuy)
	c.Assert(err, IsNil)
	c.Assert(badHello, Not(IsNil))
	time.Sleep(100 * time.Millisecond)
	// Unknown peer sends Hello. Test node should just drop the
	// connection. It is an error if we receive a reply.
	conn, err := myCtor.Connect(xt.ANY_TCP_END_POINT)
	c.Assert(err, IsNil)
	c.Assert(conn, Not(IsNil))
	cnx := conn.(*xt.TcpConnection)
	defer cnx.Close()
	data, err := EncodePacket(badHello)
	c.Assert(err, IsNil)
	c.Assert(data, Not(IsNil))
	count, err := cnx.Write(data)
	c.Assert(err, IsNil)
	c.Assert(count, Equals, len(data))
	time.Sleep(100 * time.Millisecond)
	// XXX THIS TEST FAILS because of a deficiency in
	// transport/tcp_connection.GetState() - it does not look at
	// the state of the underlying connection
	// c.Assert(cnx.GetState(), Equals, xt.DISCONNECTED)
	// -- STOP THE SERVER -------------------------------------------
	stopCh <- true
	select {
	case <-stoppedCh:
	case <-time.After(100 * time.Millisecond):
		// don't hang the suite if the handler never acknowledges the stop
	}
}
// -- ILL-FORMED HELLO ------------------------------------------
// Known peer sends Hello with at least one of cKey or sKey wrong.
// We expect to receive an error msg and then the connection
// should be closed.
// XXX STUB XXX
// -- SECOND WELL-FORMED HELLO ----------------------------------
// In this implementation, a second hello is an error and like all
// errors will cause the peer to close the connection.
// --------------------------------------------------------------
// TestSecondHello opens a session with a valid hello, then sends a second
// hello on the same connection. The server treats the duplicate as an
// error and replies with an Error message (MsgN FOUR) instead of an Ack.
func (s *XLSuite) TestSecondHello(c *C) {
	if VERBOSITY > 0 {
		fmt.Println("TEST_SECOND_HELLO")
	}
	// Create a node and add a mock peer. This is a cluster of 2.
	nodes, accs := xn.MockLocalHostCluster(2)
	defer func() {
		for i := 0; i < 2; i++ {
			if accs[i] != nil {
				accs[i].Close()
			}
		}
	}()
	serverNode, clientNode := nodes[0], nodes[1]
	serverAsPeer := clientNode.GetPeer(0)
	serverAcc := accs[0]
	c.Assert(serverAcc, Not(IsNil))
	serverAccEP := serverAcc.GetEndPoint()
	serverCtor, err := xt.NewTcpConnector(serverAccEP)
	c.Assert(err, IsNil)
	// serverNode's server side
	stopCh := make(chan bool, 1)
	stoppedCh := make(chan bool, 1)
	// XXX If you comment out this goroutine, there are no mysterious
	// failures.
	go func() {
		for {
			cnx, err := serverAcc.Accept()
			// ADDING THIS ELIMINATES MYSTERY FAILURES
			if err != nil {
				break
			}
			// each connection handled by a separate goroutine
			go func() {
				_, _ = NewInHandler(serverNode, cnx, stopCh, stoppedCh)
			}()
		}
	}() // END FUNC
	// -- WELL-FORMED HELLO -----------------------------------------
	// Known peer sends Hello with all parameters correct. Server
	// replies with an Ack and advance state to open.
	conn, err := serverCtor.Connect(xt.ANY_TCP_END_POINT)
	c.Assert(err, IsNil)
	c.Assert(conn, Not(IsNil))
	cnx2 := conn.(*xt.TcpConnection)
	defer cnx2.Close()
	oh := &OutHandler{Node: clientNode,
		CnxHandler: CnxHandler{Cnx: cnx2, Peer: serverAsPeer}}
	err = oh.SendHello()
	c.Assert(err, IsNil)
	// wait for ack
	ack, err := oh.readMsg()
	c.Assert(err, IsNil) // XXX "EOF" instead
	c.Assert(ack, Not(IsNil))
	// verify msg returned is an ack and has the correct parameters
	c.Assert(ack.GetOp(), Equals, XLatticeMsg_Ack)
	c.Assert(ack.GetMsgN(), Equals, TWO)
	c.Assert(ack.GetYourMsgN(), Equals, ONE) // FOO
	// -- SECOND WELL-FORMED HELLO ----------------------------------
	// manually create and send a hello message -
	// XXX HELLO_MSG IS OBSOLETE; it's done with RSA/AES handshake
	peerHello, err := MakeHelloMsg(clientNode)
	c.Assert(err, IsNil)
	c.Assert(peerHello, Not(IsNil))
	data, err := EncodePacket(peerHello)
	c.Assert(err, IsNil)
	c.Assert(data, Not(IsNil))
	count, err := cnx2.Write(data)
	c.Assert(err, IsNil)
	c.Assert(count, Equals, len(data))
	oh.MsgN = ONE
	// end manual hello -------------------------
	// wait for error message
	reply, err := oh.readMsg()
	c.Assert(err, IsNil)
	c.Assert(reply, Not(IsNil))
	// verify msg returned is an reply and has the correct parameters
	c.Assert(reply.GetOp(), Equals, XLatticeMsg_Error)
	c.Assert(reply.GetMsgN(), Equals, FOUR)
	// -- STOP THE SERVER -------------------------------------------
	stopCh <- true
	select {
	case <-stoppedCh:
	case <-time.After(100 * time.Millisecond):
		// don't hang the suite if the handler never acknowledges the stop
	}
}
|
package parser
// MapArgs is an Args implementation which is used for the type
// inference necessary to support the postgres wire protocol.
// See various TypeCheck() implementations for details.
//
// Keys are 1-indexed: the first placeholder argument is stored under "1".
type MapArgs map[string]Datum
|
package handler
import (
"fmt"
"github.com/gin-gonic/gin"
"log"
"net/http"
"proxy_download/model"
"regexp"
"strconv"
"strings"
)
// EmailDetail returns the email record identified by the :id path
// parameter as {"list": ...}.
func EmailDetail(context *gin.Context) {
	var email model.Email
	idString := context.Param("id")
	id, err := strconv.Atoi(idString)
	if err != nil {
		// Previously the Atoi error was discarded and id 0 queried.
		context.JSON(http.StatusBadRequest, Data{"err": "invalid id: " + idString})
		return
	}
	emailDetail, err := email.Detail(id)
	if err != nil {
		fmt.Println("query table email err = ", err)
		// Previously this path returned without writing any response,
		// leaving the client to time out.
		context.JSON(http.StatusBadRequest, Data{"err": err.Error()})
		return
	}
	context.JSON(http.StatusOK, Data{"list": emailDetail})
}
// EmailEdit creates or updates an email record: a bound Email with a
// non-zero ID updates the existing row, otherwise a new row is inserted
// and its ID reported back.
func EmailEdit(context *gin.Context) {
	var email model.Email
	if err := context.ShouldBind(&email); err != nil {
		context.JSON(http.StatusOK, Data{"err": "输入的数据不合法"})
		// log.Panicln here would panic after the response was already
		// written; logging and returning is sufficient.
		log.Println("err ->", err.Error())
		return
	}
	if email.ID != 0 {
		if err := email.Update(); err != nil {
			fmt.Println("update email err = ", err)
			context.JSON(http.StatusBadRequest, Data{"err": "update email err" + err.Error()})
			return
		}
		context.JSON(http.StatusOK, Data{"msg": "update email success"})
		return
	}
	id, err := email.Save()
	if err != nil {
		fmt.Println("save email err ", err)
		context.JSON(http.StatusBadRequest, Data{"err": "save email err" + err.Error()})
		return
	}
	context.JSON(http.StatusOK, Data{"msg": "save email success, id:" + strconv.Itoa(id)})
}
// EmailList returns a page of email records plus the total count.
// Unparseable page/pagesize query values fall back to 1 and 10.
func EmailList(context *gin.Context) {
	var email model.Email
	// Previously both Atoi errors were silently dropped (the second even
	// overwrote the first), passing a garbage 0 on bad input; fall back
	// to sane defaults instead. The stray debug Println is removed.
	page, err := strconv.Atoi(context.DefaultQuery("page", "1"))
	if err != nil {
		page = 1
	}
	pagesize, err := strconv.Atoi(context.DefaultQuery("pagesize", "10"))
	if err != nil {
		pagesize = 10
	}
	emails, count, err := email.List(page, pagesize)
	if err != nil {
		err := fmt.Errorf("query table email err = %v", err.Error())
		fmt.Println(err)
		context.JSON(http.StatusBadGateway, err)
		return
	}
	context.JSON(http.StatusOK, Data{"list": emails, "count": count})
}
// EmailDel deletes one email or several, depending on the JSON payload:
// a numeric "ids" deletes a single record, an array deletes a batch.
func EmailDel(context *gin.Context) {
	var emails NullMap
	var email model.Email
	if err := context.BindJSON(&emails); err != nil {
		log.Println("json.Unmarshal err = ", err)
		context.JSON(http.StatusOK, Data{"err": "get ids error"})
		return
	}
	// JSON numbers decode to float64 and JSON arrays to []interface{},
	// so the dynamic type of "ids" selects single vs. bulk delete.
	switch ids := emails["ids"].(type) {
	case float64:
		if err := email.Delete(ids); err != nil {
			fmt.Println("delete email err :", err)
			context.JSON(http.StatusBadRequest, Data{"err": err})
			return
		}
		context.JSON(http.StatusOK, Data{"msg": "del success"})
	case []interface{}:
		if err := email.Deletes(ids); err != nil {
			fmt.Println("list delete email err :", err)
			context.JSON(http.StatusBadRequest, Data{"err": err})
			return
		}
		context.JSON(http.StatusOK, Data{"msg": "del list success"})
	default:
		// Previously an unexpected (or missing) ids field produced no
		// response at all, leaving the client to time out.
		context.JSON(http.StatusBadRequest, Data{"err": "ids must be a number or a list"})
	}
}
// EmailNameValidate reports whether the submitted name is valid for the
// "emails" table, delegating to the shared NameValidate helper.
func EmailNameValidate(context *gin.Context) {
	result, err := NameValidate(context, "emails")
	if err == nil {
		context.JSON(http.StatusOK, result)
		return
	}
	fmt.Println("err = ", err)
	context.JSON(http.StatusBadRequest, Data{"err": err.Error()})
}
func EmailToUserListValidate(context *gin.Context) {
var params = struct {
ToUserList string `json:"to_user_list"`
}{}
result := true
err := context.BindJSON(¶ms)
if err != nil {
fmt.Println("context.BindJSON EmailToUserListValidate err = ", err)
context.JSON(http.StatusBadRequest, Data{"err": err.Error()})
return
}
UserList := strings.Split(params.ToUserList, ",")
reg := regexp.MustCompile("^.+@(\\[?)[a-zA-Z0-9\\-.]+\\.([a-zA-Z]{2,3}|[0-9]{1,3})(]?)$")
for i := 0; i < len(UserList); i++ {
regResult := reg.FindAllStringSubmatch(UserList[i], -1)
if len(regResult) == 0 {
result = false
}
}
context.JSON(http.StatusOK, result)
}
|
/*
You're given strings J representing the types of stones that are jewels, and S representing the stones you have. Each character in S is a type of stone you have. You want to know how many of the stones you have are also jewels.
The letters in J are guaranteed distinct, and all characters in J and S are letters. Letters are case sensitive, so "a" is considered a different type of stone from "A".
*/
package main
import "fmt"
func main() {
	// Expected output: 3 (a, A, A are jewels) then 0 (case-sensitive).
	for _, tc := range []struct{ jewels, stones string }{
		{"aA", "aAAbbbb"},
		{"z", "ZZ"},
	} {
		fmt.Println(count(tc.jewels, tc.stones))
	}
}
// count returns how many characters of s are jewel types listed in j.
// Comparison is case-sensitive.
func count(j, s string) int {
	jewels := make(map[rune]struct{}, len(j))
	for _, r := range j {
		jewels[r] = struct{}{}
	}
	total := 0
	for _, r := range s {
		if _, ok := jewels[r]; ok {
			total++
		}
	}
	return total
}
|
package main
import (
"fmt"
"os"
)
// test prints each variadic argument preceded by its position.
func test(s ...string) {
	for i := 0; i < len(s); i++ {
		fmt.Println(i, s[i])
	}
}
func main() {
	args := os.Args
	// os.Args always contains at least the program name; with no extra
	// arguments there is nothing interesting to demonstrate.
	if len(args) < 2 {
		return
	}
	test(args...)
	test("Ankita", "somi", "harsh", "mansi", "deepika")
}
|
package main
import (
"fmt"
"html/template"
"io/ioutil"
"log"
"net/http"
"regexp"
)
// templates holds both page templates, parsed exactly once at startup.
var templates *template.Template

// validPath restricts request URLs to /edit/, /save/ or /view/ followed
// by an alphanumeric page title.
var validPath *regexp.Regexp

func init() {
	// call ParseFiles once at program initialization,
	// parsing all templates into a single *Template.
	// Then we can use the ExecuteTemplate method to render
	// a specific template.
	templates = template.Must(template.ParseFiles("edit.html", "view.html"))
	// small protection against user input
	validPath = regexp.MustCompile("^/(edit|save|view)/([a-zA-Z0-9]+)$")
}
// handler is a catch-all that greets whatever path it is given, with the
// leading slash stripped.
func handler(w http.ResponseWriter, r *http.Request) {
	name := r.URL.Path[1:]
	fmt.Fprintf(w, "Hi there, I love %s!", name)
}
// viewHandler renders the page named ttl, or redirects to its edit form
// when the page does not exist yet so it can be created.
func viewHandler(w http.ResponseWriter, r *http.Request, ttl string) {
	if p, err := loadPage(ttl); err == nil {
		renderTemplate(w, "view", p)
		return
	}
	// The http.Redirect function adds an HTTP status code of
	// http.StatusFound (302) and a Location header to the response.
	http.Redirect(w, r, "/edit/"+ttl, http.StatusFound)
}
// saveHandler persists the posted "body" form field as page ttl, then
// redirects to that page's view.
func saveHandler(w http.ResponseWriter, r *http.Request, ttl string) {
	page := &Page{Title: ttl, Body: []byte(r.FormValue("body"))}
	if err := page.save(); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	http.Redirect(w, r, "/view/"+ttl, http.StatusFound)
}
// editHandler shows the edit form for page ttl, starting from an empty
// page when it does not exist on disk yet.
func editHandler(w http.ResponseWriter, r *http.Request, ttl string) {
	page, err := loadPage(ttl)
	if err != nil {
		page = &Page{Title: ttl}
	}
	renderTemplate(w, "edit", page)
}
func main() {
http.HandleFunc("/view/", makeHandler(viewHandler))
http.HandleFunc("/save/", makeHandler(saveHandler))
http.HandleFunc("/edit/", makeHandler(editHandler))
log.Fatal(http.ListenAndServe(":8080", nil))
}
// makeHandler adapts a title-taking page handler into an http.HandlerFunc,
// answering 404 for any path that does not match validPath.
func makeHandler(fn func(w http.ResponseWriter, r *http.Request, ttl string)) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		match := validPath.FindStringSubmatch(r.URL.Path)
		if match == nil {
			http.NotFound(w, r)
			return
		}
		// match[2] is the page title captured by the second group.
		fn(w, r, match[2])
	}
}
// renderTemplate executes the named pre-parsed template against p,
// reporting any execution failure to the client as a 500.
func renderTemplate(w http.ResponseWriter, tpl string, p *Page) {
	if err := templates.ExecuteTemplate(w, tpl+".html", p); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}
// loadPage reads "<t>.txt" from the working directory and wraps it in a
// Page; the read error is returned unchanged (e.g. for a missing file).
func loadPage(t string) (*Page, error) {
	body, err := ioutil.ReadFile(t + ".txt")
	if err != nil {
		return nil, err
	}
	return &Page{Title: t, Body: body}, nil
}
|
package authorization
import (
"fmt"
"regexp"
"strings"
"github.com/authelia/authelia/v4/internal/utils"
)
// NewAccessControlDomain creates an AccessControlDomain rule from a plain
// domain string, and reports via subjects whether the rule depends on the
// request subject (i.e. uses a {user} or {group} placeholder).
// (The named result was previously misspelled "subjcets", and the comment
// described a SubjectObjectMatcher rather than the actual return values.)
func NewAccessControlDomain(domain string) (subjects bool, rule AccessControlDomain) {
	m := &AccessControlDomainMatcher{}
	domain = strings.ToLower(domain)
	switch {
	case strings.HasPrefix(domain, "*."):
		// Keep the leading "." so matching reduces to a suffix check.
		m.Wildcard = true
		m.Name = domain[1:]
	case strings.HasPrefix(domain, "{user}"):
		m.UserWildcard = true
		m.Name = domain[6:]
	case strings.HasPrefix(domain, "{group}"):
		m.GroupWildcard = true
		m.Name = domain[7:]
	default:
		m.Name = domain
	}
	return m.UserWildcard || m.GroupWildcard, AccessControlDomain{m}
}
// NewAccessControlDomainRegex creates an AccessControlDomain rule from a
// compiled pattern, matching either in a basic way or dynamically via
// User/Group subexpression groups; subjects reports whether such dynamic
// groups are present.
func NewAccessControlDomainRegex(pattern regexp.Regexp) (subjects bool, rule AccessControlDomain) {
	iuser, igroup := -1, -1
	for i, name := range pattern.SubexpNames() {
		if name == subexpNameUser {
			iuser = i
		} else if name == subexpNameGroup {
			igroup = i
		}
	}
	if iuser == -1 && igroup == -1 {
		// No dynamic groups: plain regex matching, subject-independent.
		return false, AccessControlDomain{RegexpStringSubjectMatcher{pattern}}
	}
	return true, AccessControlDomain{RegexpGroupStringSubjectMatcher{pattern, iuser, igroup}}
}
// AccessControlDomainMatcher is the basic domain matcher.
type AccessControlDomainMatcher struct {
	// Name is the domain to match; for the wildcard/placeholder forms it
	// is the ".suffix" remaining after the prefix is stripped.
	Name string
	// Wildcard marks a "*."-prefixed domain (plain suffix match).
	Wildcard bool
	// UserWildcard marks a "{user}"-prefixed domain.
	UserWildcard bool
	// GroupWildcard marks a "{group}"-prefixed domain.
	GroupWildcard bool
}
// IsMatch returns true if this rule matches.
// Fix: the GroupWildcard branch indexed with strings.Index without checking
// for -1; a domain containing no "." would panic with an out-of-range slice.
func (m AccessControlDomainMatcher) IsMatch(domain string, subject Subject) (match bool) {
	switch {
	case m.Wildcard:
		return strings.HasSuffix(domain, m.Name)
	case m.UserWildcard:
		// Anonymous subjects match any non-empty label before the suffix.
		if subject.IsAnonymous() && strings.HasSuffix(domain, m.Name) {
			return len(domain) > len(m.Name)
		}
		return domain == fmt.Sprintf("%s%s", subject.Username, m.Name)
	case m.GroupWildcard:
		if subject.IsAnonymous() && strings.HasSuffix(domain, m.Name) {
			return len(domain) > len(m.Name)
		}
		i := strings.Index(domain, ".")
		if i < 0 {
			// No subdomain label to compare against the subject's groups.
			return false
		}
		return domain[i:] == m.Name && utils.IsStringInSliceFold(domain[:i], subject.Groups)
	default:
		return strings.EqualFold(domain, m.Name)
	}
}
// AccessControlDomain represents an ACL domain.
// Matcher is one of the concrete StringSubjectMatcher implementations built
// by NewAccessControlDomain / NewAccessControlDomainRegex.
type AccessControlDomain struct {
	Matcher StringSubjectMatcher
}
// IsMatch returns true if the ACL domain matches the object domain.
// It simply delegates to the underlying matcher with the object's domain.
func (acl AccessControlDomain) IsMatch(subject Subject, object Object) (match bool) {
	return acl.Matcher.IsMatch(object.Domain, subject)
}
|
package integers
// Add takes two integers and returns the sum of them.
// Fix: gofmt-clean formatting and a direct return instead of a naked return
// with a named result (the return type is unchanged).
func Add(x, y int) int {
	return x + y
}
|
package main
import (
"flag"
"fmt"
"log"
"os"
"path/filepath"
"github.com/gomods/athens/cmd/proxy/actions"
"github.com/gomods/athens/pkg/build"
"github.com/gomods/athens/pkg/config"
)
var (
	// configFile is the path to the TOML configuration; the relative default
	// suits running from the proxy command directory during development.
	configFile = flag.String("config_file", filepath.Join("..", "..", "config.dev.toml"), "The path to the config file")
	// version, when set, prints build information and exits.
	version = flag.Bool("version", false, "Print version information and exit")
)
// main parses flags, optionally prints version info, then loads the config
// and serves the proxy application, exiting fatally on any error.
func main() {
	flag.Parse()
	if *version {
		fmt.Println(build.String())
		os.Exit(0)
	}
	// Fix: flag.String never returns a nil pointer, so the old `configFile ==
	// nil` check was dead code; validate the flag's value instead.
	if *configFile == "" {
		log.Fatal("Invalid config file path provided")
	}
	conf, err := config.ParseConfigFile(*configFile)
	if err != nil {
		log.Fatal(err)
	}
	app, err := actions.App(conf)
	if err != nil {
		log.Fatal(err)
	}
	if err := app.Serve(); err != nil {
		log.Fatal(err)
	}
}
|
package ppu
import (
"github.com/vfreex/gones/pkg/emulator/memory"
)
// The logical screen resolution processed by the PPU is 256x240 pixels
// The PPU renders 262 scanlines per frame.
// Each scanline lasts for 341 PPU clock cycles (113.667 CPU clock cycles; 1 CPU cycle = 3 PPU cycles),
// with each clock cycle producing one pixel
// renderSprites performs the dot-indexed portion of sprite evaluation and
// pattern fetching for the current scanline: snapshot at dot 0, counter reset
// at dot 64, OAM evaluation at dot 256, and per-sprite tile-row fetches over
// dots 257-320.
func (ppu *PPUImpl) renderSprites() {
	// http://wiki.nesdev.com/w/index.php/PPU_sprite_evaluation
	dot := ppu.dotInScanline
	y := ppu.scanline //- 21
	if y == 261 {
		// Pre-render line: evaluated as if it were line 255 — TODO confirm.
		y = 255
	}
	switch {
	case dot == 0:
		// Latch the sprites evaluated on the previous line for drawing on this one.
		ppu.currentSpritesCount = ppu.spriteCount
		ppu.currentSprites = ppu.sprites
	case dot == 64:
		// Begin a fresh evaluation pass for the next scanline.
		ppu.spriteCount = 0
		ppu.registers.status &= ^PPUStatus_SpriteOverflow
	case dot == 256:
		// Scan all 64 OAM entries (4 bytes each) for sprites crossing line y.
		for spriteIndex := 0; spriteIndex < 64; spriteIndex++ {
			spriteY := int(ppu.sprRam.Peek(memory.Ptr(spriteIndex * 4)))
			deltaY := y - spriteY
			spriteHeight := 8
			if ppu.registers.ctrl&PPUCtrl_SpriteSize != 0 {
				spriteHeight = 16
			}
			if deltaY < 0 || deltaY >= spriteHeight {
				// sprite is not in range
				continue
			}
			// the sprite is in this scanline
			if ppu.spriteCount >= 8 {
				// sprite overflow
				// TODO: implement hardware bug
				ppu.registers.status |= PPUStatus_SpriteOverflow
				break
			}
			// evaluate sprite
			sprite := &ppu.sprites[ppu.spriteCount]
			sprite.Id = spriteIndex
			sprite.Y = spriteY
			sprite.TileId = int(ppu.sprRam.Peek(memory.Ptr(spriteIndex*4 + 1)))
			sprite.X = int(ppu.sprRam.Peek(memory.Ptr(spriteIndex*4 + 3)))
			sprite.Attr.Unmarshal(ppu.sprRam.Peek(memory.Ptr(spriteIndex*4 + 2)))
			ppu.spriteCount++
		}
		if ppu.spriteCount > 0 {
			logger.Debugf("renderSprites: Scanline #%d has %d sprites.", y, ppu.spriteCount)
		}
	case dot >= 257 && dot <= 320:
		// sprite fetches: 8 dots per sprite; low plane at sub-dot 5, high at 7.
		i := (dot - 257) / 8
		if i >= ppu.spriteCount {
			break
		}
		sprite := &ppu.sprites[i]
		switch (dot - 257) % 8 {
		case 5:
			addr := ppu.spriteTileAddr(sprite)
			sprite.TileRowLow = ppu.vram.Peek(addr)
		case 7:
			addr := ppu.spriteTileAddr(sprite)
			sprite.TileRowHigh = ppu.vram.Peek(addr + 8)
		}
	}
}
// spriteTileAddr computes the pattern-table address of the row of the given
// sprite that intersects the current scanline, honoring 8x8 vs 8x16 mode and
// vertical flipping.
func (ppu *PPUImpl) spriteTileAddr(sprite *Sprite) memory.Ptr {
	y := ppu.scanline
	var addr memory.Ptr
	var spriteHeight int
	if ppu.registers.ctrl&PPUCtrl_SpriteSize == 0 {
		// 8*8 sprite: pattern table selected by PPUCTRL, 16 bytes per tile.
		spriteHeight = 8
		if ppu.registers.ctrl&PPUCtrl_SpritePatternTable != 0 {
			addr = 0x1000
		}
		addr += memory.Ptr(sprite.TileId * 16)
	} else {
		// 8 * 16 sprite: bit 0 of the tile id selects the pattern table,
		// the remaining bits select the (even) tile number.
		spriteHeight = 16
		if sprite.TileId&1 != 0 {
			addr = 0x1000
		}
		// Same value as the original "TileId & ^1 * 16" (& and * share
		// precedence, left-assoc), written with the dedicated AND NOT
		// operator and explicit parentheses for clarity.
		addr += memory.Ptr((sprite.TileId &^ 1) * 16)
	}
	deltaY := y - sprite.Y
	if sprite.Attr.VerticalFlip {
		deltaY ^= spriteHeight - 1 // i.e. deltaY = spriteHeight - 1 - deltaY
	}
	// Rows 8-15 of an 8x16 sprite live in the next 16-byte tile, hence the
	// extra deltaY&0x8 offset.
	addr += memory.Ptr(deltaY + deltaY&0x8)
	return addr
}
// fillShifters reloads the low byte of each 16-bit background shift register
// from the latches populated by the preceding tile fetches, leaving the high
// byte (the tile currently being emitted) untouched.
func (ppu *PPUImpl) fillShifters() {
	ppu.registers.bgHighShift = ppu.registers.bgHighShift&0xff00 | uint16(ppu.registers.bgHighLatch)
	ppu.registers.bgLowShift = ppu.registers.bgLowShift&0xff00 | uint16(ppu.registers.bgLowLatch)
	ppu.registers.attrHighShift = ppu.registers.attrHighShift&0xff00 | uint16(ppu.registers.attrHighLatch)
	ppu.registers.attrLowShift = ppu.registers.attrLowShift&0xff00 | uint16(ppu.registers.attrLowLatch)
}
// drawPixel emits one pixel for the current dot by muxing the background
// shift registers with the sprites evaluated for this scanline, then shifts
// the background registers by one bit.
func (ppu *PPUImpl) drawPixel() {
	// The emitted pixel trails the dot counter by 2.
	x := ppu.dotInScanline - 2
	y := ppu.scanline
	if y >= 0 && y < VisualScanlines && x >= 0 && x < VisualDotsPerScanline {
		var currentPalette byte
		// Draw background: bit 15-fineX of each shifter is the current pixel.
		if ppu.registers.mask&PPUMask_BackgroundVisibility != 0 &&
			(ppu.registers.mask&PPUMask_NoBackgroundClipping != 0 || x >= 8) {
			fineX := ppu.registers.x
			currentPalette = byte(ppu.registers.bgHighShift>>byte(15-fineX)&1<<1 |
				ppu.registers.bgLowShift>>byte(15-fineX)&1)
			if currentPalette > 0 {
				// Non-transparent: add the 2-bit attribute (palette select).
				attr := byte(ppu.registers.attrHighShift>>byte(15-fineX)&1<<1 |
					ppu.registers.attrLowShift>>byte(15-fineX)&1)
				currentPalette |= attr & 3 << 2
			}
		}
		// Draw sprites
		if ppu.registers.mask&PPUMask_SpriteVisibility != 0 &&
			(ppu.registers.mask&PPUMask_NoSpriteClipping != 0 || x >= 8) {
			// Each four bytes in SPR-RAM define attributes for one sprite
			for spriteIndex := 0; spriteIndex < ppu.currentSpritesCount; spriteIndex++ {
				sprite := &ppu.currentSprites[spriteIndex]
				deltaX := x - sprite.X
				if deltaX < 0 || deltaX >= 8 {
					// sprite is not in range
					continue
				}
				if sprite.Attr.HorizontalFlip {
					deltaX ^= 7 // i.e. delta = 7 - delta
				}
				colorLow := sprite.TileRowLow >> byte(7-deltaX) & 1
				colorHigh := sprite.TileRowHigh >> byte(7-deltaX) & 1
				spritePalette := byte(colorLow | colorHigh<<1)
				if spritePalette == 0 {
					// transparent pixel
					continue
				}
				if sprite.Id == 0 && currentPalette != 0 {
					// set sprite 0 hit flag
					ppu.registers.status |= PPUStatus_Sprite0Hit
				}
				if currentPalette != 0 && sprite.Attr.BackgroundPriority {
					// background pixel covers this sprite pixel
					continue
				}
				// Sprite palettes occupy the 0x10-0x1F half of palette RAM.
				spritePalette |= byte(sprite.Attr.PaletteId << 2)
				currentPalette = spritePalette + 0x10
				break
			}
		}
		color := ppu.Palette.Peek(0x3F00 + memory.Ptr(currentPalette))
		if ppu.registers.mask&PPUMask_Greyscale != 0 {
			color &= 0x30
		}
		ppu.RenderedBuffer[y][x] = Color(color).ToGRBColor()
	}
	// Shift the background registers once per dot, drawn or not.
	ppu.registers.bgHighShift <<= 1
	ppu.registers.bgLowShift <<= 1
	ppu.registers.attrHighShift <<= 1
	ppu.registers.attrLowShift <<= 1
}
// fetchBgTileRow performs one sub-step of the 8-dot background tile fetch
// cycle: nametable byte (step 1), attribute byte (3), low bitmap plane (5)
// and high bitmap plane (7). Intermediate results land in the bg*/attr*
// latches consumed by fillShifters.
func (ppu *PPUImpl) fetchBgTileRow(step int) {
	switch step {
	case 1:
		// fetch nametable
		ntAddr := 0x2000 | ppu.registers.v.Address()&0xfff
		ppu.registers.bgNameLatch = ppu.vram.Peek(ntAddr)
	case 3:
		// fetch attrtable
		v := ppu.registers.v.Address()
		attrAddr := 0x23C0 | v&0x0C00 | v>>4&0x38 | v>>2&0x07
		attr := ppu.vram.Peek(attrAddr)
		// Each attribute byte covers a 4x4-tile area; pick the 2-bit quadrant.
		if ppu.registers.v.CoarseY()%4 >= 2 {
			attr >>= 4
		}
		if ppu.registers.v.CoarseX()%4 >= 2 {
			attr >>= 2
		}
		// Expand the palette id into full-byte latches so the attribute
		// shifters stay aligned with the bitmap shifters.
		paletteId := attr & 3
		if paletteId&1 != 0 {
			ppu.registers.attrLowLatch = 0xff
		} else {
			ppu.registers.attrLowLatch = 0
		}
		if paletteId&2 != 0 {
			ppu.registers.attrHighLatch = 0xff
		} else {
			ppu.registers.attrHighLatch = 0
		}
	case 5:
		// fetch bitmap low from pattern table
		lowAddr := memory.Ptr(ppu.registers.bgNameLatch)*16 + memory.Ptr(ppu.registers.v.FineY())
		if ppu.registers.ctrl&PPUCtrl_BackgroundPatternTable != 0 {
			lowAddr |= 0x1000
		}
		ppu.registers.bgLowLatch = ppu.vram.Peek(lowAddr)
	case 7:
		// fetch bitmap high from pattern table (8 bytes after the low plane)
		highAddr := memory.Ptr(ppu.registers.bgNameLatch)*16 + 8 + memory.Ptr(ppu.registers.v.FineY())
		if ppu.registers.ctrl&PPUCtrl_BackgroundPatternTable != 0 {
			highAddr |= 0x1000
		}
		ppu.registers.bgHighLatch = ppu.vram.Peek(highAddr)
		logger.Debugf("at (%v, %v): v=%v", ppu.scanline, ppu.dotInScanline, ppu.registers.v.String())
	}
}
// Step advances the PPU by one dot (one PPU clock): pre-render line 261,
// visible lines 0-239, the post line 240 frame callback, and the VBlank
// flag/NMI at line 241; finally the dot/scanline/frame counters advance.
func (ppu *PPUImpl) Step() {
	// http://wiki.nesdev.com/w/index.php/PPU_rendering
	// http://wiki.nesdev.com/w/index.php/File:Ntsc_timing.png
	scanline := ppu.scanline
	dot := ppu.dotInScanline
	switch {
	case scanline == 261: // pre
		switch {
		case dot == 1:
			// Clear the per-frame status flags at the start of the pre-render line.
			ppu.registers.status &= ^(PPUStatus_Sprite0Hit | PPUStatus_SpriteOverflow | PPUStatus_VBlank)
		case dot >= 280 && dot <= 304:
			// While rendering is enabled, re-copy the vertical scroll bits from t to v.
			if ppu.registers.mask&(PPUMask_BackgroundVisibility|PPUMask_SpriteVisibility) != 0 {
				ppu.registers.v.SetCoarseY(ppu.registers.t.CoarseY())
				ppu.registers.v.SetFineY(ppu.registers.t.FineY())
				ppu.registers.v.SetNametable(ppu.registers.v.Nametable()&1 | ppu.registers.t.Nametable()&2)
			}
		case dot == 340 && ppu.frame&1 != 0 &&
			ppu.registers.mask&(PPUMask_BackgroundVisibility|PPUMask_SpriteVisibility) != 0:
			//on every odd frame, scanline 0, dot 0 is skipped
			ppu.dotInScanline = 0
			ppu.scanline = 0
			goto end
		}
		// The pre-render line otherwise behaves like a visible line.
		fallthrough
	case scanline >= 0 && scanline <= 239: // Visible scanlines
		ppu.renderSprites()
		if dot >= 2 && dot <= 257 || dot >= 322 && dot <= 337 {
			ppu.drawPixel()
			if dot%8 == 1 {
				ppu.fillShifters()
			}
		}
		switch {
		case dot == 0: // idle
		case dot >= 1 && dot <= 256: // fetches 3rd..34th tile in scanline
			if scanline == 261 {
				break
			}
			ppu.fetchBgTileRow((dot - 1) % 8)
			if ppu.registers.mask&(PPUMask_BackgroundVisibility|PPUMask_SpriteVisibility) != 0 {
				if dot%8 == 0 { // dots 8, 16, 24, ..., 256: increase coarseX
					ppu.registers.v.IncreaseCoarseX()
				}
				if dot == 256 { // increase fineY
					ppu.registers.v.IncreaseFineY()
				}
			}
		case dot == 257:
			// Re-copy the horizontal scroll bits from t to v.
			if ppu.registers.mask&(PPUMask_BackgroundVisibility|PPUMask_SpriteVisibility) != 0 {
				ppu.registers.v.SetCoarseX(ppu.registers.t.CoarseX())
				ppu.registers.v.SetNametable(ppu.registers.v.Nametable()&2 | ppu.registers.t.Nametable()&1)
			}
		case dot >= 257 && dot <= 320: // fetching the sprites on the next scanline
		case dot >= 321 && dot <= 336: // fetching the first two tiles for the next scanline
			ppu.fetchBgTileRow((dot - 321) % 8)
			if ppu.registers.mask&(PPUMask_BackgroundVisibility|PPUMask_SpriteVisibility) != 0 {
				if dot%8 == 0 { // dots 328, 336: increase coarseX
					ppu.registers.v.IncreaseCoarseX()
				}
			}
		case dot >= 337 && dot <= 340:
		}
	case scanline == 240: // post scanline
		if dot == 0 && ppu.NewFrameHandler != nil {
			ppu.NewFrameHandler(&ppu.RenderedBuffer, ppu.frame)
		}
	case scanline == 241: // VINT
		if dot == 1 {
			ppu.registers.status |= PPUStatus_VBlank
			if ppu.registers.ctrl&PPUCtrl_NMIOnVBlank != 0 {
				ppu.cpu.NMI = true
			}
		}
	}
end:
	// Advance the dot, wrapping into the next scanline/frame as needed.
	ppu.dotInScanline++
	if ppu.dotInScanline >= DotsPerScanline {
		ppu.dotInScanline %= DotsPerScanline
		ppu.scanline++
		if ppu.scanline >= ScanlinesPerFrame {
			ppu.scanline %= ScanlinesPerFrame
			ppu.frame++
		}
	}
}
|
// Package app contains business object (BO) and data access object (DAO) implementations for Application.
package app
import (
"encoding/json"
"log"
"net/url"
"reflect"
"sort"
"strings"
"github.com/btnguyen2k/consu/reddo"
"github.com/btnguyen2k/henge"
"main/src/gvabe/bo"
)
// NewApp is helper function to create new App bo.
// The new app is active by default, owned by ownerId, carries the trimmed
// description, and is sync()'d before being returned.
func NewApp(tagVersion uint64, id, ownerId, desc string) *App {
	ubo := henge.NewUniversalBo(id, tagVersion, henge.UboOpt{TimeLayout: bo.UboTimeLayout, TimestampRounding: bo.UboTimestampRounding})
	app := &App{UniversalBo: ubo}
	app.SetOwnerId(ownerId)
	app.SetAttrsPublic(AppAttrsPublic{IsActive: true, Description: strings.TrimSpace(desc)})
	return app.sync()
}
// Reflection targets used with the reddo conversion helpers below.
var typMapStrBool = reflect.TypeOf(map[string]bool{})
var typSliceStr = reflect.TypeOf([]string{})
// NewAppFromUbo is helper function to create new App bo from a universal bo.
// It returns nil when ubo is nil; otherwise ubo is cloned first so the new
// App never shares mutable state with the caller's value.
func NewAppFromUbo(ubo *henge.UniversalBo) *App {
	if ubo == nil {
		return nil
	}
	ubo = ubo.Clone()
	app := &App{UniversalBo: ubo}
	if v, err := app.GetExtraAttrAs(FieldAppOwnerId, reddo.TypeString); err == nil && v != nil {
		app.SetOwnerId(v.(string))
	}
	if v, err := app.GetDataAttrAs(AttrAppDomains, typSliceStr); err == nil && v != nil {
		app.SetDomains(v.([]string))
	}
	if publicAttrsRaw, err := app.GetDataAttr(AttrAppPublicAttrs); err == nil && publicAttrsRaw != nil {
		var publicAttrs AppAttrsPublic
		var v interface{}
		ok := false
		// Fast path: the stored value is already an AppAttrsPublic (or a pointer to one).
		if v, ok = publicAttrsRaw.(AppAttrsPublic); ok {
			publicAttrs = v.(AppAttrsPublic)
		} else if v, ok = publicAttrsRaw.(*AppAttrsPublic); ok {
			publicAttrs = *v.(*AppAttrsPublic)
		}
		if !ok {
			// Slow path: rebuild the struct field-by-field from the raw
			// attribute tree using the JSON-tag sub-keys.
			if v, err := app.GetDataAttrAs(AttrAppPublicAttrs+".actv", reddo.TypeBool); err == nil && v != nil {
				publicAttrs.IsActive = v.(bool)
			}
			if v, err := app.GetDataAttrAs(AttrAppPublicAttrs+".desc", reddo.TypeString); err == nil && v != nil {
				publicAttrs.Description = strings.TrimSpace(v.(string))
			}
			if v, err := app.GetDataAttrAs(AttrAppPublicAttrs+".rurl", reddo.TypeString); err == nil && v != nil {
				publicAttrs.DefaultReturnUrl = strings.TrimSpace(v.(string))
			}
			if v, err := app.GetDataAttrAs(AttrAppPublicAttrs+".curl", reddo.TypeString); err == nil && v != nil {
				publicAttrs.DefaultCancelUrl = strings.TrimSpace(v.(string))
			}
			if v, err := app.GetDataAttrAs(AttrAppPublicAttrs+".rpub", reddo.TypeString); err == nil && v != nil {
				publicAttrs.RsaPublicKey = strings.TrimSpace(v.(string))
			}
			if v, err := app.GetDataAttrAs(AttrAppPublicAttrs+".isrc", typMapStrBool); err == nil && v != nil {
				publicAttrs.IdentitySources = v.(map[string]bool)
			}
			if v, err := app.GetDataAttrAs(AttrAppPublicAttrs+".tags", typSliceStr); err == nil && v != nil {
				publicAttrs.Tags = v.([]string)
			}
		}
		app.SetAttrsPublic(publicAttrs)
	}
	return app.sync()
}
// AppAttrsPublic holds application's public attributes.
// Values are deep-copied via clone() whenever they cross the App boundary.
type AppAttrsPublic struct {
	IsActive bool `json:"actv"` // is this app active or not
	Description string `json:"desc"` // description text
	DefaultReturnUrl string `json:"rurl"` // default return url after login
	DefaultCancelUrl string `json:"curl"` // default cancel url after login
	IdentitySources map[string]bool `json:"isrc"` // sources of identity
	Tags []string `json:"tags"` // arbitrary tags
	RsaPublicKey string `json:"rpub"` // RSA public key in ASCII-armor format
}
// clone returns a deep copy of the public attributes: the map and slice
// members are duplicated so the copy shares no mutable state with apub.
// A nil map/slice stays nil in the copy.
func (apub AppAttrsPublic) clone() AppAttrsPublic {
	result := apub
	result.IdentitySources = nil
	result.Tags = nil
	if apub.IdentitySources != nil {
		result.IdentitySources = make(map[string]bool, len(apub.IdentitySources))
		for source, enabled := range apub.IdentitySources {
			result.IdentitySources[source] = enabled
		}
	}
	if apub.Tags != nil {
		result.Tags = make([]string, len(apub.Tags))
		copy(result.Tags, apub.Tags)
	}
	return result
}
const (
	// FieldAppOwnerId is the extra-attribute key storing the owning user's id.
	FieldAppOwnerId = "oid"
	// AttrAppDomains is the data-attribute key storing the domain whitelist.
	AttrAppDomains = "domains"
	// AttrAppPublicAttrs is the data-attribute key storing AppAttrsPublic.
	AttrAppPublicAttrs = "apub"
	// AttrAppUbo is the JSON key under which the wrapped UniversalBo is serialized.
	AttrAppUbo = "_ubo"
)
// App is the business object.
// App inherits unique id from bo.UniversalBo.
// NOTE(review): the json tags on the unexported fields have no effect with
// encoding/json; serialization goes through MarshalJSON/UnmarshalJSON below.
type App struct {
	*henge.UniversalBo `json:"_ubo"`
	ownerId string `json:"oid"` // user id who owns this app
	domains []string `json:"domains"` // app's domain whitelist (must contain domains from AppAttrsPublic.DefaultReturnUrl and AppAttrsPublic.DefaultCancelUrl)
	attrsPublic AppAttrsPublic `json:"apub"` // app's public attributes, can be access publicly
}
// _generateUrl validates 'preferred-url' and build the final url.
// If 'preferred-url' is invalid, this function returns empty string.
// Resolution order: empty preferred -> default; absolute preferred -> must
// have a whitelisted host; relative preferred -> completed with the default
// url's scheme/host when the default is absolute, kept as-is otherwise.
func _generateUrl(preferredUrl, defaultUrl string, whitelistDomains []string) string {
	preferredUrl = strings.TrimSpace(preferredUrl)
	if preferredUrl == "" {
		return defaultUrl
	}
	uPreferred, err := url.Parse(preferredUrl)
	if err != nil {
		log.Printf("[WARN] Preferred url is invalid: %s", preferredUrl)
		return ""
	}
	if uPreferred.IsAbs() {
		// An absolute preferred url is only accepted when its host is whitelisted.
		for _, domain := range whitelistDomains {
			if domain == uPreferred.Host {
				return preferredUrl
			}
		}
		log.Printf("[WARN] Preferred url [%s] is not in whitelist.", preferredUrl)
		return ""
	}
	uDefault, err := url.Parse(defaultUrl)
	if err != nil {
		log.Printf("[WARN] Default url is invalid: %s", defaultUrl)
		return ""
	}
	if defaultUrl == "" || !uDefault.IsAbs() {
		// preferred-url and default-url are both relative
		return preferredUrl
	}
	// default-url is absolute, complete the url by prepending default-url's scheme and host
	return uDefault.Scheme + "://" + uDefault.Host + "/" + strings.TrimPrefix(preferredUrl, "/")
}
// GenerateReturnUrl validates 'preferredReturnUrl' and builds "return url" for the app.
//
// - if 'preferredReturnUrl' is invalid, this function returns empty string
func (app *App) GenerateReturnUrl(preferredReturnUrl string) string {
	// Use a copy of the whitelist (GetDomains clones) so appending the
	// default url's host can never write into the backing array shared
	// with app.domains.
	domains := app.GetDomains()
	if u, e := url.Parse(app.attrsPublic.DefaultReturnUrl); e == nil && u != nil {
		domains = append(domains, u.Host)
	}
	return _generateUrl(preferredReturnUrl, app.attrsPublic.DefaultReturnUrl, domains)
}
// GenerateCancelUrl validates 'preferredCancelUrl' and builds "cancel url" for the app.
//
// - if 'preferredCancelUrl' is invalid, this function returns empty string
func (app *App) GenerateCancelUrl(preferredCancelUrl string) string {
	// Use a copy of the whitelist (GetDomains clones) so appending the
	// default url's host can never write into the backing array shared
	// with app.domains.
	domains := app.GetDomains()
	if u, e := url.Parse(app.attrsPublic.DefaultCancelUrl); e == nil && u != nil {
		domains = append(domains, u.Host)
	}
	return _generateUrl(preferredCancelUrl, app.attrsPublic.DefaultCancelUrl, domains)
}
// MarshalJSON implements json.encode.Marshaler.MarshalJSON.
// The bo is sync()'d first so the serialized UniversalBo reflects the private
// fields; nested mutable values are cloned to keep the output isolated.
// TODO: lock for read?
func (app *App) MarshalJSON() ([]byte, error) {
	app.sync()
	m := map[string]interface{}{
		AttrAppUbo: app.UniversalBo.Clone(),
		bo.SerKeyFields: map[string]interface{}{
			FieldAppOwnerId: app.GetOwnerId(),
		},
		bo.SerKeyAttrs: map[string]interface{}{
			AttrAppDomains: app.GetDomains(),
			AttrAppPublicAttrs: app.attrsPublic.clone(),
		},
	}
	return json.Marshal(m)
}
// UnmarshalJSON implements json.decode.Unmarshaler.UnmarshalJSON.
// It expects the structure produced by MarshalJSON (ubo, fields and attrs
// sections) and finishes with sync() so the ubo mirrors the parsed fields.
// TODO: lock for write?
func (app *App) UnmarshalJSON(data []byte) error {
	var m map[string]interface{}
	if err := json.Unmarshal(data, &m); err != nil {
		return err
	}
	if m[AttrAppUbo] != nil {
		// Round-trip through JSON to coerce the generic value into the ubo.
		js, _ := json.Marshal(m[AttrAppUbo])
		if err := json.Unmarshal(js, &app.UniversalBo); err != nil {
			return err
		}
	}
	if _cols, ok := m[bo.SerKeyFields].(map[string]interface{}); ok {
		if v, err := reddo.ToString(_cols[FieldAppOwnerId]); err != nil {
			return err
		} else {
			app.SetOwnerId(v)
		}
	}
	if _attrs, ok := m[bo.SerKeyAttrs].(map[string]interface{}); ok {
		if v, err := reddo.ToSlice(_attrs[AttrAppDomains], typSliceStr); err != nil {
			return err
		} else {
			app.SetDomains(v.([]string))
		}
		if _attrs[AttrAppPublicAttrs] != nil {
			js, _ := json.Marshal(_attrs[AttrAppPublicAttrs])
			if err := json.Unmarshal(js, &app.attrsPublic); err != nil {
				return err
			}
		}
	}
	app.sync()
	return nil
}
// GetOwnerId returns app's 'owner-id' value.
// The stored value is already trimmed/lowercased by SetOwnerId.
func (app *App) GetOwnerId() string {
	return app.ownerId
}
// SetOwnerId sets app's 'owner-id' value.
// The value is normalized: surrounding whitespace removed, then lowercased.
func (app *App) SetOwnerId(value string) *App {
	app.ownerId = strings.TrimSpace(strings.ToLower(value))
	return app
}
// GetDomains returns app's 'whitelist-domains' value.
// The result is a fresh (always non-nil) copy, so callers cannot mutate
// the app's internal slice through it.
//
// Available since v0.7.0
func (app *App) GetDomains() []string {
	out := make([]string, 0, len(app.domains))
	out = append(out, app.domains...)
	return out
}
// SetDomains sets app's 'whitelist-domains' value.
// The input is deduplicated and stored sorted; the caller's slice is not retained.
//
// Available since v0.7.0
func (app *App) SetDomains(value []string) *App {
	// Deduplicate via a set keyed by domain.
	domainsMap := make(map[string]bool, len(value))
	for _, domain := range value {
		domainsMap[domain] = true
	}
	app.domains = make([]string, 0, len(domainsMap))
	for k := range domainsMap {
		app.domains = append(app.domains, k)
	}
	// sort.Strings is the idiomatic (and cheaper) form of sort.Slice for a []string.
	sort.Strings(app.domains)
	return app
}
// GetAttrsPublic returns app's public attributes.
// A deep copy is returned; mutating it does not affect the app.
func (app *App) GetAttrsPublic() AppAttrsPublic {
	return app.attrsPublic.clone()
}
// SetAttrsPublic sets app's public attributes.
// The input is deep-copied, so later changes to apub do not leak into the app.
func (app *App) SetAttrsPublic(apub AppAttrsPublic) *App {
	app.attrsPublic = apub.clone()
	// domains := app.GetDomains()
	// if u, e := url.Parse(app.attrsPublic.DefaultReturnUrl); e == nil && u.Host != "" {
	// domains = append(domains, u.Host)
	// }
	// if u, e := url.Parse(app.attrsPublic.DefaultCancelUrl); e == nil && u.Host != "" {
	// domains = append(domains, u.Host)
	// }
	// app.SetDomains(domains)
	return app
}
// sync mirrors the private fields into the UniversalBo's extra/data
// attributes so persistence and serialization see the current values.
func (app *App) sync() *App {
	app.SetExtraAttr(FieldAppOwnerId, app.ownerId)
	app.SetDataAttr(AttrAppDomains, app.domains)
	app.SetDataAttr(AttrAppPublicAttrs, app.attrsPublic)
	app.UniversalBo.Sync()
	return app
}
|
// Copyright 2021, Pulumi Corporation. All rights reserved.
package logging
import (
"bufio"
"github.com/go-logr/logr"
"github.com/pulumi/pulumi/sdk/v3/go/common/util/contract"
"io"
logf "sigs.k8s.io/controller-runtime/pkg/log"
)
// Logger is a simple wrapper around go-logr to simplify distinguishing debug
// logs from info.
// Close the writers returned by LogWriterDebug/LogWriterInfo when done so
// the line-pumping goroutine behind the pipe can exit.
type Logger interface {
	logr.Logger
	// Debug prints the message and key/values at debug level (level 1) in go-logr
	Debug(msg string, keysAndValues ...interface{})
	// LogWriterDebug returns the write end of a pipe which streams data to
	// the logger at debug level.
	LogWriterDebug(msg string, keysAndValues ...interface{}) io.WriteCloser
	// LogWriterInfo returns the write end of a pipe which streams data to
	// the logger at info level.
	LogWriterInfo(msg string, keysAndValues ...interface{}) io.WriteCloser
}
// logger is the default Logger implementation, delegating to an embedded
// logr.Logger.
type logger struct {
	logr.Logger
}
// Info logs at go-logr's default level (0) by forwarding to the embedded logger.
func (l *logger) Info(msg string, keysAndValues ...interface{}) {
	l.Logger.Info(msg, keysAndValues...)
}
// Debug prints the message and key/values at debug level (level 1) in go-logr.
func (l *logger) Debug(msg string, keysAndValues ...interface{}) {
	l.Logger.V(1).Info(msg, keysAndValues...)
}
// LogWriterDebug returns the write end of a pipe whose lines are logged at
// debug level; close it to stop the logging goroutine.
func (l *logger) LogWriterDebug(msg string, keysAndValues ...interface{}) io.WriteCloser {
	return l.logWriter(l.Debug, msg, keysAndValues...)
}
// LogWriterInfo returns the write end of a pipe whose lines are logged at
// info level; close it to stop the logging goroutine.
func (l *logger) LogWriterInfo(msg string, keysAndValues ...interface{}) io.WriteCloser {
	return l.logWriter(l.Info, msg, keysAndValues...)
}
// logWriter constructs an io.Writer that logs to the provided logging.Logger
// Each line written to the returned WriteCloser is emitted as one log entry
// with the line stored under the "Stdout" key; any scanner error is reported
// through Error. The goroutine (and the read end of the pipe) terminates
// when the returned writer is closed.
func (l *logger) logWriter(logFunc func(msg string, keysAndValues ...interface{}),
	msg string,
	keysAndValues ...interface{}) io.WriteCloser {
	stdoutR, stdoutW := io.Pipe()
	go func() {
		defer contract.IgnoreClose(stdoutR)
		outs := bufio.NewScanner(stdoutR)
		for outs.Scan() {
			text := outs.Text()
			logFunc(msg, append([]interface{}{"Stdout", text}, keysAndValues...)...)
		}
		err := outs.Err()
		if err != nil {
			l.Error(err, msg, keysAndValues...)
		}
	}()
	return stdoutW
}
// NewLogger creates a new Logger using the specified name and keys/values.
// The logger is rooted at the controller-runtime global log sink.
func NewLogger(name string, keysAndValues ...interface{}) Logger {
	return &logger{
		Logger: logf.Log.WithName(name).WithValues(keysAndValues...),
	}
}
// WithValues creates a new Logger using the passed logr.Logger with
// the specified key/values.
func WithValues(l logr.Logger, keysAndValues ...interface{}) Logger {
	return &logger{Logger: l.WithValues(keysAndValues...)}
}
|
package commands
// Destroy implements the "destroy" command.
type Destroy struct{}

// Execute destroys each of the given container handles via the global client.
// Per-handle errors go through failIf (which presumably aborts the process on
// error — TODO confirm); Execute itself always returns nil.
func (command *Destroy) Execute(handles []string) error {
	client := globalClient()
	for _, handle := range handles {
		err := client.Destroy(handle)
		failIf(err)
	}
	return nil
}
|
package main
import (
"os"
"os/exec"
"syscall"
log "github.com/sirupsen/logrus"
)
// main dispatches on the first CLI argument; only "run" is currently supported.
func main() {
	if len(os.Args) < 2 {
		log.Errorln("missing commands")
		return
	}
	switch os.Args[1] {
	case "run":
		run()
	default:
		log.Errorln("wrong command")
		return
	}
}
// run executes the command given after "run" inside a new UTS namespace,
// wiring the child to this process's stdio.
func run() {
	// Fix: os.Args[2] was read unconditionally; invoking "run" with no
	// trailing command panicked with an index-out-of-range.
	if len(os.Args) < 3 {
		log.Errorln("missing command to run")
		return
	}
	log.Infof("Running %v", os.Args[2:])
	cmd := exec.Command(os.Args[2], os.Args[3:]...)
	cmd.Stdin = os.Stdin
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	// CLONE_NEWUTS gives the child its own hostname/domainname namespace.
	cmd.SysProcAttr = &syscall.SysProcAttr{
		Cloneflags: syscall.CLONE_NEWUTS,
	}
	check(cmd.Run())
}
// check logs err when it is non-nil; it does not terminate the process.
func check(err error) {
	if err != nil {
		log.Errorln(err)
	}
}
|
package main
import (
"bytes"
"crypto/tls"
"crypto/x509"
"io/ioutil"
"log"
"net/http"
)
// main performs a mutual-TLS POST to the promize endpoint and logs the
// response body.
// Fixes: the http.NewRequest error was silently discarded (a bad URL would
// nil-pointer panic on req.Header), the response body was never closed, and
// the ReadAll error was ignored.
func main() {
	// load client cert
	cert, err := tls.LoadX509KeyPair("client.crt", "client.key")
	if err != nil {
		log.Fatal(err)
	}
	// load CA cert
	caCert, err := ioutil.ReadFile("ca.crt")
	if err != nil {
		log.Fatal(err)
	}
	caCertPool := x509.NewCertPool()
	caCertPool.AppendCertsFromPEM(caCert)
	// https client tls config
	// InsecureSkipVerify true means not validate server certificate (so no need to set RootCAs)
	tlsConfig := &tls.Config{
		Certificates: []tls.Certificate{cert},
		//RootCAs: caCertPool,
		InsecureSkipVerify: true,
	}
	tlsConfig.BuildNameToCertificate()
	transport := &http.Transport{TLSClientConfig: tlsConfig}
	// https client request
	url := "https://www.chainz.com:8443/promize"
	j := []byte(`{"id": "3232323", "name": "lambda"}`)
	req, err := http.NewRequest("POST", url, bytes.NewBuffer(j))
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	client := &http.Client{Transport: transport}
	// read response
	resp, err := client.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	// Close the body so the underlying connection can be reused/released.
	defer resp.Body.Close()
	contents, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	log.Println(string(contents))
}
|
package main
import "fmt"
// type speakHit interface {
// speak()
// // 只要实现speak()方法的变量,全部都是speakHit类型
// }
// // 引出接口的实例
// type cat struct {
// }
// type dog struct {
// }
// type person struct {
// }
// func (c cat) speak() {
// fmt.Println("miao miao miao~")
// }
// func (d dog) speak() {
// fmt.Println("wang wang wang~")
// }
// func (p person) speak() {
// fmt.Println("a a a~")
// }
// func da(x speakHit) {
// // 接收一个参数,传进来,我就打谁
// x.speak() // 挨打会叫
// }
// func main() {
// var c1 cat
// var d1 dog
// var p1 person
// da(c1)
// da(d1)
// da(p1)
// }
// type car interface {
// run()
// }
// type falali struct {
// brand string
// }
// func (f falali) run() {
// fmt.Printf("%s速度700迈~", f.brand)
// }
// type baoshijie struct {
// brand string
// }
// func (f baoshijie) run() {
// fmt.Printf("%s速度200迈~", f.brand)
// }
// func drive(c car) {
// c.run()
// }
// func main() {
// var f1 = falali{
// brand: "法拉利",
// }
// var f2 = baoshijie{
// brand: "保时捷",
// }
// drive(f1)
// drive(f2)
// }
// Interface implementation demo: any type providing move() and eat(string)
// satisfies animal.
type animal interface {
	move()
	eat(string)
}
// cat satisfies animal via pointer-receiver methods (see *cat methods below),
// so only *cat — not cat — can be stored in an animal.
type cat struct {
	name string
	feet int8
}
// chicken satisfies animal via value-receiver methods, so both chicken and
// *chicken can be stored in an animal.
type chicken struct {
	feet int8
}
// move prints the chicken's movement (value receiver).
func (c chicken) move() {
	fmt.Println("鸡动!")
}
// eat prints what the chicken eats (value receiver).
func (c chicken) eat(food string) {
	fmt.Printf("吃%s!", food)
}
// move prints the cat's movement (pointer receiver).
func (c *cat) move() {
	fmt.Println("走猫步~")
}
// eat prints what the cat eats (pointer receiver).
func (c *cat) eat(food string) {
	fmt.Printf("猫吃%s~", food)
}
// main demonstrates interface satisfaction with pointer vs value receivers.
func main() {
	var a1 animal
	fmt.Printf("a1=%T.", a1)
	bc := cat{
		name: "蓝猫",
		feet: 4,
	}
	// cat's methods use pointer receivers, so only &bc (not bc) satisfies animal.
	a1 = &bc
	a1.eat("小黄鱼")
	fmt.Println(a1)
	var a2 chicken
	kfc := chicken{
		feet: 2,
	}
	a2.eat("白斩鸡")
	a2 = kfc
	fmt.Println(a2)
	fmt.Printf("a1=%T,a2=%T.", a1, a2)
	fmt.Println()
	var cc animal
	c1 := cat{"tom", 4}
	c2 := &cat{"jerry", 4}
	// Both assignments store a *cat; the second overwrites the first.
	cc = &c1
	cc = c2
	fmt.Println(cc)
}
|
package testutil
import (
"bytes"
"errors"
"fmt"
"math/rand"
"sync"
"testing"
ma "gx/ipfs/QmNTCey11oxhb1AxDnQBRHtdhap6Ctud872NjAYPYYXPuc/go-multiaddr"
ci "gx/ipfs/QmNiJiXwWE3kRhZrC5ej3kSjWHm337pYfhjLGSCDNKJP2s/go-libp2p-crypto"
peer "gx/ipfs/QmPJxxDsX2UbchSHobbYuvz7qnyJTFKvaKMzE2rZWJ4x5B/go-libp2p-peer"
ptest "gx/ipfs/QmPJxxDsX2UbchSHobbYuvz7qnyJTFKvaKMzE2rZWJ4x5B/go-libp2p-peer/test"
mh "gx/ipfs/QmerPMzPk1mJVowm8KgmoknWa4yCYvvugMPsgWmDNUvDLW/go-multihash"
)
// ZeroLocalTCPAddress is the "zero" tcp local multiaddr. This means:
// /ip4/127.0.0.1/tcp/0 (the kernel picks a free port at bind time).
var ZeroLocalTCPAddress ma.Multiaddr
func init() {
	// initialize ZeroLocalTCPAddress; the literal is constant, so a parse
	// failure is a programmer error and panicking at startup is appropriate.
	maddr, err := ma.NewMultiaddr("/ip4/127.0.0.1/tcp/0")
	if err != nil {
		panic(err)
	}
	ZeroLocalTCPAddress = maddr
}
// RandTestKeyPair generates a random test key pair of the given bit size,
// delegating to the peer test helpers.
func RandTestKeyPair(bits int) (ci.PrivKey, ci.PubKey, error) {
	return ptest.RandTestKeyPair(bits)
}
// SeededTestKeyPair generates a deterministic test key pair from seed,
// delegating to the peer test helpers.
func SeededTestKeyPair(seed int64) (ci.PrivKey, ci.PubKey, error) {
	return ptest.SeededTestKeyPair(seed)
}
// RandPeerID generates random "valid" peer IDs. it does not NEED to generate
// keys because it is as if we lost the key right away. fine to read randomness
// and hash it. to generate proper keys and corresponding PeerID, use:
// sk, pk, _ := testutil.RandKeyPair()
// id, _ := peer.IDFromPublicKey(pk)
func RandPeerID() (peer.ID, error) {
	buf := make([]byte, 16)
	rand.Read(buf) // math/rand's Read never fails, so the error is safe to drop
	h, _ := mh.Sum(buf, mh.SHA2_256, -1)
	return peer.ID(h), nil
}
// RandPeerIDFatal is RandPeerID but fails the test instead of returning an error.
func RandPeerIDFatal(t testing.TB) peer.ID {
	p, err := RandPeerID()
	if err != nil {
		t.Fatal(err)
	}
	return p
}
// RandLocalTCPAddress returns a random multiaddr. it suppresses errors
// for nice composability-- do check the address isn't nil.
//
// NOTE: for real network tests, use ZeroLocalTCPAddress so the kernel
// assigns an unused TCP port. otherwise you may get clashes. This
// function remains here so that p2p/net/mock (which does not touch the
// real network) can assign different addresses to peers.
func RandLocalTCPAddress() ma.Multiaddr {
	// chances are it will work out, but it **might** fail if the port is in use
	// most ports above 10000 aren't in use by long running processes, so yay.
	// (maybe there should be a range of "loopback" ports that are guaranteed
	// to be open for the process, but naturally can only talk to self.)

	// lastPort hands out sequential ports under its mutex, seeding the start
	// randomly on first use.
	lastPort.Lock()
	if lastPort.port == 0 {
		lastPort.port = 10000 + SeededRand.Intn(50000)
	}
	port := lastPort.port
	lastPort.port++
	lastPort.Unlock()
	addr := fmt.Sprintf("/ip4/127.0.0.1/tcp/%d", port)
	maddr, _ := ma.NewMultiaddr(addr)
	return maddr
}
// lastPort serializes allocation of sequential local test ports across
// goroutines; port 0 means "not yet seeded".
var lastPort = struct {
	port int
	sync.Mutex
}{}
// PeerNetParams is a struct to bundle together the four things
// you need to run a connection with a peer: id, 2keys, and addr.
type PeerNetParams struct {
	ID peer.ID
	PrivKey ci.PrivKey
	PubKey ci.PubKey
	Addr ma.Multiaddr
}
// checkKeys verifies that p.ID matches both keys and that a sign/verify
// round-trip over a fixed message succeeds with the key pair.
// NOTE(review): the bytes.Buffer is only used to produce a []byte; a plain
// []byte("...") literal would do, but it is kept because bytes is imported
// for this use alone.
func (p *PeerNetParams) checkKeys() error {
	if !p.ID.MatchesPrivateKey(p.PrivKey) {
		return errors.New("p.ID does not match p.PrivKey")
	}
	if !p.ID.MatchesPublicKey(p.PubKey) {
		return errors.New("p.ID does not match p.PubKey")
	}
	buf := new(bytes.Buffer)
	buf.Write([]byte("hello world. this is me, I swear."))
	b := buf.Bytes()
	sig, err := p.PrivKey.Sign(b)
	if err != nil {
		return fmt.Errorf("sig signing failed: %s", err)
	}
	sigok, err := p.PubKey.Verify(b, sig)
	if err != nil {
		return fmt.Errorf("sig verify failed: %s", err)
	}
	if !sigok {
		return fmt.Errorf("sig verify failed: sig invalid")
	}
	return nil // ok. move along.
}
// RandPeerNetParamsOrFatal is RandPeerNetParams but fails the test instead
// of returning an error.
func RandPeerNetParamsOrFatal(t *testing.T) PeerNetParams {
	p, err := RandPeerNetParams()
	if err != nil {
		t.Fatal(err)
		return PeerNetParams{} // TODO return nil
	}
	return *p
}
// RandPeerNetParams produces a fresh random identity: a 1024-bit key pair,
// the matching peer ID, and ZeroLocalTCPAddress as the address. The keys
// are validated with a sign/verify round-trip before returning.
func RandPeerNetParams() (*PeerNetParams, error) {
	var p PeerNetParams
	var err error
	p.Addr = ZeroLocalTCPAddress
	p.PrivKey, p.PubKey, err = RandTestKeyPair(1024)
	if err != nil {
		return nil, err
	}
	p.ID, err = peer.IDFromPublicKey(p.PubKey)
	if err != nil {
		return nil, err
	}
	if err := p.checkKeys(); err != nil {
		return nil, err
	}
	return &p, nil
}
|
package main
import (
"fmt"
"io"
"math"
"math/rand"
"os"
"regexp"
"time"
)
// stripAnsiStart matches an ANSI SGR/EL escape sequence at the start of a
// byte slice so pre-colored input can be stripped before re-coloring.
var stripAnsiStart = regexp.MustCompile("^\033" + `\[(\d+)(;\d+)?(;\d+)?[m|K]`)
// LolWriter wraps another writer and rainbow-colorizes everything written
// through it.
type LolWriter struct {
	base io.Writer // destination for the colorized output
	os int // rainbow row offset, advanced on each newline
	li int // index of the current character within the line
	spread float64 // how many characters share one rainbow step
	freq float64 // frequency fed to the sine color generator
}
// tabSpaces is the replacement written (and counted) for each tab byte.
var tabSpaces = []byte(" ")
// Write colorizes data byte by byte: newlines reset the column and advance
// the rainbow row, tabs are expanded via tabSpaces, existing ANSI escape
// prefixes are stripped, and every other byte is wrapped in its own color
// escape. It always reports len(data) written with a nil error; errors from
// the underlying writer are ignored.
func (w *LolWriter) Write(data []byte) (int, error) {
	for i := 0; i < len(data); i++ {
		c := data[i]
		if c == '\n' {
			w.li = 0
			w.os++
			w.base.Write([]byte{'\n'})
		} else if c == '\t' {
			w.li += len(tabSpaces)
			w.base.Write(tabSpaces)
		} else {
			// Skip over any ANSI escape already present in the input.
			matchPos := stripAnsiStart.FindIndex(data[i:])
			if matchPos != nil {
				i += matchPos[1] - 1
				continue
			}
			r, g, b := rainbow(w.freq, float64(w.os)+(float64(w.li)/w.spread))
			fmt.Fprint(w.base, colored(string(c), r, g, b))
			w.li++
		}
	}
	return len(data), nil
}
// main colorizes stdin when no arguments are given, otherwise each named
// file in order; unreadable files are reported and make the exit code 1.
func main() {
	rand.Seed(time.Now().UTC().UnixNano())
	seed := int(rand.Int31n(256))
	if len(os.Args) == 1 {
		runLol(seed, os.Stdout, os.Stdin)
		return
	}
	var exit int
	for _, filename := range os.Args[1:] {
		f, err := os.Open(filename)
		if err != nil {
			exit = 1
			fmt.Printf("%s: %s\n", os.Args[0], err.Error())
			continue
		}
		runLol(seed, os.Stdout, f)
		// Fix: the original used `defer f.Close()` inside the loop, which
		// kept every file open until main returned; close each file as soon
		// as it has been processed.
		f.Close()
	}
	os.Exit(exit)
}
// runLol copies input to output through a LolWriter seeded with the given
// rainbow row offset; if colorizing panics, it falls back to a plain
// uncolored copy of whatever input remains.
func runLol(seed int, output io.Writer, input io.Reader) {
	defer func() {
		if r := recover(); r != nil {
			io.Copy(output, input)
		}
	}()
	writer := LolWriter{
		os: seed,
		base: output,
		freq: 0.1,
		spread: 3.0,
	}
	cat(&writer, input)
}
func cat(writer io.Writer, reader io.Reader) {
io.Copy(writer, reader)
}
// rainbow maps position i to an (r, g, b) triple along a sine-wave rainbow
// of the given frequency; the three channels are phase-shifted by 2π/3.
func rainbow(freq, i float64) (int, int, int) {
	phase := freq * i
	channel := func(shift float64) int {
		return int(math.Sin(phase+shift)*127 + 128)
	}
	return channel(0), channel(2 * math.Pi / 3), channel(4 * math.Pi / 3)
}
// colored wraps str in 256-color ANSI escapes selecting the palette entry
// closest to (r, g, b), resetting attributes afterwards.
func colored(str string, r, g, b int) string {
	code := rgb(float64(r), float64(g), float64(b))
	return fmt.Sprintf("\033[38%sm%s\033[0m", code, str)
}
// toBaseColor quantizes a 0-255 channel value to the 0-5 range of the ANSI
// 256-color cube, scaled by the channel's place value (36, 6 or 1).
func toBaseColor(color float64, mod int) int {
	level := int(6 * (color / 256))
	return level * mod
}
// rgb converts an RGB triple into an xterm-256 palette suffix (";5;<n>").
// Colors whose three channels all fall below the same 42.5-wide threshold are
// mapped onto the 24-entry grayscale ramp (232..255); everything else lands in
// the 6x6x6 color cube starting at index 16.
func rgb(red, green, blue float64) string {
	// Raise the threshold in 42.5 steps until the darkest channel drops
	// below it; grayscale applies only if all three channels are below that
	// same threshold.
	threshold := 42.5
	for red >= threshold && green >= threshold && blue >= threshold {
		threshold += 42.5
	}
	if red < threshold && green < threshold && blue < threshold {
		return fmt.Sprintf(";5;%d", 232+int((red+green+blue)/33.0))
	}
	value := 16 + toBaseColor(red, 36) + toBaseColor(green, 6) + toBaseColor(blue, 1)
	return fmt.Sprintf(";5;%d", value)
}
|
package cmd
import (
"github.com/Atrox/homedir"
"github.com/spf13/cobra"
"github.com/daticahealth/datikube/kubectl"
"github.com/daticahealth/datikube/logs"
)
// setContext is the "set-context" subcommand. It authenticates the user
// against Datica, persists the session token, then shells out to kubectl to
// write matching cluster and context entries into the local kubeconfig.
var setContext = func() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "set-context <name> <cluster-url> <ca-file>",
		Short: "Add or update cluster context in local kubeconfig",
		Long: "Add or update cluster context in local kubeconfig. This command will prompt for " +
			"valid Datica account credentials, then set user, cluster, and context entries " +
			"appropriately. " + `
<name> is the name you'd like to use for this cluster, like "prod" or "staging".
<cluster-url> is a URL at which this cluster's kube-apiserver is accessible.
<ca-file> is a relative path to the CA cert for this cluster.
For example, using a local minikube setup:
datikube set-context my-minikube https://192.168.99.100:8443 ~/.minikube/ca.crt
`,
		Args: cobra.ExactArgs(3),
		RunE: func(cmd *cobra.Command, args []string) error {
			name, clusterURL, caPath := args[0], args[1], args[2]
			// Expand a leading "~" in the CA path to the user's home dir.
			expCAPath, err := homedir.Expand(caPath)
			if err != nil {
				return err
			}
			addSkipVerify, err := cmd.Flags().GetBool("insecure-skip-tls-verify")
			if err != nil {
				return err
			}
			exp, err := expandedConfigPath()
			if err != nil {
				return err
			}
			// Reuse an existing session token from kubeconfig if present so
			// getUser can skip re-prompting for credentials.
			authInfo, err := kubectl.GetUserInfo(exp)
			if err != nil {
				return err
			}
			sessionToken := ""
			if authInfo != nil {
				sessionToken = authInfo.Token
			}
			user, err := getUser(sessionToken, false)
			if err != nil {
				return err
			}
			err = kubectl.PersistUser(exp, user.SessionToken)
			if err != nil {
				return err
			}
			kargs := []string{"config", "set-cluster", name, "--server", clusterURL, "--certificate-authority", expCAPath}
			if addSkipVerify {
				kargs = append(kargs, "--insecure-skip-tls-verify", "true")
			}
			_, err = kubectl.Execute(exp, kargs...)
			if err != nil {
				return err
			}
			// Bind the context to the cluster entry written above and the
			// shared Datica user entry.
			_, err = kubectl.Execute(exp, "config", "set-context", name, "--cluster", name, "--user", kubectl.UserName)
			if err != nil {
				return err
			}
			logs.Print("Context set. Use \"--context=%s\" in your kubectl commands for this cluster. Example:", name)
			logs.Print("")
			logs.Print("\tkubectl --context=%s get pods", name)
			return nil
		},
	}
	cmd.Flags().Bool("insecure-skip-tls-verify", false, "Add the --insecure-skip-tls-verify option to the cluster")
	return cmd
}()
|
package main
import (
"fmt"
"math/rand"
)
// main prints four independent samples from rand10.
func main() {
	for i := 0; i < 4; i++ {
		fmt.Println(rand10())
	}
}
// rand10 returns a uniformly distributed integer in [1, 10] built from rand7.
//
// The previous implementation returned (rand7()+rand7())%10 + 1. The sum of
// two rand7 calls ranges over 2..14 with a triangular (middle-heavy)
// distribution, so the result was not uniform. Rejection sampling over the
// 7x7 grid is exact: (rand7()-1)*7 + rand7() is uniform on 1..49; the nine
// values 41..49 are rejected so the remaining 40 outcomes map evenly, four
// apiece, onto 1..10.
func rand10() int {
	for {
		v := (rand7()-1)*7 + rand7() // uniform on 1..49
		if v <= 40 {
			return (v-1)%10 + 1
		}
	}
}
// rand7 returns a uniformly distributed integer in [1, 7].
func rand7() int {
	return rand.Intn(7) + 1
}
|
package main
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/rightscale/rsc/gen"
)
// This Ginkgo spec exercises ParamAnalyzer.Analyze. Each Context seeds a raw
// parameter map (query-style name -> {"class": ...} metadata) via
// BeforeEach, runs the analyzer, and asserts the shape of the resulting
// gen.ActionParam tree: basic types, arrays ("foo[]..."), hashes
// ("foo[bar]"), enumerables ("foo[*]"), and nested combinations.
var _ = Describe("ParamAnalyzer", func() {
	var (
		params   map[string]interface{}
		analyzer *ParamAnalyzer
	)
	// JustBeforeEach runs after each Context's BeforeEach, so the analyzer is
	// always constructed from that Context's params.
	JustBeforeEach(func() {
		analyzer = NewAnalyzer(params)
	})
	Context("with an empty path and a simple param", func() {
		BeforeEach(func() {
			params = map[string]interface{}{"foo": map[string]interface{}{"class": "String"}}
		})
		It("Analyze returns the parsed param", func() {
			analyzer.Analyze()
			params := analyzer.Params
			Ω(params).Should(HaveLen(1))
			param := params[0]
			Ω(param.Name).Should(Equal("foo"))
			s := gen.BasicDataType("string")
			Ω(param.Type).Should(BeEquivalentTo(&s))
		})
	})
	Context("with a simple array param", func() {
		BeforeEach(func() {
			params = map[string]interface{}{"foo": map[string]interface{}{"class": "Array"}}
		})
		It("Analyze returns the parsed param", func() {
			analyzer.Analyze()
			params := analyzer.Params
			Ω(params).Should(HaveLen(1))
			param := params[0]
			Ω(param.Name).Should(Equal("foo"))
			// An untyped array gets a synthetic string "item" element.
			s := gen.BasicDataType("string")
			item := gen.ActionParam{
				Name:      "item",
				QueryName: "foo[item]",
				VarName:   "item",
				Type:      &s,
			}
			Ω(param.Type).Should(BeEquivalentTo(&gen.ArrayDataType{&item}))
		})
	})
	Context("with a simple hash param", func() {
		BeforeEach(func() {
			params = map[string]interface{}{
				"foo":      map[string]interface{}{"class": "Hash"},
				"foo[bar]": map[string]interface{}{"class": "String"},
				"foo[baz]": map[string]interface{}{"class": "Integer"},
			}
		})
		It("Analyze returns the parsed param", func() {
			analyzer.Analyze()
			params := analyzer.Params
			Ω(params).Should(HaveLen(1))
			param := params[0]
			Ω(param.Name).Should(Equal("foo"))
			s := gen.BasicDataType("string")
			bar := gen.ActionParam{
				Name:      "bar",
				QueryName: "foo[bar]",
				VarName:   "bar",
				Type:      &s,
			}
			i := gen.BasicDataType("int")
			baz := gen.ActionParam{
				Name:      "baz",
				QueryName: "foo[baz]",
				VarName:   "baz",
				Type:      &i,
			}
			Ω(param.Type).Should(BeEquivalentTo(
				&gen.ObjectDataType{"Foo", []*gen.ActionParam{&bar, &baz}}))
		})
	})
	Context("with a simple enumerable param", func() {
		BeforeEach(func() {
			params = map[string]interface{}{
				"foo":    map[string]interface{}{"class": "Enumerable"},
				"foo[*]": map[string]interface{}{"class": "String"},
			}
		})
		It("Analyze returns the parsed param", func() {
			analyzer.Analyze()
			params := analyzer.Params
			Ω(params).Should(HaveLen(1))
			param := params[0]
			Ω(param.Name).Should(Equal("foo"))
			Ω(param.Type).Should(BeEquivalentTo(new(gen.EnumerableDataType)))
		})
	})
	Context("with a hash of enumerable params", func() {
		BeforeEach(func() {
			params = map[string]interface{}{
				"foo":         map[string]interface{}{"class": "Hash"},
				"foo[bar]":    map[string]interface{}{"class": "String"},
				"foo[baz]":    map[string]interface{}{"class": "Enumerable"},
				"foo[baz][*]": map[string]interface{}{"class": "String"},
			}
		})
		It("Analyze returns the parsed param", func() {
			analyzer.Analyze()
			params := analyzer.Params
			Ω(params).Should(HaveLen(1))
			param := params[0]
			Ω(param.Name).Should(Equal("foo"))
			s := gen.BasicDataType("string")
			bar := gen.ActionParam{
				Name:      "bar",
				QueryName: "foo[bar]",
				Type:      &s,
				VarName:   "bar",
			}
			baz := gen.ActionParam{
				Name:      "baz",
				QueryName: "foo[baz]",
				Type:      new(gen.EnumerableDataType),
				VarName:   "baz",
			}
			Ω(param.Type).Should(BeEquivalentTo(
				&gen.ObjectDataType{"Foo", []*gen.ActionParam{&bar, &baz}}))
		})
	})
	Context("with a hash star param", func() {
		BeforeEach(func() {
			params = map[string]interface{}{
				"foo":    map[string]interface{}{"class": "Hash"},
				"foo[*]": map[string]interface{}{"class": "String"},
			}
		})
		It("Analyze returns the parsed param", func() {
			analyzer.Analyze()
			params := analyzer.Params
			Ω(params).Should(HaveLen(1))
			param := params[0]
			Ω(param.Name).Should(Equal("foo"))
			// A hash keyed by "*" collapses to an enumerable.
			Ω(param.Type).Should(BeEquivalentTo(new(gen.EnumerableDataType)))
		})
	})
	Context("with a orphan star param", func() {
		BeforeEach(func() {
			params = map[string]interface{}{
				"foo[*]": map[string]interface{}{"class": "String"},
			}
		})
		It("Analyze returns the parsed param", func() {
			analyzer.Analyze()
			params := analyzer.Params
			Ω(params).Should(HaveLen(1))
			param := params[0]
			Ω(param.Name).Should(Equal("foo"))
			Ω(param.Type).Should(BeEquivalentTo(new(gen.EnumerableDataType)))
		})
	})
	Context("with an array of hashes", func() {
		BeforeEach(func() {
			params = map[string]interface{}{
				"foo":        map[string]interface{}{"class": "Array"},
				"foo[][bar]": map[string]interface{}{"class": "String"},
				"foo[][baz]": map[string]interface{}{"class": "Integer"},
			}
		})
		It("Analyze returns the parsed param", func() {
			analyzer.Analyze()
			params := analyzer.Params
			Ω(params).Should(HaveLen(1))
			param := params[0]
			Ω(param.Name).Should(Equal("foo"))
			s := gen.BasicDataType("string")
			bar := gen.ActionParam{
				Name:      "bar",
				QueryName: "foo[][bar]",
				Type:      &s,
				VarName:   "bar",
			}
			i := gen.BasicDataType("int")
			baz := gen.ActionParam{
				Name:      "baz",
				QueryName: "foo[][baz]",
				Type:      &i,
				VarName:   "baz",
			}
			t := gen.ObjectDataType{"Foo", []*gen.ActionParam{&bar, &baz}}
			item := gen.ActionParam{
				Name:      "item",
				QueryName: "foo[][item]",
				Type:      &t,
				VarName:   "item",
			}
			Ω(param.Type).Should(BeEquivalentTo(&gen.ArrayDataType{&item}))
		})
	})
	Context("with an array of hashes with sub-array", func() {
		BeforeEach(func() {
			params = map[string]interface{}{
				"foo":               map[string]interface{}{"class": "Array"},
				"foo[][bar]":        map[string]interface{}{"class": "String"},
				"foo[][baz]":        map[string]interface{}{"class": "Array"},
				"foo[][baz][][goo]": map[string]interface{}{"class": "String"},
			}
		})
		It("Analyze returns the parsed param", func() {
			analyzer.Analyze()
			params := analyzer.Params
			Ω(params).Should(HaveLen(1))
			param := params[0]
			Ω(param.Name).Should(Equal("foo"))
			s := gen.BasicDataType("string")
			bar := gen.ActionParam{
				Name:      "bar",
				QueryName: "foo[][bar]",
				Type:      &s,
				VarName:   "bar",
			}
			goo := gen.ActionParam{
				Name:      "goo",
				QueryName: "foo[][baz][][goo]",
				Type:      &s,
				VarName:   "goo",
			}
			t := gen.ObjectDataType{"Baz", []*gen.ActionParam{&goo}}
			bazItem := gen.ActionParam{
				Name:      "item",
				QueryName: "foo[][baz][][item]",
				Type:      &t,
				VarName:   "item",
			}
			baz := gen.ActionParam{
				Name:      "baz",
				QueryName: "foo[][baz][]",
				Type:      &gen.ArrayDataType{&bazItem},
				VarName:   "baz",
			}
			p := gen.ObjectDataType{"Foo", []*gen.ActionParam{&bar, &baz}}
			item := gen.ActionParam{
				Name:      "item",
				QueryName: "foo[][item]",
				Type:      &p,
				VarName:   "item",
			}
			Ω(param.Type).Should(BeEquivalentTo(&gen.ArrayDataType{&item}))
		})
	})
	Context("with a mix of arrays, enumerables and hashes", func() {
		BeforeEach(func() {
			// Deliberately unsorted keys: Analyze is expected to return the
			// top-level params sorted by name (foo, foo1, ..., foo4).
			params = map[string]interface{}{
				"foo4":                  map[string]interface{}{"class": "Array"},
				"foo4[][bar]":           map[string]interface{}{"class": "String"},
				"foo2":                  map[string]interface{}{"class": "String"},
				"foo4[][baz][][zoo][*]": map[string]interface{}{"class": "String"},
				"foo1[*]":               map[string]interface{}{"class": "String"},
				"foo4[][baz]":           map[string]interface{}{"class": "Array"},
				"foo3":                  map[string]interface{}{"class": "Hash"},
				"foo4[][baz][][goo]":    map[string]interface{}{"class": "String"},
				"foo3[baz]":             map[string]interface{}{"class": "Integer"},
				"foo1":                  map[string]interface{}{"class": "Enumerable"},
				"foo":                   map[string]interface{}{"class": "Array"},
				"foo4[][baz][][zoo]":    map[string]interface{}{"class": "Enumerable"},
			}
		})
		It("Analyze returns the parsed params", func() {
			analyzer.Analyze()
			params := analyzer.Params
			Ω(params).Should(HaveLen(5))
			s := gen.BasicDataType("string")
			i := gen.BasicDataType("int")
			param := params[0]
			Ω(param.Name).Should(Equal("foo"))
			item := gen.ActionParam{
				Name:      "item",
				QueryName: "foo[item]",
				Type:      &s,
				VarName:   "item",
			}
			Ω(param.Type).Should(BeEquivalentTo(&gen.ArrayDataType{&item}))
			param = params[1]
			Ω(param.Name).Should(Equal("foo1"))
			Ω(param.Type).Should(BeEquivalentTo(new(gen.EnumerableDataType)))
			param = params[2]
			Ω(param.Type).Should(BeEquivalentTo(&s))
			param = params[3]
			baz := gen.ActionParam{
				Name:      "baz",
				QueryName: "foo3[baz]",
				Type:      &i,
				VarName:   "baz",
			}
			p := gen.ObjectDataType{"Foo3", []*gen.ActionParam{&baz}}
			Ω(param.Type).Should(BeEquivalentTo(&p))
			param = params[4]
			Ω(param.Name).Should(Equal("foo4"))
			bar := gen.ActionParam{
				Name:      "bar",
				QueryName: "foo4[][bar]",
				Type:      &s,
				VarName:   "bar",
			}
			goo := gen.ActionParam{
				Name:      "goo",
				QueryName: "foo4[][baz][][goo]",
				Type:      &s,
				VarName:   "goo",
			}
			zoo := gen.ActionParam{
				Name:      "zoo",
				QueryName: "foo4[][baz][][zoo]",
				Type:      new(gen.EnumerableDataType),
				VarName:   "zoo",
			}
			t := gen.ObjectDataType{"Baz", []*gen.ActionParam{&goo, &zoo}}
			bazItem := gen.ActionParam{
				Name:      "item",
				QueryName: "foo4[][baz][][item]",
				Type:      &t,
				VarName:   "item",
			}
			baz = gen.ActionParam{
				Name:      "baz",
				QueryName: "foo4[][baz][]",
				Type:      &gen.ArrayDataType{&bazItem},
				VarName:   "baz",
			}
			p = gen.ObjectDataType{"Foo4", []*gen.ActionParam{&bar, &baz}}
			item = gen.ActionParam{
				Name:      "item",
				QueryName: "foo4[][item]",
				Type:      &p,
				VarName:   "item",
			}
			Ω(param.Type).Should(BeEquivalentTo(&gen.ArrayDataType{&item}))
		})
	})
})
|
package server
import (
"io"
"io/ioutil"
"log"
"mime/multipart"
"os"
"strings"
)
// File used to handle file path and file operation.
// We use interface so we can swap it to other file storage easily
type File interface {
	// GetFile returns the full contents of the file at src.
	GetFile(src string) ([]byte, error)
	// Upload persists an uploaded multipart file to destPath.
	Upload(file *multipart.FileHeader, destPath string) error
}
// LocalFile implements File against the local filesystem; it carries no
// state, so its zero value is ready to use.
type LocalFile struct {
}
// GetFile returns the entire contents of the local file at srcPath, or the
// underlying read error (e.g. when the file does not exist).
func (f LocalFile) GetFile(srcPath string) ([]byte, error) {
	contents, err := ioutil.ReadFile(srcPath)
	if err != nil {
		return nil, err
	}
	return contents, nil
}
// Upload saves the uploaded multipart file to destPath, creating any missing
// parent directories first. Paths are treated as "/"-separated, matching how
// callers build destPath in this package.
func (f LocalFile) Upload(file *multipart.FileHeader, destPath string) error {
	src, err := file.Open()
	if err != nil {
		return err
	}
	defer src.Close()
	// Create all directory if not exists
	s := strings.Split(destPath, "/")
	s = s[:len(s)-1]
	sJoin := strings.Join(s, "/")
	if _, err := os.Stat(sJoin); os.IsNotExist(err) {
		log.Print("Upload folder is missing. Creating folder...")
		// Previously the MkdirAll error was silently dropped, so a failure
		// here only surfaced later as a confusing os.Create error.
		if err := os.MkdirAll(sJoin, os.ModePerm); err != nil {
			return err
		}
		log.Print("Folder created in ", sJoin)
	}
	// Destination
	dst, err := os.Create(destPath)
	if err != nil {
		return err
	}
	defer dst.Close()
	// Copy
	if _, err = io.Copy(dst, src); err != nil {
		return err
	}
	return nil
}
|
/*
Copyright 2019 Gravitational, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package installer
import (
"context"
"net"
"time"
"github.com/gravitational/gravity/lib/state"
"github.com/gravitational/trace"
log "github.com/sirupsen/logrus"
grpc "google.golang.org/grpc"
)
// NewClient returns a new client using the specified state directory
// to look for socket file. It dials over a Unix domain socket (plaintext,
// blocking, retrying with a capped one-second backoff) and wraps any failure
// with trace context.
func NewClient(ctx context.Context, socketPath string, logger log.FieldLogger, opts ...grpc.DialOption) (AgentClient, error) {
	dialOptions := []grpc.DialOption{
		// Don't use TLS, as we communicate over domain sockets
		grpc.WithInsecure(),
		// Retry every second after failure
		grpc.WithBackoffMaxDelay(1 * time.Second),
		grpc.WithBlock(),
		grpc.WithContextDialer(func(ctx context.Context, addr string) (net.Conn, error) {
			// Always dial socketPath; the addr supplied by gRPC (from the
			// target string below) is intentionally ignored.
			conn, err := (&net.Dialer{}).DialContext(ctx, "unix", socketPath)
			logger.WithFields(log.Fields{
				log.ErrorKey: err,
				"addr":       socketPath,
			}).Debug("Connect to installer service.")
			if err != nil {
				return nil, trace.Wrap(err)
			}
			return conn, nil
		}),
	}
	dialOptions = append(dialOptions, opts...)
	// NOTE(review): the target here is effectively a placeholder because the
	// custom dialer above connects to socketPath instead.
	conn, err := grpc.DialContext(ctx, "unix:///installer.sock", dialOptions...)
	if err != nil {
		return nil, trace.Wrap(err)
	}
	client := NewAgentClient(conn)
	return client, nil
}
// SocketPath returns the default path to the installer service socket,
// i.e. "installer.sock" inside the gravity install state directory.
func SocketPath() (path string, err error) {
	return state.GravityInstallDir("installer.sock")
}
|
package main
import "testing"
// tbp is a table-test row for TestBuildPage: the directory and file name
// passed to _buildPage, plus the expected resulting Page (embedded).
type tbp struct {
	Dir string
	Fn  string
	Page
}
// TODO add more test cases and tests in general!
// TestBuildPage checks that _buildPage derives every Page field (paths,
// names, extension, and build destinations) correctly for both a root-level
// HTML page and a nested Markdown page.
func TestBuildPage(t *testing.T) {
	tableTests := []tbp{
		{
			Dir: "pages",
			Fn:  "index.html",
			Page: Page{
				BaseDir:       "pages",
				LinkDir:       "",
				FileName:      "index.html",
				Name:          "index",
				Ext:           "html",
				FullPath:      "pages/index.html",
				BuildDir:      "build",
				BuildFullPath: "build/index.html",
			}},
		{
			Dir: "pages/warren",
			Fn:  "author.md",
			Page: Page{
				BaseDir:       "pages/warren",
				LinkDir:       "warren/author",
				FileName:      "author.md",
				Name:          "author",
				Ext:           "md",
				FullPath:      "pages/warren/author.md",
				BuildDir:      "build/warren/author",
				BuildFullPath: "build/warren/author/index.html",
			}},
	}
	for _, tt := range tableTests {
		p := _buildPage(tt.Dir, tt.Fn)
		check := func(field, got, want string) {
			if got != want {
				t.Errorf("%s got (%s) wanted (%s)", field, got, want)
			}
		}
		check("BaseDir", p.BaseDir, tt.Page.BaseDir)
		check("LinkDir", p.LinkDir, tt.Page.LinkDir)
		check("FileName", p.FileName, tt.Page.FileName)
		check("Name", p.Name, tt.Page.Name)
		check("Ext", p.Ext, tt.Page.Ext)
		check("FullPath", p.FullPath, tt.Page.FullPath)
		check("BuildDir", p.BuildDir, tt.Page.BuildDir)
		check("BuildFullPath", p.BuildFullPath, tt.Page.BuildFullPath)
	}
}
|
package coinchange
import "testing"
// TestCoinChange checks the minimum-coin-count solver against known inputs,
// including an amount that cannot be formed (expected -1).
func TestCoinChange(t *testing.T) {
	cases := []struct {
		coins  []int
		amount int
		expect int
	}{
		{[]int{1, 2, 5}, 11, 3},
		{[]int{2}, 3, -1},
		{[]int{7, 2, 3, 6}, 13, 2},
		{[]int{3, 2, 4}, 6, 2},
	}
	for _, tc := range cases {
		got, _ := coinChange(tc.coins, tc.amount)
		if got != tc.expect {
			t.Errorf("coinChange(%v, %v) = %v (expected %v)", tc.coins, tc.amount, got, tc.expect)
		}
	}
}
// TestPrintCoinChangeCombination verifies the concrete coin combination
// reconstructed from coinChange's second return value.
func TestPrintCoinChangeCombination(t *testing.T) {
	var tests = []struct {
		coins  []int
		amount int
		expect string
	}{
		{[]int{1, 2, 5}, 11, "[5 5 1]"},
		{[]int{2}, 3, "[]"},
		{[]int{7, 2, 3, 6}, 13, "[6 7]"},
		{[]int{3, 2, 4}, 6, "[3 3]"},
	}
	for _, test := range tests {
		_, R := coinChange(test.coins, test.amount)
		// The failure message previously claimed "coinChange" was under test;
		// name the function actually being exercised.
		if got := printCoinChangeCombination(test.coins, R); got != test.expect {
			t.Errorf("printCoinChangeCombination(%v, %v) = %v (expected %s)", test.coins, test.amount, got, test.expect)
		}
	}
}
// TestChange checks the combination-count solver: the number of distinct
// ways to make amount from the given coin denominations.
func TestChange(t *testing.T) {
	var tests = []struct {
		coins  []int
		amount int
		expect int
	}{
		{[]int{1, 2, 5}, 5, 4},
		{[]int{2}, 3, 0},
		{[]int{10}, 10, 1},
	}
	for _, test := range tests {
		// The failure message previously said "coin(%v)" and omitted the
		// amount; report both inputs under the correct function name.
		if got := change(test.amount, test.coins); got != test.expect {
			t.Errorf("change(%v, %v) = %v (expected %v)", test.amount, test.coins, got, test.expect)
		}
	}
}
|
package rectangle
import "fmt"
// A and b are example package-level constants (not variables) printed from
// init; A is exported, b is package-private.
const A, b = 20, 30
// init runs automatically when the package is loaded, before main, and
// demonstrates initialization order by printing the package constants.
func init() {
	fmt.Println("rectmetr.go init function")
	fmt.Println("A var is:", A, "b var is:", b)
}
// Area returns the area of a rectangle with the given width and length.
func Area(width, length float64) float64 {
	result := width * length
	return result
}
// innerArea is an unexported helper returning the rectangle area plus one.
func innerArea(width, length float64) float64 {
	return 1 + width*length
}
|
package main
import "fmt"
//Map adalah kumpulan key value yang dimana key nya bersifat unik tidak boleh sama
//Tipe data valuenya haruslah bertipe yang sama
//Berbeda dengan Array dan Slice data yang dimasukan ke Map boleh sebanyak banyaknya, dengan catatan keynya harus berbeda
//Bila key nya sama maka otomatis key data sebelumnya akan di replace dgn key data yang baru
// main demonstrates the basic map operations: literal construction, adding a
// key, lookups, make-based construction, and deleting a key.
func main() {
	profile := map[string]string{
		"name":    "Zuhri",
		"address": "Aceh",
	}
	profile["job"] = "Developer"
	fmt.Println(profile)
	fmt.Println(profile["name"])
	fmt.Println(profile["address"])
	library := make(map[string]string)
	library["title"] = "Golang Programming"
	library["author"] = "Muhammad Zhuhry"
	library["typo"] = "typo here"
	fmt.Println(library)
	delete(library, "typo")
	fmt.Println(library)
}
//Function Map
//len(mapName) -> mendapatkan panjang map
//map[key] -> mendapatkan data/value sesuai dengan key yg diinputkan
//map[key] = value -> merubah/menambahkan nilai map sesuai key value yg diinputkan
//make(map[TypeKey]TypeValue) -> membuat map baru
//delete(map, "key") -> menghapuskan data(key value) sesuai dengan key yg diinputkan
|
package main
import "fmt"
// main demonstrates a buffered channel: both sends complete without a
// receiver because the buffer has capacity two, and the values come back out
// in FIFO order.
func main() {
	ch := make(chan int, 2)
	for _, v := range []int{100, 200} {
		ch <- v
	}
	fmt.Println(<-ch)
	fmt.Println(<-ch)
}
|
// Package sdrtime groups utility functions to convert time and ticks.
package sdrtime
// #cgo CFLAGS: -g -Wall
// #cgo LDFLAGS: -lSoapySDR
// #include <SoapySDR/Time.h>
import "C"
// TicksToTimeNs converts a tick count into a time in nanoseconds using the tick rate.
//
// Params:
//   - ticks: an integer tick count
//   - rate: the ticks per second
//
// Return the time in nanoseconds
func TicksToTimeNs(ticks int, rate float64) int {
	// Delegates to the SoapySDR C helper. The C result is a long long, so the
	// conversion back to Go's int may truncate on 32-bit platforms.
	return int(C.SoapySDR_ticksToTimeNs(C.longlong(ticks), C.double(rate)))
}
// TimeNsToTicks converts a time in nanoseconds into a tick count using the tick rate.
//
// Params:
//   - timeNs: time in nanoseconds
//   - rate: the ticks per second
//
// Return the integer tick count
func TimeNsToTicks(timeNs int, rate float64) int {
	// Inverse of TicksToTimeNs; same long long -> int truncation caveat on
	// 32-bit platforms.
	return int(C.SoapySDR_timeNsToTicks(C.longlong(timeNs), C.double(rate)))
}
|
package main
import (
"fmt"
)
// main reads a distance and a fuel volume from stdin and prints the average
// consumption with three decimal places.
func main() {
	var distance, fuel float64
	fmt.Scan(&distance)
	fmt.Scan(&fuel)
	fmt.Printf("%.3f km/l\n", distance/fuel)
}
|
package rank
// Rank describes a named rank tier (e.g. beginner, master...).
type Rank struct {
	// ID is excluded from datastore persistence via the "-" tag.
	ID     string            `datastore:"-"`
	Values map[string]string `json:"values"`
}

// Ranks maps a rank key to its Rank definition.
type Ranks map[string]*Rank
|
package fixer
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
)
// Client is a minimal fixer.io API client: the access key appended to every
// request, the API base URL, the User-Agent header value, and the underlying
// HTTP client.
type Client struct {
	APIKey     string
	BaseURL    *url.URL
	UserAgent  string
	httpClient *http.Client
}
// APIError is the error payload fixer.io embeds in a response when a request
// fails (see LatestResponse.Error).
type APIError struct {
	Code int    `json:"code"`
	Type string `json:"type"`
	Info string `json:"info"`
}
// LatestResponse models the fixer.io /latest endpoint: the latest exchange
// rates keyed by currency code against the given base, plus the API error
// object populated when Success is false.
type LatestResponse struct {
	Success   bool               `json:"success"`
	Timestamp int64              `json:"timestamp"`
	Base      string             `json:"base"`
	Date      string             `json:"date"`
	Rates     map[string]float32 `json:"rates"`
	Error     APIError           `json:"error"`
}
// newRequest builds an *http.Request against the client's base URL: the API
// key goes in the query string, an optional body is JSON-encoded, and the
// Accept/User-Agent (and Content-Type when a body is present) headers are set.
func (c *Client) newRequest(method, path string, body interface{}) (*http.Request, error) {
	// Percent-escape the API key: the previous raw Sprintf would corrupt the
	// query if the key ever contained reserved characters (&, =, %, ...).
	rel := &url.URL{Path: path, RawQuery: fmt.Sprintf("access_key=%s", url.QueryEscape(c.APIKey))}
	u := c.BaseURL.ResolveReference(rel)
	var buf io.ReadWriter
	if body != nil {
		buf = new(bytes.Buffer)
		err := json.NewEncoder(buf).Encode(body)
		if err != nil {
			return nil, err
		}
	}
	req, err := http.NewRequest(method, u.String(), buf)
	if err != nil {
		return nil, err
	}
	if body != nil {
		req.Header.Set("Content-Type", "application/json")
	}
	req.Header.Set("Accept", "application/json")
	req.Header.Set("User-Agent", c.UserAgent)
	return req, nil
}
// do executes req with the client's HTTP client and JSON-decodes the response
// body into v; the *http.Response is returned together with any decode error.
func (c *Client) do(req *http.Request, v interface{}) (*http.Response, error) {
	res, err := c.httpClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	return res, json.NewDecoder(res.Body).Decode(v)
}
// Latest calls the fixer.io /latest endpoint and returns the decoded
// response; the pointer is non-nil even when an error occurred.
func (c *Client) Latest() (*LatestResponse, error) {
	var latest LatestResponse
	req, err := c.newRequest("GET", "/latest", nil)
	if err != nil {
		return nil, err
	}
	_, err = c.do(req, &latest)
	return &latest, err
}
// NewClient builds a fixer.io API client using the given access key. Passing
// a nil httpClient falls back to http.DefaultClient.
func NewClient(apiKey string, httpClient *http.Client) *Client {
	hc := httpClient
	if hc == nil {
		hc = http.DefaultClient
	}
	// The URL is a constant, so the parse error is deliberately ignored.
	base, _ := url.Parse("http://data.fixer.io/api")
	return &Client{
		APIKey:     apiKey,
		BaseURL:    base,
		UserAgent:  "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36",
		httpClient: hc,
	}
}
|
// +build integrate
package postgres
import (
"testing"
"github.com/jackc/pgx/pgtype"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/neuronlabs/neuron-core/query"
)
// TestRepositoryGet tests get repository function. It is an integration test
// (build tag "integrate"): each subtest filters by primary key, runs Get, and
// checks scalar fields plus non-null/null timestamp handling against values
// read directly from the database.
func TestRepositoryGet(t *testing.T) {
	c, db := prepareIntegrateRepository(t)

	defer db.Close()
	defer deleteTestModelTable(t, db)
	t.Run("Valid", func(t *testing.T) {
		s, err := query.NewC(c, &testModel{})
		require.NoError(t, err)
		require.NoError(t, s.FilterField(query.NewFilter(s.Struct().Primary(), query.OpEqual, 2)))
		if assert.NoError(t, s.Get()) {
			m, ok := s.Value.(*testModel)
			if assert.True(t, ok) {
				assert.Equal(t, testModelInstances[1].AttrString, m.AttrString)
				assert.Equal(t, testModelInstances[1].Int, m.Int)
				assert.Equal(t, 2, m.ID)
			}
		}
	})
	t.Run("NonNullTime", func(t *testing.T) {
		tm := &testModel{}
		s, err := query.NewC(c, tm)
		require.NoError(t, err)
		require.NoError(t, s.FilterField(query.NewFilter(s.Struct().Primary(), query.OpEqual, 1)))
		require.NoError(t, s.Get())
		// Cross-check the scanned model against the raw column value.
		ti := pgtype.Timestamp{}
		err = db.QueryRow("SELECT created_at FROM test_models WHERE id = $1;", 1).Scan(&ti)
		require.NoError(t, err)
		if assert.NotNil(t, tm.CreatedAt) && assert.NotEqual(t, ti.Status, pgtype.Null) {
			assert.Equal(t, ti.Time.Unix(), tm.CreatedAt.Unix())
		}
	})
	t.Run("NullTime", func(t *testing.T) {
		tm := &testModel{}
		s, err := query.NewC(c, tm)
		require.NoError(t, err)
		require.NoError(t, s.FilterField(query.NewFilter(s.Struct().Primary(), query.OpEqual, 4)))
		require.NoError(t, s.Get())
		// A NULL column must surface as a nil pointer field on the model.
		ti := pgtype.Date{}
		err = db.QueryRow("SELECT updated_at FROM test_models WHERE id = $1;", 4).Scan(&ti)
		require.NoError(t, err)
		if assert.Equal(t, ti.Status, pgtype.Null) {
			assert.Nil(t, tm.UpdatedAt)
		}
	})
}
|
// Copyright (c) 2018-2020 Double All rights reserved.
// Use of this source code is governed by a MIT style
// license that can be found in the LICENSE file.
package captcha
import (
"crypto/aes"
"crypto/cipher"
"crypto/md5"
"crypto/rand"
"encoding/hex"
"io"
)
// Some returns target unless it is considered empty — nil, the empty string,
// or the untyped int zero — in which case initValue is returned instead.
// Note the zero check only matches an int-typed 0 because of Go's interface
// comparison rules.
func Some(target interface{}, initValue interface{}) interface{} {
	if target == nil || target == "" || target == 0 {
		return initValue
	}
	return target
}
// LeftV returns its first argument unchanged, discarding the second.
func LeftV(left interface{}, right interface{}) interface{} {
	result := left
	return result
}
// encrypt seals data with AES-GCM using a key derived from passphrase via
// createHash, prepending the random nonce to the returned ciphertext so
// decrypt can recover it. Consistent with this package's style, any crypto
// failure panics.
func encrypt(data []byte, passphrase string) []byte {
	block, err := aes.NewCipher([]byte(createHash(passphrase)))
	if err != nil {
		// createHash always yields a 32-byte hex string (AES-256), so this
		// should never fire — but blanking the error (as before) would hide
		// any future key-derivation bug.
		panic(err.Error())
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		panic(err.Error())
	}
	nonce := make([]byte, gcm.NonceSize())
	if _, err = io.ReadFull(rand.Reader, nonce); err != nil {
		panic(err.Error())
	}
	// Seal appends the ciphertext to the nonce: output = nonce || ciphertext.
	return gcm.Seal(nonce, nonce, data, nil)
}
// createHash returns the hex-encoded MD5 digest of key — 32 ASCII
// characters, which encrypt/decrypt use directly as an AES-256 key.
func createHash(key string) string {
	digest := md5.Sum([]byte(key))
	return hex.EncodeToString(digest[:])
}
// decrypt reverses encrypt: it splits the nonce off the front of data and
// opens the AES-GCM ciphertext with a key derived from passphrase. Any
// failure (wrong passphrase, tampered or truncated data) panics, matching
// this package's style.
func decrypt(data []byte, passphrase string) []byte {
	key := []byte(createHash(passphrase))
	block, err := aes.NewCipher(key)
	if err != nil {
		panic(err.Error())
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		panic(err.Error())
	}
	nonceSize := gcm.NonceSize()
	// Guard against truncated input: without this, data[:nonceSize] below
	// panics with an opaque slice-out-of-range error.
	if len(data) < nonceSize {
		panic("captcha: ciphertext shorter than GCM nonce")
	}
	nonce, ciphertext := data[:nonceSize], data[nonceSize:]
	plaintext, err := gcm.Open(nil, nonce, ciphertext, nil)
	if err != nil {
		panic(err.Error())
	}
	return plaintext
}
|
package agora
import (
"fmt"
"math"
)
// pageSize and pageSize8 are the per-page participant counts for dual-camera
// and single-camera sessions respectively.
const (
	pageSize  = 4
	pageSize8 = 8
)

// GetPageSize returns the page size for the camera mode: 4 when isDualCamera
// is 1, otherwise 8.
func GetPageSize(isDualCamera int8) int {
	if isDualCamera == 1 {
		return pageSize
	}
	return pageSize8
}
// GetTotalPage method.
func GetTotalPage(total int, isDualCamera int8) int8 {
if isDualCamera == 1 {
return int8(math.Ceil(float64(total) / float64(pageSize)))
} else {
return int8(math.Ceil(float64(total) / float64(pageSize8)))
}
}
// GetWrittenMeetingNo - 获取笔试考场id
func GetWrittenMeetingNo(env string, examroomID uint, uids []uint, uid uint, isDualCamera int8) string {
// w
pos := int(0)
for _idx, _uid := range uids {
if _uid == uid {
pos = _idx + 1
break
}
}
pageN := GetTotalPage(pos, isDualCamera)
return ToWrittenMeetingNo(env, examroomID, pageN)
}
// GetInviteMeetingNo - 获取面试候考区考场id
func GetInviteMeetingNo(env string, examroomID uint, uids []uint, uid uint, isDualCamera int8) string {
// w
pos := int(0)
for _idx, _uid := range uids {
if _uid == uid {
pos = _idx + 1
break
}
}
pageN := GetTotalPage(pos, isDualCamera)
return ToInviteMeetingNo(env, examroomID, pageN)
}
// ToWrittenMeetingNo assembles a written-exam room id of the form
// "<env>_w_<examroomID>_<pageN>".
func ToWrittenMeetingNo(env string, examroomID uint, pageN int8) string {
	return fmt.Sprint(env, "_w_", examroomID, "_", pageN)
}
// ToInviteMeetingNo assembles an interview waiting-room id of the form
// "<env>_c_<examroomID>_<pageN>".
func ToInviteMeetingNo(env string, examroomID uint, pageN int8) string {
	return fmt.Sprint(env, "_c_", examroomID, "_", pageN)
}
// GetMeetingNo returns the exam-room id "<env>_m_<id>".
func GetMeetingNo(env string, id uint) string {
	return fmt.Sprint(env, "_m_", id)
}
// GetCandidateMeetingNo returns the waiting-room id "<env>_c_<id>".
func GetCandidateMeetingNo(env string, id uint) string {
	return fmt.Sprint(env, "_c_", id)
}
// GetExamineeIDStr returns the examinee id string: uid prefixed with "10".
func GetExamineeIDStr(uid uint) string {
	return fmt.Sprint("10", uid)
}
// GetCandidateExamineeIDStr returns the waiting-room examinee id string:
// uid prefixed with "11".
func GetCandidateExamineeIDStr(uid uint) string {
	return fmt.Sprint("11", uid)
}
// GetServerIDStr returns the fixed participant id used by the server-side
// recording client.
func GetServerIDStr() string {
	// Previously fmt.Sprintf("90") — a format call with no verbs (flagged by
	// go vet); a plain constant is equivalent and allocation-free.
	return "90"
}
|
// Copyright (C) 2018 Storj Labs, Inc.
// See LICENSE for copying information.
package overlay
//go:generate protoc --go_out=plugins=grpc:. overlay.proto
|
package ecr
import (
"fmt"
"testing"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/session"
awsecr "github.com/aws/aws-sdk-go/service/ecr"
"github.com/aws/aws-sdk-go/service/ecr/ecriface"
ecrapi "github.com/awslabs/amazon-ecr-credential-helper/ecr-login/api"
)
// TestRegistryManagerZeroValue verifies the zero-value RegistryManager is
// usable without panicking: URL classification succeeds, while credential
// and tag lookups fail with an error rather than crashing.
func TestRegistryManagerZeroValue(t *testing.T) {
	const repo = "123456789.dkr.ecr.us-west-2.amazonaws.com/widgets"
	var rm RegistryManager
	if !rm.IsECR(repo) {
		t.Fatalf("IsECR: expected true")
	}
	if _, _, err := rm.GetDockerAuthConfig(repo); err == nil {
		t.Fatalf("GetDockerAuthConfig: expected error")
	}
	if _, _, err := rm.AllTagsExist([]string{"foo", "bar"}, repo); err == nil {
		t.Fatalf("AllTagsExist: expected error")
	}
}
// TestRegistryManager_GetDockerAuthConfig checks credential resolution with a
// fake ECR auth client that always returns foo/bar: a valid ECR server URL
// yields those credentials, while a non-ECR URL is rejected with an error.
func TestRegistryManager_GetDockerAuthConfig(t *testing.T) {
	type fields struct {
		ECRAuthClientFactoryFunc func(s *session.Session, cfg *aws.Config) ecrapi.Client
	}
	type args struct {
		serverURL string
	}
	// authfunc injects a stub so no real AWS call is made.
	authfunc := func(s *session.Session, cfg *aws.Config) ecrapi.Client {
		return &FakeECRAuthClient{
			GetCredsFunc: func(serverURL string) (*ecrapi.Auth, error) {
				return &ecrapi.Auth{
					Username: "foo",
					Password: "bar",
				}, nil
			},
		}
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		want    string
		want1   string
		wantErr bool
	}{
		{
			name: "success",
			fields: fields{
				ECRAuthClientFactoryFunc: authfunc,
			},
			args: args{
				serverURL: "123456789.dkr.ecr.us-west-2.amazonaws.com",
			},
			want:  "foo",
			want1: "bar",
		},
		{
			name: "bad ecr url",
			fields: fields{
				ECRAuthClientFactoryFunc: authfunc,
			},
			args: args{
				serverURL: "quay.io/acme/foobar",
			},
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			r := RegistryManager{
				ECRAuthClientFactoryFunc: tt.fields.ECRAuthClientFactoryFunc,
			}
			got, got1, err := r.GetDockerAuthConfig(tt.args.serverURL)
			if (err != nil) != tt.wantErr {
				t.Errorf("GetDockerAuthConfig() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if got != tt.want {
				t.Errorf("GetDockerAuthConfig() got = %v, want %v", got, tt.want)
			}
			if got1 != tt.want1 {
				t.Errorf("GetDockerAuthConfig() got1 = %v, want %v", got1, tt.want1)
			}
		})
	}
}
// TestRegistryManager_IsECR checks classification of repository URLs as
// ECR or not, with and without tags, against ECR, Quay, and Docker Hub forms.
func TestRegistryManager_IsECR(t *testing.T) {
	type fields struct {
		AccessKeyID              string
		SecretAccessKey          string
		ECRAuthClientFactoryFunc func(s *session.Session, cfg *aws.Config) ecrapi.Client
		ecrClientFactoryFunc     func(s *session.Session) ecriface.ECRAPI
	}
	type args struct {
		repo string
	}
	tests := []struct {
		name   string
		fields fields
		args   args
		want   bool
	}{
		{
			name: "ecr repo",
			args: args{repo: "123456789.dkr.ecr.us-west-2.amazonaws.com/widgets"},
			want: true,
		},
		{
			name: "ecr repo with tag",
			args: args{repo: "123456789.dkr.ecr.us-west-2.amazonaws.com/widgets:master"},
			want: true,
		},
		{
			name: "non-ecr repo (quay)",
			args: args{repo: "quay.io/acme/foobar"},
			want: false,
		},
		{
			name: "non-ecr repo (docker hub)",
			args: args{repo: "acme/foobar"},
			want: false,
		},
		{
			name: "non-ecr repo with tag (quay)",
			args: args{repo: "quay.io/acme/foobar:master"},
			want: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			r := RegistryManager{
				AccessKeyID:              tt.fields.AccessKeyID,
				SecretAccessKey:          tt.fields.SecretAccessKey,
				ECRAuthClientFactoryFunc: tt.fields.ECRAuthClientFactoryFunc,
				ECRClientFactoryFunc:     tt.fields.ecrClientFactoryFunc,
			}
			if got := r.IsECR(tt.args.repo); got != tt.want {
				t.Errorf("IsECR() = %v, want %v", got, tt.want)
			}
		})
	}
}
// sliceContentsEqual reports whether s1 and s2 contain the same elements with
// the same multiplicities, ignoring order.
func sliceContentsEqual(s1, s2 []string) bool {
	if len(s1) != len(s2) {
		return false
	}
	// Count occurrences so duplicates compare correctly: the previous
	// set-membership check reported e.g. ["a","b"] equal to ["a","a"].
	counts := make(map[string]int, len(s1))
	for _, s := range s1 {
		counts[s]++
	}
	for _, s := range s2 {
		counts[s]--
		if counts[s] < 0 {
			return false
		}
	}
	return true
}
// TestRegistryManager_AllTagsExist exercises AllTagsExist with a fake ECR
// client. Covered cases: every tag present; one tag missing (an
// ImageNotFound AWS error is expected to be treated as "missing", not as a
// hard error); non-ECR, malformed, and already-tagged repo strings (hard
// errors); and a non-NotFound AWS error (propagated as an error).
func TestRegistryManager_AllTagsExist(t *testing.T) {
	type fields struct {
		ecrClientFactoryFunc func(s *session.Session) ecriface.ECRAPI
	}
	type args struct {
		tags []string
		repo string
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		want    bool     // expected "all tags exist" result
		want1   []string // expected list of missing tags
		wantErr bool
	}{
		{
			name: "all exist",
			fields: fields{
				ecrClientFactoryFunc: func(_ *session.Session) ecriface.ECRAPI {
					return &FakeECRClient{
						// Echo back every requested tag as present.
						DescribeImagesPagesFunc: func(input *awsecr.DescribeImagesInput, fn func(*awsecr.DescribeImagesOutput, bool) bool) error {
							if len(input.ImageIds) != 1 {
								return fmt.Errorf("expected 1 image id: %v", len(input.ImageIds))
							}
							fn(&awsecr.DescribeImagesOutput{
								ImageDetails: []*awsecr.ImageDetail{
									&awsecr.ImageDetail{
										RepositoryName: input.RepositoryName,
										ImageTags: []*string{
											input.ImageIds[0].ImageTag,
										},
									},
								},
							}, true)
							return nil
						},
					}
				},
			},
			args: args{
				tags: []string{"master", "release", "asdf"},
				repo: "123456789.dkr.ecr.us-west-2.amazonaws.com/widgets",
			},
			want:    true,
			want1:   []string{},
			wantErr: false,
		},
		{
			name: "missing tag",
			fields: fields{
				ecrClientFactoryFunc: func(_ *session.Session) ecriface.ECRAPI {
					return &FakeECRClient{
						// "release" answers with ImageNotFound; everything
						// else is reported present.
						DescribeImagesPagesFunc: func(input *awsecr.DescribeImagesInput, fn func(*awsecr.DescribeImagesOutput, bool) bool) error {
							if len(input.ImageIds) != 1 {
								return fmt.Errorf("expected 1 image id: %v", len(input.ImageIds))
							}
							if *input.ImageIds[0].ImageTag == "release" {
								return awserr.New(awsecr.ErrCodeImageNotFoundException, "some message", fmt.Errorf("some err"))
							}
							fn(&awsecr.DescribeImagesOutput{
								ImageDetails: []*awsecr.ImageDetail{
									&awsecr.ImageDetail{
										RepositoryName: input.RepositoryName,
										ImageTags: []*string{
											input.ImageIds[0].ImageTag,
										},
									},
								},
							}, true)
							return nil
						},
					}
				},
			},
			args: args{
				tags: []string{"master", "release", "asdf"},
				repo: "123456789.dkr.ecr.us-west-2.amazonaws.com/widgets",
			},
			want:    false,
			want1:   []string{"release"},
			wantErr: false,
		},
		{
			// Non-ECR hosts cannot be queried: hard error expected.
			name: "non-ecr repo",
			args: args{
				tags: []string{"master", "release", "asdf"},
				repo: "quay.io/acme/widgets",
			},
			want:    false,
			want1:   []string{},
			wantErr: true,
		},
		{
			name: "malformed repo",
			args: args{
				tags: []string{"master", "release", "asdf"},
				repo: "somerandomthing",
			},
			want:    false,
			want1:   []string{},
			wantErr: true,
		},
		{
			// Passing a repo that already carries a tag is rejected.
			name: "repo with tag",
			args: args{
				tags: []string{"master", "release", "asdf"},
				repo: "123456789.dkr.ecr.us-west-2.amazonaws.com/widgets:master",
			},
			want:    false,
			want1:   []string{},
			wantErr: true,
		},
		{
			name: "aws error",
			fields: fields{
				ecrClientFactoryFunc: func(_ *session.Session) ecriface.ECRAPI {
					return &FakeECRClient{
						// A ServerException (unlike ImageNotFound) must be
						// surfaced to the caller as an error.
						DescribeImagesPagesFunc: func(input *awsecr.DescribeImagesInput, fn func(*awsecr.DescribeImagesOutput, bool) bool) error {
							if len(input.ImageIds) != 1 {
								return fmt.Errorf("expected 1 image id: %v", len(input.ImageIds))
							}
							if *input.ImageIds[0].ImageTag == "release" {
								return awserr.New(awsecr.ErrCodeServerException, "some message", fmt.Errorf("some err"))
							}
							fn(&awsecr.DescribeImagesOutput{
								ImageDetails: []*awsecr.ImageDetail{
									&awsecr.ImageDetail{
										RepositoryName: input.RepositoryName,
										ImageTags: []*string{
											input.ImageIds[0].ImageTag,
										},
									},
								},
							}, true)
							return nil
						},
					}
				},
			},
			args: args{
				tags: []string{"master", "release", "asdf"},
				repo: "123456789.dkr.ecr.us-west-2.amazonaws.com/widgets",
			},
			want:    false,
			want1:   []string{},
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			r := RegistryManager{
				ECRClientFactoryFunc: tt.fields.ecrClientFactoryFunc,
			}
			got, got1, err := r.AllTagsExist(tt.args.tags, tt.args.repo)
			if (err != nil) != tt.wantErr {
				t.Errorf("AllTagsExist() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if got != tt.want {
				t.Errorf("AllTagsExist() got = %v, want %v", got, tt.want)
			}
			// Missing-tag list is order-independent.
			if !sliceContentsEqual(got1, tt.want1) {
				t.Errorf("AllTagsExist() got1 = %v, want %v", got1, tt.want1)
			}
		})
	}
}
|
package text
import (
"strings"
"unicode/utf8"
"github.com/Vovan-VE/maze-go/pkg/maze/data"
)
// Exporter implements MazeExporter for text format.
// It embeds *baseConfig, which supplies the wall/in/out character settings
// used while rendering.
type Exporter struct {
	*baseConfig
}
// NewExporter creates a new Exporter initialized with the default base
// configuration.
func NewExporter() *Exporter {
	e := Exporter{newBaseConfig()}
	return &e
}
// ConfigureExport configures an instance with given values.
// It simply delegates to the embedded baseConfig's configure method.
func (c *Exporter) ConfigureExport(config map[string]string) {
	c.configure(config)
}
// ExportMaze renders the given Maze as multi-line text: a character grid of
// (2*height+1) rows by (2*width+1) columns where wall chunks and blanks
// alternate, with the entrance/exit (when present) marked by the configured
// in/out strings.
func (c *Exporter) ExportMaze(maze *data.Maze) string {
	width, height := maze.Width(), maze.Height()
	wallLen := utf8.RuneCountInString(c.wall)
	gap := strings.Repeat(" ", wallLen)

	// Start from a grid completely filled with wall chunks.
	grid := make([][]string, height*2+1)
	for row := range grid {
		cells := make([]string, width*2+1)
		for col := range cells {
			cells[col] = c.wall
		}
		grid[row] = cells
	}

	// Carve out each cell plus any opening in its surrounding walls. Only
	// the outer top/left edges are checked on the border cells; interior
	// openings come from the bottom/right walls of the neighbouring cell.
	for y := 0; y < height; y++ {
		for x := 0; x < width; x++ {
			gy, gx := y*2+1, x*2+1
			grid[gy][gx] = gap
			cell := maze.Cell(x, y)
			if y == 0 && !cell.TopWall {
				grid[gy-1][gx] = gap
			}
			if x == 0 && !cell.LeftWall {
				grid[gy][gx-1] = gap
			}
			if !cell.BottomWall {
				grid[gy+1][gx] = gap
			}
			if !cell.RightWall {
				grid[gy][gx+1] = gap
			}
		}
	}

	// Overlay entrance and exit markers, padded to the wall-chunk width.
	if in := maze.Entrance(); in != nil {
		markDoor(grid, in, repeatStringToLength(c.in, wallLen))
	}
	if out := maze.Exit(); out != nil {
		markDoor(grid, out, repeatStringToLength(c.out, wallLen))
	}

	rendered := make([]string, len(grid))
	for i, cells := range grid {
		rendered[i] = strings.Join(cells, "")
	}
	return strings.Join(rendered, "\n")
}
// markDoor writes str into the grid position corresponding to the door's
// side and offset along that side.
func markDoor(lines [][]string, door *data.DoorPosition, str string) {
	// A door at offset k sits at grid index 2k+1 along its side.
	off := door.Offset()*2 + 1
	var row, col int
	switch door.Side() {
	case data.TOP:
		row, col = 0, off
	case data.RIGHT:
		row, col = off, len(lines[0])-1
	case data.BOTTOM:
		row, col = len(lines)-1, off
	case data.LEFT:
		row, col = off, 0
	}
	lines[row][col] = str
}
|
package browser
import (
	"fmt"

	"github.com/golang/freetype/truetype"
	"github.com/llgcode/draw2d"
)
type FontCache map[string]*truetype.Font
// Load returns the cached font for fd.Name, falling back to the "roboto"
// font when the requested name is absent. Unlike the previous version,
// which could return (nil, nil) when the fallback was also missing, an
// explicit error is returned if neither font is cached.
func (f FontCache) Load(fd draw2d.FontData) (*truetype.Font, error) {
	if font, ok := f[fd.Name]; ok {
		return font, nil
	}
	if fallback, ok := f["roboto"]; ok {
		return fallback, nil
	}
	return nil, fmt.Errorf("font %q not cached and no %q fallback available", fd.Name, "roboto")
}
// Store caches tf under the font data's name. FontCache is a map type, so a
// value receiver still mutates the shared underlying map; using one keeps
// the method set consistent with Load (and pointer receivers keep working,
// so existing callers are unaffected).
func (f FontCache) Store(fd draw2d.FontData, tf *truetype.Font) {
	f[fd.Name] = tf
}
|
package fixtures
// Useful SQL queries:
// ---
// Generate 10 UUID v4
// SELECT uuid_generate_v4() FROM generate_series(1,10);
//
// Get timezones
// Select * from pg_timezone_names()
import (
"database/sql"
"time"
models "github.com/gomeetups/gomeetups/models"
)
// Addresses contains address fixtures for groups and spaces.
// Each entry links to its owner via BelongsTo and carries nullable address
// fields wrapped in models.NullString.
var Addresses = []models.Address{
	{
		AddressID:        "e7b33956-c64c-4643-830d-e681663528e5",
		AddressType:      "group",
		BelongsTo:        "6db72c07-1fdd-480e-b9af-7dd96efa4986", // group: GoLang NYC - Manhattan
		Lat:              40.754336,
		Long:             -73.968502,
		AddressLine1:     models.NullString{sql.NullString{String: "928 2nd Ave", Valid: true}},
		FormattedAddress: models.NullString{sql.NullString{String: "928 2nd Ave\nNew York, NY 10022\n", Valid: true}},
		Country:          models.NullString{sql.NullString{String: "US", Valid: true}},
		State:            models.NullString{sql.NullString{String: "NY", Valid: true}},
		City:             models.NullString{sql.NullString{String: "New York", Valid: true}},
		ZipCode:          models.NullString{sql.NullString{String: "10022", Valid: true}},
		TimeZone:         models.NullString{sql.NullString{String: "America/New_York", Valid: true}},
		MapsLink:         models.NullString{sql.NullString{String: "https://www.google.com/maps/place/Sip+Sak/@40.754364,-73.9707237,17z/data=!3m1!4b1!4m5!3m4!1s0x89c258e2f309a3e1:0x349646006d6ae1a2!8m2!3d40.754364!4d-73.968535", Valid: true}},
		CreatedAt:        time.Date(2016, time.December, 20, 7, 10, 0, 0, newYork),
		CreatedBy:        "d468bd91-39a2-46a1-99c3-4c4b0f20e78a", // user: aydin
		UpdatedAt:        time.Date(2016, time.December, 20, 7, 10, 0, 0, newYork),
		UpdatedBy:        "d468bd91-39a2-46a1-99c3-4c4b0f20e78a", // user: aydin
	},
	{
		AddressID:   "15baeb85-ecee-4363-86c8-0133cea23809",
		AddressType: "group",
		// NOTE(review): this comment previously said "GoLang NYC - Manhattan",
		// the same label as the first fixture, but the ID differs and the
		// address is in Sunnyside/Queens — confirm which group this is.
		BelongsTo:        "1d7bffd6-80ab-48f1-b35f-96378f0e78a8",
		Lat:              40.74387,
		Long:             -73.9221448,
		AddressLine1:     models.NullString{sql.NullString{String: "42-03 Queens Blvd", Valid: true}},
		FormattedAddress: models.NullString{sql.NullString{String: "42-03 Queens Blvd\nSunnyside, NY 11104\n", Valid: true}},
		Country:          models.NullString{sql.NullString{String: "US", Valid: true}},
		State:            models.NullString{sql.NullString{String: "NY", Valid: true}},
		City:             models.NullString{sql.NullString{String: "New York", Valid: true}},
		ZipCode:          models.NullString{sql.NullString{String: "11104", Valid: true}},
		TimeZone:         models.NullString{sql.NullString{String: "America/New_York", Valid: true}},
		MapsLink:         models.NullString{sql.NullString{String: "https://www.google.com/maps/place/42-03+Queens+Blvd,+Sunnyside,+NY+11104/@40.743874,-73.9243335,17z/data=!3m1!4b1!4m5!3m4!1s0x89c25ed9b2e3016b:0xd8eac585eecdeac5!8m2!3d40.74387!4d-73.9221448", Valid: true}},
		CreatedAt:        time.Date(2016, time.December, 20, 8, 30, 0, 0, newYork),
		CreatedBy:        "478f4b8e-b231-4efe-828b-f63b877fbbe3", // user: arc
		UpdatedAt:        time.Date(2016, time.December, 20, 8, 30, 0, 0, newYork),
		UpdatedBy:        "478f4b8e-b231-4efe-828b-f63b877fbbe3", // user: arc
	},
	{
		AddressID:        "bd0549d6-2eb0-43af-a9ea-dd5e824d479b",
		AddressType:      "space",
		BelongsTo:        "863137fc-3cee-4e8c-ae49-0375fcbe2707", // space: Moma NYC
		Lat:              40.7614124038247,
		Long:             -73.9775069992493,
		AddressLine1:     models.NullString{sql.NullString{String: "11 W 53rd St", Valid: true}},
		FormattedAddress: models.NullString{sql.NullString{String: "Midtown West\n11 W 53rd St\nNew York, NY 10019", Valid: true}},
		Country:          models.NullString{sql.NullString{String: "US", Valid: true}},
		State:            models.NullString{sql.NullString{String: "NY", Valid: true}},
		City:             models.NullString{sql.NullString{String: "New York", Valid: true}},
		ZipCode:          models.NullString{sql.NullString{String: "10019", Valid: true}},
		TimeZone:         models.NullString{sql.NullString{String: "America/New_York", Valid: true}},
		MapsLink:         models.NullString{sql.NullString{String: "https://www.google.com/maps/place/The+Museum+of+Modern+Art/@40.7614367,-73.9798103,17z/data=!3m2!4b1!5s0x89c258fbd5f614c7:0x7edf0a3af8aa9fae!4m5!3m4!1s0x89c258f97bdb102b:0xea9f8fc0b3ffff55!8m2!3d40.7614327!4d-73.9776216", Valid: true}},
		CreatedAt:        time.Date(2016, time.December, 20, 8, 30, 0, 0, newYork),
		CreatedBy:        "478f4b8e-b231-4efe-828b-f63b877fbbe3", // user: arc
		UpdatedAt:        time.Date(2016, time.December, 20, 8, 30, 0, 0, newYork),
		UpdatedBy:        "478f4b8e-b231-4efe-828b-f63b877fbbe3", // user: arc
	},
	{
		AddressID:        "bc86ea5d-6ef8-4a8d-a585-4a0161ebdf99",
		AddressType:      "space",
		BelongsTo:        "77ab205b-494e-4012-a1f8-96a38e1c3e52", // space: Bohemian Hall & Beer Garden
		Lat:              40.77273,
		Long:             -73.915772,
		AddressLine1:     models.NullString{sql.NullString{String: "2919 24th Ave", Valid: true}},
		FormattedAddress: models.NullString{sql.NullString{String: "Astoria\n2919 24th Ave\nAstoria, NY 11102", Valid: true}},
		Country:          models.NullString{sql.NullString{String: "US", Valid: true}},
		State:            models.NullString{sql.NullString{String: "NY", Valid: true}},
		City:             models.NullString{sql.NullString{String: "New York", Valid: true}},
		ZipCode:          models.NullString{sql.NullString{String: "11102", Valid: true}},
		TimeZone:         models.NullString{sql.NullString{String: "America/New_York", Valid: true}},
		MapsLink:         models.NullString{sql.NullString{String: "https://www.google.com/maps/place/Bohemian+Hall+and+Beer+Garden/@40.772838,-73.915595,15z/data=!4m2!3m1!1s0x0:0x73ceb523ea2222e8?sa=X&ved=0ahUKEwjh0LzGhJDRAhWLy4MKHa2aBUAQ_BIIezAR", Valid: true}},
		CreatedAt:        time.Date(2016, time.December, 20, 8, 30, 0, 0, newYork),
		CreatedBy:        "478f4b8e-b231-4efe-828b-f63b877fbbe3", // user: arc
		UpdatedAt:        time.Date(2016, time.December, 20, 8, 30, 0, 0, newYork),
		UpdatedBy:        "478f4b8e-b231-4efe-828b-f63b877fbbe3", // user: arc
	},
}
|
/*
* @lc app=leetcode.cn id=98 lang=golang
*
* [98] 验证二叉搜索树
*/
package main
import "math"
// TreeNode is a binary tree node with an int value and left/right children.
type TreeNode struct {
	Val   int
	Left  *TreeNode
	Right *TreeNode
}
/*
直接判断左右值
func isValidBST(root *TreeNode) bool {
return isValidbst(root, math.MinInt64, math.MaxInt64)
}
func isValidbst(root *TreeNode, min, max float64) bool {
if root == nil {
return true
}
	v := root.Val
return v < max && v > min && isValidbst(root.Left, min, v) && isValidbst(root.Right, v, max)
}
*/
// In-order traversal of a BST must visit values in strictly increasing order.
// @lc code=start
// validBST walks the tree in-order, comparing each node's value against
// *pre (the most recently visited value) and advancing *pre as it goes.
// NOTE(review): a node holding math.MinInt64 would fail the <= check against
// the initial sentinel — acceptable for LeetCode's value range, but confirm
// if reused elsewhere.
func validBST(root *TreeNode, pre *int) bool {
	if root == nil {
		return true
	}
	leftOK := validBST(root.Left, pre)
	if !leftOK || root.Val <= *pre {
		return false
	}
	*pre = root.Val
	return validBST(root.Right, pre)
}
// isValidBST reports whether the tree rooted at root is a valid binary
// search tree, seeding the in-order walk with the smallest possible int.
func isValidBST(root *TreeNode) bool {
	prev := math.MinInt64
	return validBST(root, &prev)
}
// @lc code=end
|
package main
import (
"fmt"
"sort"
"strconv"
)
// main reads whitespace-separated tokens from stdin until the sentinel "X"
// (or end of input), keeping every number read so far sorted and printing
// the sorted list after each insertion.
func main() {
	nums := make([]int, 0, 3)
	for {
		var token string
		if _, err := fmt.Scan(&token); err != nil {
			// EOF or read error: stop. The original ignored this error and
			// would spin forever on EOF with a stale/empty token.
			break
		}
		if token == "X" {
			break
		}
		n, err := strconv.Atoi(token)
		if err != nil {
			// Skip tokens that are neither numbers nor the sentinel instead
			// of silently appending 0 (the original discarded this error).
			continue
		}
		nums = append(nums, n)
		sort.Ints(nums)
		for _, v := range nums {
			fmt.Printf("%d ", v)
		}
		fmt.Print("\n")
	}
}
|
package sigma
import (
"math"
"github.com/yash-ontic/morgoth"
"github.com/yash-ontic/morgoth/counter"
)
// Sigma is a simple fingerprinter that computes both the mean and the
// standard deviation of a window. Fingerprints are compared to see whether
// the means are more than `deviations` standard deviations apart.
type Sigma struct {
	deviations float64 // number of standard deviations allowed between means
}
// New returns a Sigma fingerprinter that treats means further apart than
// the given number of standard deviations as non-matching.
func New(deviations float64) *Sigma {
	s := Sigma{deviations: deviations}
	return &s
}
// Fingerprint summarizes the window's data by its mean together with an
// absolute match threshold of s.deviations standard deviations.
func (s *Sigma) Fingerprint(window *morgoth.Window) morgoth.Fingerprint {
	m, sd := calcStats(window.Data)
	return SigmaFingerprint{
		mean:      m,
		threshold: s.deviations * sd,
	}
}
// calcStats returns the mean and population standard deviation of xs,
// computed in a single pass using Welford's online algorithm for numerical
// stability. An empty input now yields (0, 0) instead of NaN from the 0/0
// division in the previous version.
func calcStats(xs []float64) (mean, std float64) {
	if len(xs) == 0 {
		return 0, 0
	}
	var n, m2 float64
	for _, x := range xs {
		n++
		delta := x - mean
		mean += delta / n
		// m2 accumulates the sum of squared deviations incrementally.
		m2 += delta * (x - mean)
	}
	std = math.Sqrt(m2 / n)
	return
}
// SigmaFingerprint is the fingerprint produced by Sigma: a window mean plus
// the absolute distance another mean may differ by and still match.
type SigmaFingerprint struct {
	mean      float64 // mean of the window's data
	threshold float64 // deviations * std of the window's data
}
// IsMatch reports whether other is a SigmaFingerprint whose mean lies
// within this fingerprint's threshold; any other type never matches.
func (f SigmaFingerprint) IsMatch(other counter.Countable) bool {
	fp, ok := other.(SigmaFingerprint)
	if !ok {
		return false
	}
	return math.Abs(f.mean-fp.mean) <= f.threshold
}
|
package urlutil
import (
"net/url"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestIsRedirectAllowed feeds a corpus of known open-redirect bypass
// payloads to IsRedirectAllowed with "example.com" as the only allowed
// host, and asserts every parseable payload is rejected. Payloads that
// url.Parse cannot handle are skipped: they can never reach redirect logic
// through a parsed URL.
func TestIsRedirectAllowed(t *testing.T) {
	// from: https://raw.githubusercontent.com/random-robbie/open-redirect/master/payloads.txt
	rawurls := strings.Fields(`
&%0d%0a1Location:https://google.com
@google.com
@https://www.google.com
%2f%2e%2e
crlftest%0dLocation:https://google.com
/https:/%5cblackfan.ru/
//example.com@google.com/%2f..
///google.com/%2f..
///example.com@google.com/%2f..
////google.com/%2f..
////example.com@google.com/%2f..
/x:1/:///%01javascript:alert(document.cookie)/
https://google.com/%2f..
https://example.com@google.com/%2f..
/https://google.com/%2f..
/https://example.com@google.com/%2f.//example.com@google.com/%2f..
///google.com/%2f..
///example.com@google.com/%2f..
////google.com/%2f..
////example.com@google.com/%2f..
https://google.com/%2f..
https://example.com@google.com/%2f..
/https://google.com/%2f..
/https://example.com@google.com/%2f..
//google.com/%2f%2e%2e
//example.com@google.com/%2f%2e%2e
///google.com/%2f%2e%2e
///example.com@google.com/%2f%2e%2e
////google.com/%2f%2e%2e
////example.com@google.com/%2f%2e%2e
https://google.com/%2f%2e%2e
https://example.com@google.com/%2f%2e%2e
/https://google.com/%2f%2e%2e
/https://example.com@google.com/%2f%2e%2e
//google.com/
//example.com@google.com/
///google.com/
///example.com@google.com/
////google.com/
////example.com@google.com/
https://google.com/
https://example.com@google.com/
/https://google.com/
/https://example.com@google.com/
//google.com//
//example.com@google.com//
///google.com//
///example.com@google.com//
////google.com//
////example.com@google.com//
https://google.com//
https://example.com@google.com//
//https://google.com//
//https://example.com@google.com//
//google.com/%2e%2e%2f
//example.com@google.com/%2e%2e%2f
///google.com/%2e%2e%2f
///example.com@google.com/%2e%2e%2f
////google.com/%2e%2e%2f
////example.com@google.com/%2e%2e%2f
https://google.com/%2e%2e%2f
https://example.com@google.com/%2e%2e%2f
//https://google.com/%2e%2e%2f
//https://example.com@google.com/%2e%2e%2f
///google.com/%2e%2e
///example.com@google.com/%2e%2e
////google.com/%2e%2e
////example.com@google.com/%2e%2e
https:///google.com/%2e%2e
https:///example.com@google.com/%2e%2e
//https:///google.com/%2e%2e
//example.com@https:///google.com/%2e%2e
/https://google.com/%2e%2e
/https://example.com@google.com/%2e%2e
///google.com/%2f%2e%2e
///example.com@google.com/%2f%2e%2e
////google.com/%2f%2e%2e
////example.com@google.com/%2f%2e%2e
https:///google.com/%2f%2e%2e
https:///example.com@google.com/%2f%2e%2e
/https://google.com/%2f%2e%2e
/https://example.com@google.com/%2f%2e%2e
/https:///google.com/%2f%2e%2e
/https:///example.com@google.com/%2f%2e%2e
/%09/google.com
/%09/example.com@google.com
//%09/google.com
//%09/example.com@google.com
///%09/google.com
///%09/example.com@google.com
////%09/google.com
////%09/example.com@google.com
https://%09/google.com
https://%09/example.com@google.com
/%5cgoogle.com
/%5cexample.com@google.com
//%5cgoogle.com
//%5cexample.com@google.com
///%5cgoogle.com
///%5cexample.com@google.com
////%5cgoogle.com
////%5cexample.com@google.com
https://%5cgoogle.com
https://%5cexample.com@google.com
/https://%5cgoogle.com
/https://%5cexample.com@google.com
https://google.com
https://example.com@google.com
//google.com
https:google.com
//google%E3%80%82com
\/\/google.com/
/\/google.com/
//google%00.com
http://0xd8.0x3a.0xd6.0xce
http://example.com@0xd8.0x3a.0xd6.0xce
http://3H6k7lIAiqjfNeN@0xd8.0x3a.0xd6.0xce
http://XY>.7d8T\205pZM@0xd8.0x3a.0xd6.0xce
http://0xd83ad6ce
http://example.com@0xd83ad6ce
http://3H6k7lIAiqjfNeN@0xd83ad6ce
http://XY>.7d8T\205pZM@0xd83ad6ce
http://3627734734
http://example.com@3627734734
http://3H6k7lIAiqjfNeN@3627734734
http://XY>.7d8T\205pZM@3627734734
http://472.314.470.462
http://example.com@472.314.470.462
http://3H6k7lIAiqjfNeN@472.314.470.462
http://XY>.7d8T\205pZM@472.314.470.462
http://0330.072.0326.0316
http://example.com@0330.072.0326.0316
http://3H6k7lIAiqjfNeN@0330.072.0326.0316
http://XY>.7d8T\205pZM@0330.072.0326.0316
http://00330.00072.0000326.00000316
http://example.com@00330.00072.0000326.00000316
http://3H6k7lIAiqjfNeN@00330.00072.0000326.00000316
http://XY>.7d8T\205pZM@00330.00072.0000326.00000316
http://[::216.58.214.206]
http://example.com@[::216.58.214.206]
http://3H6k7lIAiqjfNeN@[::216.58.214.206]
http://XY>.7d8T\205pZM@[::216.58.214.206]
http://[::ffff:216.58.214.206]
http://example.com@[::ffff:216.58.214.206]
http://3H6k7lIAiqjfNeN@[::ffff:216.58.214.206]
http://XY>.7d8T\205pZM@[::ffff:216.58.214.206]
http://0xd8.072.54990
http://example.com@0xd8.072.54990
http://3H6k7lIAiqjfNeN@0xd8.072.54990
http://XY>.7d8T\205pZM@0xd8.072.54990
http://0xd8.3856078
http://example.com@0xd8.3856078
http://3H6k7lIAiqjfNeN@0xd8.3856078
http://XY>.7d8T\205pZM@0xd8.3856078
http://00330.3856078
http://example.com@00330.3856078
http://3H6k7lIAiqjfNeN@00330.3856078
http://XY>.7d8T\205pZM@00330.3856078
http://00330.0x3a.54990
http://example.com@00330.0x3a.54990
http://3H6k7lIAiqjfNeN@00330.0x3a.54990
http://XY>.7d8T\205pZM@00330.0x3a.54990
http:0xd8.0x3a.0xd6.0xce
http:example.com@0xd8.0x3a.0xd6.0xce
http:3H6k7lIAiqjfNeN@0xd8.0x3a.0xd6.0xce
http:XY>.7d8T\205pZM@0xd8.0x3a.0xd6.0xce
http:0xd83ad6ce
http:example.com@0xd83ad6ce
http:3H6k7lIAiqjfNeN@0xd83ad6ce
http:XY>.7d8T\205pZM@0xd83ad6ce
http:3627734734
http:example.com@3627734734
http:3H6k7lIAiqjfNeN@3627734734
http:XY>.7d8T\205pZM@3627734734
http:472.314.470.462
http:example.com@472.314.470.462
http:3H6k7lIAiqjfNeN@472.314.470.462
http:XY>.7d8T\205pZM@472.314.470.462
http:0330.072.0326.0316
http:example.com@0330.072.0326.0316
http:3H6k7lIAiqjfNeN@0330.072.0326.0316
http:XY>.7d8T\205pZM@0330.072.0326.0316
http:00330.00072.0000326.00000316
http:example.com@00330.00072.0000326.00000316
http:3H6k7lIAiqjfNeN@00330.00072.0000326.00000316
http:XY>.7d8T\205pZM@00330.00072.0000326.00000316
http:[::216.58.214.206]
http:example.com@[::216.58.214.206]
http:3H6k7lIAiqjfNeN@[::216.58.214.206]
http:XY>.7d8T\205pZM@[::216.58.214.206]
http:[::ffff:216.58.214.206]
http:example.com@[::ffff:216.58.214.206]
http:3H6k7lIAiqjfNeN@[::ffff:216.58.214.206]
http:XY>.7d8T\205pZM@[::ffff:216.58.214.206]
http:0xd8.072.54990
http:example.com@0xd8.072.54990
http:3H6k7lIAiqjfNeN@0xd8.072.54990
http:XY>.7d8T\205pZM@0xd8.072.54990
http:0xd8.3856078
http:example.com@0xd8.3856078
http:3H6k7lIAiqjfNeN@0xd8.3856078
http:XY>.7d8T\205pZM@0xd8.3856078
http:00330.3856078
http:example.com@00330.3856078
http:3H6k7lIAiqjfNeN@00330.3856078
http:XY>.7d8T\205pZM@00330.3856078
http:00330.0x3a.54990
http:example.com@00330.0x3a.54990
http:3H6k7lIAiqjfNeN@00330.0x3a.54990
http:XY>.7d8T\205pZM@00330.0x3a.54990
〱google.com
〵google.com
ゝgoogle.com
ーgoogle.com
ーgoogle.com
/〱google.com
/〵google.com
/ゝgoogle.com
/ーgoogle.com
/ーgoogle.com
%68%74%74%70%3a%2f%2f%67%6f%6f%67%6c%65%2e%63%6f%6d
http://%67%6f%6f%67%6c%65%2e%63%6f%6d
<>//google.com
//google.com\@example.com
https://:@google.com\@example.com
http://google.com:80#@example.com/
http://google.com:80?@example.com/
http://3H6k7lIAiqjfNeN@example.com+@google.com/
http://XY>.7d8T\205pZM@example.com+@google.com/
http://3H6k7lIAiqjfNeN@example.com@google.com/
http://XY>.7d8T\205pZM@example.com@google.com/
http://example.com+&@google.com#+@example.com/
http://google.com\texample.com/
//google.com:80#@example.com/
//google.com:80?@example.com/
//3H6k7lIAiqjfNeN@example.com+@google.com/
//XY>.7d8T\205pZM@example.com+@google.com/
//3H6k7lIAiqjfNeN@example.com@google.com/
//XY>.7d8T\205pZM@example.com@google.com/
//example.com+&@google.com#+@example.com/
//google.com\texample.com/
//;@google.com
http://;@google.com
@google.com
http://google.com%2f%2f.example.com/
http://google.com%5c%5c.example.com/
http://google.com%3F.example.com/
http://google.com%23.example.com/
http://example.com:80%40google.com/
http://example.com%2egoogle.com/
/https:/%5cgoogle.com/
/http://google.com
/%2f%2fgoogle.com
/google.com/%2f%2e%2e
/http:/google.com
/.google.com
///\;@google.com
///google.com
/////google.com/
/////google.com
//google.com/%2f%2e%2e
//example.com@google.com/%2f%2e%2e
///google.com/%2f%2e%2e
///example.com@google.com/%2f%2e%2e
////google.com/%2f%2e%2e
////example.com@google.com/%2f%2e%2e
https://google.com/%2f%2e%2e
https://example.com@google.com/%2f%2e%2e
/https://google.com/%2f%2e%2e
/https://example.com@google.com/%2f%2e%2e
//google.com/
//example.com@google.com/
///google.com/
///example.com@google.com/
////google.com/
////example.com@google.com/
https://google.com/
https://example.com@google.com/
/https://google.com/
/https://example.com@google.com/
//google.com//
//example.com@google.com//
///google.com//
///example.com@google.com//
////google.com//
////example.com@google.com//
https://google.com//
https://example.com@google.com//
//https://google.com//
//https://example.com@google.com//
//google.com/%2e%2e%2f
//example.com@google.com/%2e%2e%2f
///google.com/%2e%2e%2f
///example.com@google.com/%2e%2e%2f
////google.com/%2e%2e%2f
////example.com@google.com/%2e%2e%2f
https://google.com/%2e%2e%2f
https://example.com@google.com/%2e%2e%2f
//https://google.com/%2e%2e%2f
//https://example.com@google.com/%2e%2e%2f
///google.com/%2e%2e
///example.com@google.com/%2e%2e
////google.com/%2e%2e
////example.com@google.com/%2e%2e
https:///google.com/%2e%2e
https:///example.com@google.com/%2e%2e
//https:///google.com/%2e%2e
//example.com@https:///google.com/%2e%2e
/https://google.com/%2e%2e
/https://example.com@google.com/%2e%2e
///google.com/%2f%2e%2e
///example.com@google.com/%2f%2e%2e
////google.com/%2f%2e%2e
////example.com@google.com/%2f%2e%2e
https:///google.com/%2f%2e%2e
https:///example.com@google.com/%2f%2e%2e
/https://google.com/%2f%2e%2e
/https://example.com@google.com/%2f%2e%2e
/https:///google.com/%2f%2e%2e
/https:///example.com@google.com/%2f%2e%2e
/%09/google.com
/%09/example.com@google.com
//%09/google.com
//%09/example.com@google.com
///%09/google.com
///%09/example.com@google.com
////%09/google.com
////%09/example.com@google.com
https://%09/google.com
https://%09/example.com@google.com
/%5cgoogle.com
/%5cexample.com@google.com
//%5cgoogle.com
//%5cexample.com@google.com
///%5cgoogle.com
///%5cexample.com@google.com
////%5cgoogle.com
////%5cexample.com@google.com
https://%5cgoogle.com
https://%5cexample.com@google.com
/https://%5cgoogle.com
/https://%5cexample.com@google.com
https://google.com
https://example.com@google.com
//google.com
https:google.com
//google%E3%80%82com
\/\/google.com/
/\/google.com/
//google%00.com
javascript://example.com?%a0alert%281%29
http://0xd8.0x3a.0xd6.0xce
http://example.com@0xd8.0x3a.0xd6.0xce
http://3H6k7lIAiqjfNeN@0xd8.0x3a.0xd6.0xce
http://XY>.7d8T\205pZM@0xd8.0x3a.0xd6.0xce
http://0xd83ad6ce
http://example.com@0xd83ad6ce
http://3H6k7lIAiqjfNeN@0xd83ad6ce
http://XY>.7d8T\205pZM@0xd83ad6ce
http://3627734734
http://example.com@3627734734
http://3H6k7lIAiqjfNeN@3627734734
http://XY>.7d8T\205pZM@3627734734
http://472.314.470.462
http://example.com@472.314.470.462
http://3H6k7lIAiqjfNeN@472.314.470.462
http://XY>.7d8T\205pZM@472.314.470.462
http://0330.072.0326.0316
http://example.com@0330.072.0326.0316
http://3H6k7lIAiqjfNeN@0330.072.0326.0316
http://XY>.7d8T\205pZM@0330.072.0326.0316
http://00330.00072.0000326.00000316
http://example.com@00330.00072.0000326.00000316
http://3H6k7lIAiqjfNeN@00330.00072.0000326.00000316
http://XY>.7d8T\205pZM@00330.00072.0000326.00000316
http://[::216.58.214.206]
http://example.com@[::216.58.214.206]
http://3H6k7lIAiqjfNeN@[::216.58.214.206]
http://XY>.7d8T\205pZM@[::216.58.214.206]
http://[::ffff:216.58.214.206]
http://example.com@[::ffff:216.58.214.206]
http://3H6k7lIAiqjfNeN@[::ffff:216.58.214.206]
http://XY>.7d8T\205pZM@[::ffff:216.58.214.206]
http://0xd8.072.54990
http://example.com@0xd8.072.54990
http://3H6k7lIAiqjfNeN@0xd8.072.54990
http://XY>.7d8T\205pZM@0xd8.072.54990
http://0xd8.3856078
http://example.com@0xd8.3856078
http://3H6k7lIAiqjfNeN@0xd8.3856078
http://XY>.7d8T\205pZM@0xd8.3856078
http://00330.3856078
http://example.com@00330.3856078
http://3H6k7lIAiqjfNeN@00330.3856078
http://XY>.7d8T\205pZM@00330.3856078
http://00330.0x3a.54990
http://example.com@00330.0x3a.54990
http://3H6k7lIAiqjfNeN@00330.0x3a.54990
http://XY>.7d8T\205pZM@00330.0x3a.54990
http:0xd8.0x3a.0xd6.0xce
http:example.com@0xd8.0x3a.0xd6.0xce
http:3H6k7lIAiqjfNeN@0xd8.0x3a.0xd6.0xce
http:XY>.7d8T\205pZM@0xd8.0x3a.0xd6.0xce
http:0xd83ad6ce
http:example.com@0xd83ad6ce
http:3H6k7lIAiqjfNeN@0xd83ad6ce
http:XY>.7d8T\205pZM@0xd83ad6ce
http:3627734734
http:example.com@3627734734
http:3H6k7lIAiqjfNeN@3627734734
http:XY>.7d8T\205pZM@3627734734
http:472.314.470.462
http:example.com@472.314.470.462
http:3H6k7lIAiqjfNeN@472.314.470.462
http:XY>.7d8T\205pZM@472.314.470.462
http:0330.072.0326.0316
http:example.com@0330.072.0326.0316
http:3H6k7lIAiqjfNeN@0330.072.0326.0316
http:XY>.7d8T\205pZM@0330.072.0326.0316
http:00330.00072.0000326.00000316
http:example.com@00330.00072.0000326.00000316
http:3H6k7lIAiqjfNeN@00330.00072.0000326.00000316
http:XY>.7d8T\205pZM@00330.00072.0000326.00000316
http:[::216.58.214.206]
http:example.com@[::216.58.214.206]
http:3H6k7lIAiqjfNeN@[::216.58.214.206]
http:XY>.7d8T\205pZM@[::216.58.214.206]
http:[::ffff:216.58.214.206]
http:example.com@[::ffff:216.58.214.206]
http:3H6k7lIAiqjfNeN@[::ffff:216.58.214.206]
http:XY>.7d8T\205pZM@[::ffff:216.58.214.206]
http:0xd8.072.54990
http:example.com@0xd8.072.54990
http:3H6k7lIAiqjfNeN@0xd8.072.54990
http:XY>.7d8T\205pZM@0xd8.072.54990
http:0xd8.3856078
http:example.com@0xd8.3856078
http:3H6k7lIAiqjfNeN@0xd8.3856078
http:XY>.7d8T\205pZM@0xd8.3856078
http:00330.3856078
http:example.com@00330.3856078
http:3H6k7lIAiqjfNeN@00330.3856078
http:XY>.7d8T\205pZM@00330.3856078
http:00330.0x3a.54990
http:example.com@00330.0x3a.54990
http:3H6k7lIAiqjfNeN@00330.0x3a.54990
http:XY>.7d8T\205pZM@00330.0x3a.54990
〱google.com
〵google.com
ゝgoogle.com
ーgoogle.com
ーgoogle.com
/〱google.com
/〵google.com
/ゝgoogle.com
/ーgoogle.com
/ーgoogle.com
%68%74%74%70%3a%2f%2f%67%6f%6f%67%6c%65%2e%63%6f%6d
http://%67%6f%6f%67%6c%65%2e%63%6f%6d
<>javascript:alert(1);
<>//google.com
//google.com\@example.com
https://:@google.com\@example.com
ja\nva\tscript\r:alert(1)
\j\av\a\s\cr\i\pt\:\a\l\ert\(1\)
\152\141\166\141\163\143\162\151\160\164\072alert(1)
http://google.com:80#@example.com/
http://google.com:80?@example.com/
http://3H6k7lIAiqjfNeN@example.com+@google.com/
http://XY>.7d8T\205pZM@example.com+@google.com/
http://3H6k7lIAiqjfNeN@example.com@google.com/
http://XY>.7d8T\205pZM@example.com@google.com/
http://example.com+&@google.com#+@example.com/
http://google.com\texample.com/
//google.com:80#@example.com/
//google.com:80?@example.com/
//3H6k7lIAiqjfNeN@example.com+@google.com/
//XY>.7d8T\205pZM@example.com+@google.com/
//3H6k7lIAiqjfNeN@example.com@google.com/
//XY>.7d8T\205pZM@example.com@google.com/
//example.com+&@google.com#+@example.com/
//google.com\texample.com/
//;@google.com
http://;@google.com
javascript://https://example.com/?z=%0Aalert(1)
http://google.com%2f%2f.example.com/
http://google.com%5c%5c.example.com/
http://google.com%3F.example.com/
http://google.com%23.example.com/
http://example.com:80%40google.com/
http://example.com%2egoogle.com/
/https:/%5cgoogle.com/
/http://google.com
/%2f%2fgoogle.com
/google.com/%2f%2e%2e
/http:/google.com
/.google.com
///\;@google.com
///google.com
/////google.com/
/////google.com
`)
	for _, rawurl := range rawurls {
		u, err := url.Parse(rawurl)
		if err != nil {
			// Unparseable payloads cannot be used as redirect targets.
			continue
		}
		assert.False(t, IsRedirectAllowed(u, []string{"example.com"}), "for %s expected false",
			u.String())
	}
}
// TestIsLoopback checks IsLoopback against a mix of loopback and
// non-loopback hosts, including IPv6 addresses, arbitrary 127.0.0.0/8
// addresses, and localhost-lookalike subdomains.
func TestIsLoopback(t *testing.T) {
	cases := []struct {
		rawurl string
		value  bool
	}{
		{"http://localhost", true},
		{"http://test.localhost.pomerium.io", false},
		{"http://127.0.0.1:9999", true},
		{"http://127.22.0.1", true},
		{"http://[::1]", true},
		{"http://[::2]", false},
		{"http://example.com", false},
	}
	for _, c := range cases {
		u, err := url.Parse(c.rawurl)
		require.NoError(t, err)
		assert.Equal(t, c.value, IsLoopback(u), "for %s expected %v",
			u.String(), c.value)
	}
}
|
package main
import (
	"fmt"
	"log"
	"net/http"

	"github.com/gorilla/mux"
)
func getView(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/html")
fmt.Fprintln(w, "<h1>GET!</h1>")
}
func postView(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/html")
fmt.Fprintln(w, "<h1>POST!</h1>")
}
// main wires a gorilla/mux router mapping GET and POST on "/" to their
// respective handlers and serves on :3000.
func main() {
	r := mux.NewRouter()
	// Specify GET HTTP action
	r.HandleFunc("/", getView).Methods("GET")
	// Specify POST HTTP action
	r.HandleFunc("/", postView).Methods("POST")
	// ListenAndServe always returns a non-nil error; the original discarded
	// it, so a failed bind (e.g. port in use) exited silently.
	if err := http.ListenAndServe(":3000", r); err != nil {
		log.Fatal(err)
	}
}
|
package user
import (
"encoding/json"
"github.com/MerinEREN/iiPackages/datastore/user"
"github.com/MerinEREN/iiPackages/session"
"google.golang.org/appengine/datastore"
"google.golang.org/appengine/memcache"
"log"
)
// Get returns the logged-in user, trying sources from cheapest to most
// expensive: the memcached user itself ("u"), then the memcached datastore
// key ("uKey") followed by a datastore fetch, and finally a datastore
// lookup by the email stored in the session. Whatever was missing from the
// memcache is added back on a best-effort basis (cache failures are only
// logged, never returned).
//
// Fixes over the previous version: the error from user.Get via the cached
// key is no longer ignored (it used to fall through and cache a zero-value
// user), a failed json.Marshal no longer results in a memcache.Add with bad
// bytes, and a best-effort memcache.Add failure no longer leaks into the
// returned error after a successful lookup.
func Get(s *session.Session) (*user.User, error) {
	u := new(user.User)
	item, err := memcache.Get(s.Ctx, "u")
	if err == nil {
		// Fast path: the full user is cached.
		if err = json.Unmarshal(item.Value, u); err != nil {
			return nil, err
		}
		return u, nil
	}
	k := new(datastore.Key)
	item, err = memcache.Get(s.Ctx, "uKey")
	if err == nil {
		// The user's datastore key is cached; fetch the user with it.
		if err = json.Unmarshal(item.Value, k); err != nil {
			return nil, err
		}
		if u, err = user.Get(s.Ctx, k); err != nil {
			return nil, err
		}
	} else {
		// Slowest path: look the user up by the session's email, then
		// cache the key for next time.
		if u, k, err = user.GetViaEmail(s); err != nil {
			return nil, err
		}
		cacheJSON(s, "uKey", k)
	}
	cacheJSON(s, "u", u)
	return u, nil
}

// cacheJSON marshals v and adds it to the memcache under key on a
// best-effort basis; failures are logged, never returned.
func cacheJSON(s *session.Session, key string, v interface{}) {
	bs, err := json.Marshal(v)
	if err != nil {
		log.Printf("Path: %s, Error: %v\n", s.R.URL.Path, err)
		return
	}
	item := &memcache.Item{
		Key:   key,
		Value: bs,
	}
	if err = memcache.Add(s.Ctx, item); err != nil {
		log.Printf("Path: %s, Error: %v\n", s.R.URL.Path, err)
	}
}
|
package main
import "fmt"
// main demonstrates passing a predicate function to match.
func main() {
	isGolang := func(v string) bool {
		return v == "golang"
	}
	// Fixed typo: local was previously misspelled "resutl".
	result := match("golang", isGolang)
	fmt.Println(result)
}
// match reports whether v satisfies the predicate f.
func match(v string, f func(string) bool) bool {
	// Wrapping a boolean result in if/return-true/return-false is
	// redundant; return the predicate's result directly.
	return f(v)
}
|
// Copyright 2023 Google LLC. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package beta
import (
"github.com/GoogleCloudPlatform/declarative-resource-client-library/dcl"
)
// DCLGatewaySchema returns the declarative schema describing the
// NetworkServices/Gateway resource: its CRUD paths (Get, Apply, Delete,
// DeleteAll, List) and the "Gateway" component with all of its properties.
// The schema is a static literal; nothing here is computed at runtime.
func DCLGatewaySchema() *dcl.Schema {
return &dcl.Schema{
Info: &dcl.Info{
Title: "NetworkServices/Gateway",
Description: "The NetworkServices Gateway resource",
StructName: "Gateway",
},
// Paths describe the operations available on the resource and the
// parameters each one requires.
Paths: &dcl.Paths{
Get: &dcl.Path{
Description: "The function used to get information about a Gateway",
Parameters: []dcl.PathParameters{
dcl.PathParameters{
Name: "gateway",
Required: true,
Description: "A full instance of a Gateway",
},
},
},
Apply: &dcl.Path{
Description: "The function used to apply information about a Gateway",
Parameters: []dcl.PathParameters{
dcl.PathParameters{
Name: "gateway",
Required: true,
Description: "A full instance of a Gateway",
},
},
},
Delete: &dcl.Path{
Description: "The function used to delete a Gateway",
Parameters: []dcl.PathParameters{
dcl.PathParameters{
Name: "gateway",
Required: true,
Description: "A full instance of a Gateway",
},
},
},
DeleteAll: &dcl.Path{
Description: "The function used to delete all Gateway",
Parameters: []dcl.PathParameters{
dcl.PathParameters{
Name: "project",
Required: true,
Schema: &dcl.PathParametersSchema{
Type: "string",
},
},
dcl.PathParameters{
Name: "location",
Required: true,
Schema: &dcl.PathParametersSchema{
Type: "string",
},
},
},
},
List: &dcl.Path{
Description: "The function used to list information about many Gateway",
Parameters: []dcl.PathParameters{
dcl.PathParameters{
Name: "project",
Required: true,
Schema: &dcl.PathParametersSchema{
Type: "string",
},
},
dcl.PathParameters{
Name: "location",
Required: true,
Schema: &dcl.PathParametersSchema{
Type: "string",
},
},
},
},
},
// Components define the Gateway object itself: identity template,
// required fields, and per-property types/constraints.
Components: &dcl.Components{
Schemas: map[string]*dcl.Component{
"Gateway": &dcl.Component{
Title: "Gateway",
ID: "projects/{{project}}/locations/{{location}}/gateways/{{name}}",
ParentContainer: "project",
LabelsField: "labels",
HasCreate: true,
SchemaProperty: dcl.Property{
Type: "object",
Required: []string{
"name",
"ports",
"scope",
"project",
"location",
},
Properties: map[string]*dcl.Property{
"addresses": &dcl.Property{
Type: "array",
GoName: "Addresses",
Description: "One or more addresses with ports in format of \":\" that the Gateway must receive traffic on. The proxy binds to the ports specified. IP address can be anything that is allowed by the underlying infrastructure (auto-allocation, static IP, BYOIP).",
SendEmpty: true,
ListType: "list",
Items: &dcl.Property{
Type: "string",
GoType: "string",
},
},
"createTime": &dcl.Property{
Type: "string",
Format: "date-time",
GoName: "CreateTime",
ReadOnly: true,
Description: "Output only. The timestamp when the resource was created.",
Immutable: true,
},
"description": &dcl.Property{
Type: "string",
GoName: "Description",
Description: "Optional. A free-text description of the resource. Max length 1024 characters.",
},
"labels": &dcl.Property{
Type: "object",
AdditionalProperties: &dcl.Property{
Type: "string",
},
GoName: "Labels",
Description: "Optional. Set of label tags associated with the Gateway resource.",
},
"location": &dcl.Property{
Type: "string",
GoName: "Location",
Description: "The location for the resource",
Immutable: true,
},
"name": &dcl.Property{
Type: "string",
GoName: "Name",
Description: "Required. Name of the Gateway resource. It matches pattern `projects/*/locations/global/gateways/`.",
},
"ports": &dcl.Property{
Type: "array",
GoName: "Ports",
Description: "Required. One or more ports that the Gateway must receive traffic on. The proxy binds to the ports specified. Gateway listen on 0.0.0.0 on the ports specified below.",
SendEmpty: true,
ListType: "list",
Items: &dcl.Property{
Type: "integer",
Format: "int64",
GoType: "int64",
},
},
"project": &dcl.Property{
Type: "string",
GoName: "Project",
Description: "The project for the resource",
Immutable: true,
ResourceReferences: []*dcl.PropertyResourceReference{
&dcl.PropertyResourceReference{
Resource: "Cloudresourcemanager/Project",
Field: "name",
Parent: true,
},
},
},
"scope": &dcl.Property{
Type: "string",
GoName: "Scope",
Description: "Required. Immutable. Scope determines how configuration across multiple Gateway instances are merged. The configuration for multiple Gateway instances with the same scope will be merged as presented as a single coniguration to the proxy/load balancer. Max length 64 characters. Scope should start with a letter and can only have letters, numbers, hyphens.",
Immutable: true,
},
"selfLink": &dcl.Property{
Type: "string",
GoName: "SelfLink",
ReadOnly: true,
Description: "Output only. Server-defined URL of this resource",
Immutable: true,
},
"serverTlsPolicy": &dcl.Property{
Type: "string",
GoName: "ServerTlsPolicy",
Description: "Optional. A fully-qualified ServerTLSPolicy URL reference. Specifies how TLS traffic is terminated. If empty, TLS termination is disabled.",
ResourceReferences: []*dcl.PropertyResourceReference{
&dcl.PropertyResourceReference{
Resource: "Networksecurity/ServerTlsPolicy",
Field: "name",
Format: "projects/{{project}}/locations/global/serverTlsPolicies/{{name}}",
},
},
},
"type": &dcl.Property{
Type: "string",
GoName: "Type",
GoType: "GatewayTypeEnum",
Description: "Immutable. The type of the customer managed gateway. Possible values: TYPE_UNSPECIFIED, OPEN_MESH, SECURE_WEB_GATEWAY",
Immutable: true,
Enum: []string{
"TYPE_UNSPECIFIED",
"OPEN_MESH",
"SECURE_WEB_GATEWAY",
},
},
"updateTime": &dcl.Property{
Type: "string",
Format: "date-time",
GoName: "UpdateTime",
ReadOnly: true,
Description: "Output only. The timestamp when the resource was updated.",
Immutable: true,
},
},
},
},
},
},
}
}
|
//************************************************************************//
// rsc - RightScale API command line tool
//
// Generated with:
// $ praxisgen -metadata=ss/ssm/restful_doc -output=ss/ssm -pkg=ssm -target=1.0 -client=API
//
// The content of this file is auto-generated, DO NOT MODIFY
//************************************************************************//
package ssm
import (
"regexp"
"github.com/rightscale/rsc/metadata"
)
// Consists of a map of resource name to resource metadata.
var GenMetadata = map[string]*metadata.Resource{
"Execution": &metadata.Resource{
Name: "Execution",
Description: `An Execution is a launched instance of a CloudApp. Executions can be created from the catalog
by launching an Application, from Designer by launching a Template, or directly in Manager
by using the API and sending the CAT source or CAT Compiled source.
Executions are represented in RightScale Cloud Management by a deployment -- the resources
defined in the CAT are all created in the Deployment. Any action on a running CloudApp should
be made on its Execution resource.
Making changes to any resource directly in the CM deployment
may result in undesired behavior since the Execution only refreshes certain information as a
result of running an Operation on an Execution. For example, if a Server is replaced in CM
instead of through Self-Service, the new Server's information won' be available in
Self-Service.`,
Identifier: "application/vnd.rightscale.self_service.execution",
Attributes: []*metadata.Attribute{
&metadata.Attribute{
Name: "api_resources",
FieldName: "ApiResources",
FieldType: "[]*Resource",
},
&metadata.Attribute{
Name: "available_actions",
FieldName: "AvailableActions",
FieldType: "[]string",
},
&metadata.Attribute{
Name: "available_operations",
FieldName: "AvailableOperations",
FieldType: "[]*OperationDefinition",
},
&metadata.Attribute{
Name: "available_operations_info",
FieldName: "AvailableOperationsInfo",
FieldType: "[]*OperationInfo",
},
&metadata.Attribute{
Name: "compilation_href",
FieldName: "CompilationHref",
FieldType: "string",
},
&metadata.Attribute{
Name: "configuration_options",
FieldName: "ConfigurationOptions",
FieldType: "[]*ConfigurationOption",
},
&metadata.Attribute{
Name: "cost",
FieldName: "Cost",
FieldType: "*CostStruct",
},
&metadata.Attribute{
Name: "created_by",
FieldName: "CreatedBy",
FieldType: "*User",
},
&metadata.Attribute{
Name: "current_schedule",
FieldName: "CurrentSchedule",
FieldType: "string",
},
&metadata.Attribute{
Name: "dependencies",
FieldName: "Dependencies",
FieldType: "[]*CatDependency",
},
&metadata.Attribute{
Name: "deployment",
FieldName: "Deployment",
FieldType: "string",
},
&metadata.Attribute{
Name: "deployment_url",
FieldName: "DeploymentUrl",
FieldType: "string",
},
&metadata.Attribute{
Name: "description",
FieldName: "Description",
FieldType: "string",
},
&metadata.Attribute{
Name: "ends_at",
FieldName: "EndsAt",
FieldType: "*time.Time",
},
&metadata.Attribute{
Name: "href",
FieldName: "Href",
FieldType: "string",
},
&metadata.Attribute{
Name: "id",
FieldName: "Id",
FieldType: "string",
},
&metadata.Attribute{
Name: "kind",
FieldName: "Kind",
FieldType: "string",
},
&metadata.Attribute{
Name: "latest_notification",
FieldName: "LatestNotification",
FieldType: "*Notification",
},
&metadata.Attribute{
Name: "latest_notifications",
FieldName: "LatestNotifications",
FieldType: "[]*Notification",
},
&metadata.Attribute{
Name: "launched_from",
FieldName: "LaunchedFrom",
FieldType: "*LaunchedFrom",
},
&metadata.Attribute{
Name: "launched_from_summary",
FieldName: "LaunchedFromSummary",
FieldType: "map[string]interface{}",
},
&metadata.Attribute{
Name: "links",
FieldName: "Links",
FieldType: "*ExecutionLinks",
},
&metadata.Attribute{
Name: "name",
FieldName: "Name",
FieldType: "string",
},
&metadata.Attribute{
Name: "next_action",
FieldName: "NextAction",
FieldType: "*ScheduledAction",
},
&metadata.Attribute{
Name: "outputs",
FieldName: "Outputs",
FieldType: "[]*Output",
},
&metadata.Attribute{
Name: "running_operations",
FieldName: "RunningOperations",
FieldType: "[]*Operation",
},
&metadata.Attribute{
Name: "schedule_required",
FieldName: "ScheduleRequired",
FieldType: "bool",
},
&metadata.Attribute{
Name: "scheduled",
FieldName: "Scheduled",
FieldType: "bool",
},
&metadata.Attribute{
Name: "schedules",
FieldName: "Schedules",
FieldType: "[]*Schedule",
},
&metadata.Attribute{
Name: "source",
FieldName: "Source",
FieldType: "string",
},
&metadata.Attribute{
Name: "status",
FieldName: "Status",
FieldType: "string",
},
&metadata.Attribute{
Name: "timestamps",
FieldName: "Timestamps",
FieldType: "*TimestampsStruct",
},
},
Actions: []*metadata.Action{
&metadata.Action{
Name: "index",
Description: `List information about the Executions, or use a filter to only return certain Executions. A view can be used for various levels of detail.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/manager/projects/%s/executions",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by status, syntax is ["status==running"]`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "ids[]",
Description: `An optional list of execution IDs to retrieve`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded", "index", "tiny"},
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by status, syntax is ["status==running"]`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "ids[]",
Description: `An optional list of execution IDs to retrieve`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded", "index", "tiny"},
},
},
},
&metadata.Action{
Name: "show",
Description: `Show details for a given Execution. A view can be used for various levels of detail.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/manager/projects/%s/executions/%s",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded", "source"},
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded", "source"},
},
},
},
&metadata.Action{
Name: "create",
Description: `Create a new execution from a CAT, a compiled CAT, an Application in the Catalog, or a Template in Designer`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/executions",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "application_href",
Description: `The href of the Application in Catalog from which to create the Execution. This attribute is mutually exclusive with: source, compiled_cat, compilation_href and template_href.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compilation_href",
Description: `The href of the Compilation from which to create the Execution. This attribute is mutually exclusive with: source, compiled_cat, template_href and application_href. NOTE: This requires :designer role at least.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[cat_parser_gem_version]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[compiler_ver]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[conditions]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[definitions]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[dependency_hashes][]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[imports]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[long_description]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[mappings]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[name]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[dependency_hashes][]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[operations]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[outputs]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[package]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[parameters]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[permissions]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[dependency_hashes][]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[resources]",
Description: ``,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[rs_ca_ver]",
Description: ``,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[short_description]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat[source]",
Description: ``,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "current_schedule",
Description: `The currently selected schedule name, or nil for CloudApps using the '24/7' schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "defer_launch",
Description: `Whether or not to defer launching the execution. Setting this value to true will keep the execution in not_started state until it is explicitly launched or the first scheduled start operation occurs.`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "description",
Description: `The description for the execution. The short_description of the Template will be used if none is provided.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "ends_at",
Description: `The day on which the CloudApp should be automatically terminated`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The name for the Execution. The Template name will be used if none is provided. This will be used as the name of the deployment (appended with a unique ID).`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "options[][name]",
Description: `Name of configuration option`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "options[][type]",
Description: `Type of configuration option.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"string", "number", "list"},
},
&metadata.ActionParam{
Name: "options[][value]",
Description: `Configuration option value, a string, integer or array of strings depending on type`,
Type: "interface{}",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedule_required",
Description: `Whether the CloudApp requires a schedule. If set to false, allows user to pick from '24/7' schedule in the UI`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][action]",
Description: `The name of the action to be run. When the value is "run", the "operation" struct should contain the name of the operation to run as well as any options needed by the operation.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"launch", "start", "stop", "terminate", "run"},
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][email]",
Description: `User email`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][id]",
Description: `User id`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][name]",
Description: `User name, usually of the form "First Last"`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][compilation_href]",
Description: `The HREF of the compilation used to create this execution`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][cost][unit]",
Description: `Currency used for the cost value`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][cost][updated_at]",
Description: `Timestamp of last cost refresh`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][cost][value]",
Description: `Amount of instance usage in CloudApp deployment, only available roughly 24 hours after launch, empty if not available`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][email]",
Description: `User email`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][id]",
Description: `User id`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][name]",
Description: `User name, usually of the form "First Last"`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][current_schedule]",
Description: `The currently selected schedule name, or nil for CloudApps using the '24/7' schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][deployment]",
Description: `CloudApp deployment href`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][deployment_url]",
Description: `URL of the CloudApp deployment in the Cloud Management Dashboard`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][description]",
Description: `Description of execution`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][ends_at]",
Description: `The time of the next 'terminate' ScheduledAction (if any).`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][href]",
Description: `Execution href`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][id]",
Description: `User id`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][kind]",
Description: `The kind of this resource, always self_service#execution`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][latest_notification][category]",
Description: `Notification category, info or error`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"info", "error", "status_update"},
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][href]",
Description: `Execution href`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][id]",
Description: `User id`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][kind]",
Description: `The kind of this resource, always self_service#execution`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][latest_notification][message]",
Description: `Notification content`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][latest_notification][read]",
Description: `Whether notification was marked as read (not currently used)`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][latest_notification][timestamps][created_at]",
Description: `Creation timestamp`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][launched_from][type]",
Description: `The type of the value (one of: application, template, compiled_cat, source, compilation`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][cost][value]",
Description: `Amount of instance usage in CloudApp deployment, only available roughly 24 hours after launch, empty if not available`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][launched_from_summary]",
Description: `How the CloudApp was launched, either from Application, Template, source, or compiled_cat`,
Type: "map",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][name]",
Description: `User name, usually of the form "First Last"`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][action]",
Description: `The name of the action to be run. When the value is "run", the "operation" struct should contain the name of the operation to run as well as any options needed by the operation.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"launch", "start", "stop", "terminate", "run"},
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][email]",
Description: `User email`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][id]",
Description: `User id`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][name]",
Description: `User name, usually of the form "First Last"`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][next_action][execution_schedule]",
Description: `Indicates ScheduledActions that were created by the system as part of an execution schedule.`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][next_action][first_occurrence]",
Description: `The time and day of the first occurrence when the action will be ran, similar to the "DTSTART" property specified by iCal. Used (in conjunction with timezone) to determine the time of day for the "next_occurrence". Can be set to the future or past. DateTimes should be passed as ISO-8601 formatted time strings. All DateTimes are converted to UTC when returned.`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][href]",
Description: `Execution href`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][id]",
Description: `User id`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][kind]",
Description: `The kind of this resource, always self_service#execution`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][name]",
Description: `User name, usually of the form "First Last"`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][next_action][next_occurrence]",
Description: `The Date/Time for the next occurrence. Since "DateTime implies a timezone offset (but no DST preference), the "timezone" parameter will be used to determine the DST preference. All DateTimes are converted to UTC when returned.`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][name]",
Description: `User name, usually of the form "First Last"`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][next_action][recurrence]",
Description: `iCal recurrence rule (RRULE) as described by RFC 5545. Expresses the days on which the action will be run. Optionally a "last occurrence" date can be set by passing the iCal "UNTIL" parameter in the rule (date-time must be passed in ISO-8601 format). If omitted, the action will only be run once, on the "first_occurrence".`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][next_action][recurrence_description]",
Description: `Read-only attribute that gets automatically generated from the recurrence definition`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][latest_notification][timestamps][created_at]",
Description: `Creation timestamp`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][next_action][timezone]",
Description: `The timezone in which the "first_occurrence" and "next_occurrence" times will be interpreted. Used to determine when Daylight Savings Time changes occur. Supports standardized "tzinfo" names [found here](http://www.iana.org/time-zones).`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][schedule_required]",
Description: `Whether the CloudApp requires a schedule. If set to false, allows user to pick from '24/7' schedule in the UI`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][scheduled]",
Description: `Indicates whether or not an execution has a scheduled start action`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][source]",
Description: `Original CAT source`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][status]",
Description: `Execution status.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"not_started", "launching", "starting", "enabling", "running", "disabling", "disabled", "terminating", "stopping", "waiting_for_operations", "canceling_operations", "stopped", "terminated", "failed", "provisioning", "decommissioning", "decommissioned"},
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][latest_notification][timestamps][created_at]",
Description: `Creation timestamp`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][next_action][execution_schedule]",
Description: `Indicates ScheduledActions that were created by the system as part of an execution schedule.`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][next_action][first_occurrence]",
Description: `The time and day of the first occurrence when the action will be ran, similar to the "DTSTART" property specified by iCal. Used (in conjunction with timezone) to determine the time of day for the "next_occurrence". Can be set to the future or past. DateTimes should be passed as ISO-8601 formatted time strings. All DateTimes are converted to UTC when returned.`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][href]",
Description: `Execution href`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][id]",
Description: `User id`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][kind]",
Description: `The kind of this resource, always self_service#execution`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][name]",
Description: `User name, usually of the form "First Last"`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][next_action][next_occurrence]",
Description: `The Date/Time for the next occurrence. Since "DateTime implies a timezone offset (but no DST preference), the "timezone" parameter will be used to determine the DST preference. All DateTimes are converted to UTC when returned.`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][created_by][name]",
Description: `User name, usually of the form "First Last"`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][next_action][recurrence]",
Description: `iCal recurrence rule (RRULE) as described by RFC 5545. Expresses the days on which the action will be run. Optionally a "last occurrence" date can be set by passing the iCal "UNTIL" parameter in the rule (date-time must be passed in ISO-8601 format). If omitted, the action will only be run once, on the "first_occurrence".`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][next_action][recurrence_description]",
Description: `Read-only attribute that gets automatically generated from the recurrence definition`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][latest_notification][timestamps][created_at]",
Description: `Creation timestamp`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions[][execution][next_action][timezone]",
Description: `The timezone in which the "first_occurrence" and "next_occurrence" times will be interpreted. Used to determine when Daylight Savings Time changes occur. Supports standardized "tzinfo" names [found here](http://www.iana.org/time-zones).`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][created_from]",
Description: `optional HREF of the Schedule resource used to create this schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][description]",
Description: `An optional description that will help users understand the purpose of the Schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][name]",
Description: `The name of the Schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][hour]",
Description: `The hour of day from 0 to 23.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][minute]",
Description: `The minute from 0 to 59.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][rule]",
Description: `A RRULE string describing the recurrence rule.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][hour]",
Description: `The hour of day from 0 to 23.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][minute]",
Description: `The minute from 0 to 59.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules[][start_recurrence][rule]",
Description: `A RRULE string describing the recurrence rule.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "source",
Description: `The raw CAT source from which to create the Execution. The CAT will be compiled first and then launched if successful. This attribute is mutually exclusive with: compiled_cat, template_href, compilation_href and application_href.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "template_href",
Description: `The href of the Template in Designer from which to create the Execution. This attribute is mutually exclusive with: source, compiled_cat, compilation_href and application_href. NOTE: This requires :designer role at least.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "application_href",
Description: `The href of the Application in Catalog from which to create the Execution. This attribute is mutually exclusive with: source, compiled_cat, compilation_href and template_href.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compilation_href",
Description: `The href of the Compilation from which to create the Execution. This attribute is mutually exclusive with: source, compiled_cat, template_href and application_href. NOTE: This requires :designer role at least.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "compiled_cat",
Description: `The compiled CAT source from which to create the Execution. This attribute is mutually exclusive with: source, template_href and application_href.`,
Type: "*CompiledCAT",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "current_schedule",
Description: `The currently selected schedule name, or nil for CloudApps using the '24/7' schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "defer_launch",
Description: `Whether or not to defer launching the execution. Setting this value to true will keep the execution in not_started state until it is explicitly launched or the first scheduled start operation occurs.`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "description",
Description: `The description for the execution. The short_description of the Template will be used if none is provided.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "ends_at",
Description: `The day on which the CloudApp should be automatically terminated`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The name for the Execution. The Template name will be used if none is provided. This will be used as the name of the deployment (appended with a unique ID).`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "options",
Description: `The configuration options of the Execution. These are the values provided for the CloudApp parameters.`,
Type: "[]*ConfigurationOption",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedule_required",
Description: `Whether the CloudApp requires a schedule. If set to false, allows user to pick from '24/7' schedule in the UI`,
Type: "bool",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "scheduled_actions",
Description: `The inital ScheduledActions to apply to the Execution.`,
Type: "[]*ScheduledActionParam",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "schedules",
Description: `The schedules available to the CloudApp`,
Type: "[]*Schedule",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "source",
Description: `The raw CAT source from which to create the Execution. The CAT will be compiled first and then launched if successful. This attribute is mutually exclusive with: compiled_cat, template_href, compilation_href and application_href.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "template_href",
Description: `The href of the Template in Designer from which to create the Execution. This attribute is mutually exclusive with: source, compiled_cat, compilation_href and application_href. NOTE: This requires :designer role at least.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "patch",
Description: `Updates an execution end date or selected schedule.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "PATCH",
Pattern: "/api/manager/projects/%s/executions/%s",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "current_schedule",
Description: `The name of the schedule to select, or nil to use the '24/7' schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "ends_at",
Description: `The day on which the CloudApp should be automatically terminated`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "current_schedule",
Description: `The name of the schedule to select, or nil to use the '24/7' schedule`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "ends_at",
Description: `The day on which the CloudApp should be automatically terminated`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "delete",
Description: `No description provided for delete.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "DELETE",
Pattern: "/api/manager/projects/%s/executions/%s",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "force",
Description: `Force delete execution, bypassing state checks (only available to designers and admins).
Note: using this option only deletes the CloudApp from Self-Service and does not modify or terminate resources in any way. Any cloud resources running must be manually destroyed.`,
Type: "bool",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "force",
Description: `Force delete execution, bypassing state checks (only available to designers and admins).
Note: using this option only deletes the CloudApp from Self-Service and does not modify or terminate resources in any way. Any cloud resources running must be manually destroyed.`,
Type: "bool",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "multi_delete",
Description: `Delete several executions from the database. Note: if an execution has not successfully been terminated, there may still be associated cloud resources running.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "DELETE",
Pattern: "/api/manager/projects/%s/executions",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "force",
Description: `Force delete execution, bypassing state checks (only available to designers and admins).
Note: using this option only deletes the CloudApp from Self-Service and does not modify or terminate resources in any way. Any cloud resources running must be manually destroyed.`,
Type: "bool",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "ids[]",
Description: `List of execution IDs to delete`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "force",
Description: `Force delete execution, bypassing state checks (only available to designers and admins).
Note: using this option only deletes the CloudApp from Self-Service and does not modify or terminate resources in any way. Any cloud resources running must be manually destroyed.`,
Type: "bool",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "ids[]",
Description: `List of execution IDs to delete`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "download",
Description: `Download the CAT source for the execution.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/manager/projects/%s/executions/%s/download",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/([^/]+)/download`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "api_version",
Description: `The API version (only valid value is currently "1.0")`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "api_version",
Description: `The API version (only valid value is currently "1.0")`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "launch",
Description: `Launch an Execution.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/executions/%s/actions/launch",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/([^/]+)/actions/launch`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
&metadata.Action{
Name: "start",
Description: `Start an Execution.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/executions/%s/actions/start",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/([^/]+)/actions/start`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
&metadata.Action{
Name: "stop",
Description: `Stop an Execution.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/executions/%s/actions/stop",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/([^/]+)/actions/stop`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
&metadata.Action{
Name: "terminate",
Description: `Terminate an Execution.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/executions/%s/actions/terminate",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/([^/]+)/actions/terminate`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
&metadata.Action{
Name: "multi_launch",
Description: `Launch several Executions.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/executions/actions/launch",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/actions/launch`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `List of execution IDs to launch`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `List of execution IDs to launch`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "multi_start",
Description: `Start several Executions.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/executions/actions/start",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/actions/start`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `List of execution IDs to start`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `List of execution IDs to start`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "multi_stop",
Description: `Stop several Executions.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/executions/actions/stop",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/actions/stop`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `List of execution IDs to stop`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `List of execution IDs to stop`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "multi_terminate",
Description: `Terminate several Executions.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/executions/actions/terminate",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/actions/terminate`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `List of execution IDs to terminate`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `List of execution IDs to terminate`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "run",
Description: `Runs an Operation on an Execution.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/executions/%s/actions/run",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/([^/]+)/actions/run`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "configuration_options[][name]",
Description: `Name of configuration option`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "configuration_options[][type]",
Description: `Type of configuration option.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"string", "number", "list"},
},
&metadata.ActionParam{
Name: "configuration_options[][value]",
Description: `Configuration option value, a string, integer or array of strings depending on type`,
Type: "interface{}",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The name of the operation to run`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "configuration_options",
Description: `The configuration options of the operation. These are the values provided for the CloudApp parameters that this operation depends on.`,
Type: "[]*ConfigurationOption",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The name of the operation to run`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "multi_run",
Description: `Runs an Operation on several Executions.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/executions/actions/run",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/executions/actions/run`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `List of execution IDs to run`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "configuration_options[][name]",
Description: `Name of configuration option`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "configuration_options[][type]",
Description: `Type of configuration option.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"string", "number", "list"},
},
&metadata.ActionParam{
Name: "configuration_options[][value]",
Description: `Configuration option value, a string, integer or array of strings depending on type`,
Type: "interface{}",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The name of the operation to run`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "ids[]",
Description: `List of execution IDs to run`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "configuration_options",
Description: `The configuration options of the operation. These are the values provided for the CloudApp parameters that this operation depends on.`,
Type: "[]*ConfigurationOption",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The name of the operation to run`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
},
},
},
Links: map[string]string{
"latest_notifications": "",
"running_operations": "",
},
},
"Notification": &metadata.Resource{
Name: "Notification",
Description: `The Notification resource represents a system notification that an action has occurred. Generally
these Notifications are the start and completion of Operations. Currently notifications are only
available via the API/UI and are not distributed externally to users.`,
Identifier: "application/vnd.rightscale.self_service.notification",
Attributes: []*metadata.Attribute{
&metadata.Attribute{
Name: "category",
FieldName: "Category",
FieldType: "string",
},
&metadata.Attribute{
Name: "execution",
FieldName: "Execution",
FieldType: "*Execution",
},
&metadata.Attribute{
Name: "href",
FieldName: "Href",
FieldType: "string",
},
&metadata.Attribute{
Name: "id",
FieldName: "Id",
FieldType: "string",
},
&metadata.Attribute{
Name: "kind",
FieldName: "Kind",
FieldType: "string",
},
&metadata.Attribute{
Name: "links",
FieldName: "Links",
FieldType: "*NotificationLinks",
},
&metadata.Attribute{
Name: "message",
FieldName: "Message",
FieldType: "string",
},
&metadata.Attribute{
Name: "read",
FieldName: "Read",
FieldType: "bool",
},
&metadata.Attribute{
Name: "timestamps",
FieldName: "Timestamps",
FieldType: "*TimestampsStruct",
},
},
Actions: []*metadata.Action{
&metadata.Action{
Name: "index",
Description: `List the most recent 50 Notifications. Use the filter parameter to specify specify Executions.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/manager/projects/%s/notifications",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/notifications`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by Execution`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "ids[]",
Description: `The Notification IDs to return`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by Execution`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "ids[]",
Description: `The Notification IDs to return`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "show",
Description: `Get details for a specific Notification`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/manager/projects/%s/notifications/%s",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/notifications/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
},
Links: map[string]string{
"execution": "",
},
},
"Operation": &metadata.Resource{
Name: "Operation",
Description: `Operations represent actions that can be taken on an Execution.
When a CloudApp is launched, a sequence of Operations is run as [explained here](http://docs.rightscale.com/ss/reference/ss_CAT_file_language.html#operations) in the Operations section
While a CloudApp is running, users may launch any custom Operations as defined in the CAT.
Once a CAT is Terminated, a sequence of Operations is run as [explained here](http://docs.rightscale.com/ss/reference/ss_CAT_file_language.html#operations) in the Operations section`,
Identifier: "application/vnd.rightscale.self_service.operation",
Attributes: []*metadata.Attribute{
&metadata.Attribute{
Name: "configuration_options",
FieldName: "ConfigurationOptions",
FieldType: "[]*ConfigurationOption",
},
&metadata.Attribute{
Name: "created_by",
FieldName: "CreatedBy",
FieldType: "*User",
},
&metadata.Attribute{
Name: "execution",
FieldName: "Execution",
FieldType: "*Execution",
},
&metadata.Attribute{
Name: "href",
FieldName: "Href",
FieldType: "string",
},
&metadata.Attribute{
Name: "id",
FieldName: "Id",
FieldType: "string",
},
&metadata.Attribute{
Name: "kind",
FieldName: "Kind",
FieldType: "string",
},
&metadata.Attribute{
Name: "label",
FieldName: "Label",
FieldType: "string",
},
&metadata.Attribute{
Name: "links",
FieldName: "Links",
FieldType: "*OperationLinks",
},
&metadata.Attribute{
Name: "name",
FieldName: "Name",
FieldType: "string",
},
&metadata.Attribute{
Name: "status",
FieldName: "Status",
FieldType: "*StatusStruct",
},
&metadata.Attribute{
Name: "timestamps",
FieldName: "Timestamps",
FieldType: "*TimestampsStruct",
},
},
Actions: []*metadata.Action{
&metadata.Action{
Name: "index",
Description: `Get the list of 50 most recent Operations (usually filtered by Execution).`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/manager/projects/%s/operations",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/operations`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by Execution ID or status`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "ids[]",
Description: `IDs of operations to filter on`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "limit",
Description: `The maximum number of operations to retrieve. The maximum (and default) limit is 50.If a limit of more than 50 is specified, only 50 operations will be returned`,
Type: "int",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded"},
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by Execution ID or status`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "ids[]",
Description: `IDs of operations to filter on`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "limit",
Description: `The maximum number of operations to retrieve. The maximum (and default) limit is 50.If a limit of more than 50 is specified, only 50 operations will be returned`,
Type: "int",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded"},
},
},
},
&metadata.Action{
Name: "show",
Description: `Get the details for a specific Operation`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/manager/projects/%s/operations/%s",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/operations/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded"},
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "view",
Description: `Optional view to return`,
Type: "string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"default", "expanded"},
},
},
},
&metadata.Action{
Name: "create",
Description: `Trigger an Operation to run by specifying the Execution ID and the name of the Operation.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/operations",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/operations`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "execution_id",
Description: `The Execution ID on which to run the operation`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The name of the operation to run`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "options[][name]",
Description: `Name of configuration option`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "options[][type]",
Description: `Type of configuration option.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"string", "number", "list"},
},
&metadata.ActionParam{
Name: "options[][value]",
Description: `Configuration option value, a string, integer or array of strings depending on type`,
Type: "interface{}",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "execution_id",
Description: `The Execution ID on which to run the operation`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The name of the operation to run`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "options",
Description: `The configuration options of the operation. These are the values provided for the CloudAPP parameters that this operation depends on.`,
Type: "[]*ConfigurationOption",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
},
},
Links: map[string]string{
"execution": "",
},
},
"ScheduledAction": &metadata.Resource{
Name: "ScheduledAction",
Description: `ScheduledActions describe a set of timed occurrences for an action to be run (at most once per day).
Recurrence Rules are based off of the [RFC 5545](https://tools.ietf.org/html/rfc5545) iCal spec, and timezones are from the standard [tzinfo database](http://www.iana.org/time-zones).
All DateTimes must be passed in [ISO-8601 format](https://en.wikipedia.org/wiki/ISO_8601)`,
Identifier: "application/vnd.rightscale.self_service.scheduled_action",
Attributes: []*metadata.Attribute{
&metadata.Attribute{
Name: "action",
FieldName: "Action",
FieldType: "string",
},
&metadata.Attribute{
Name: "created_by",
FieldName: "CreatedBy",
FieldType: "*User",
},
&metadata.Attribute{
Name: "execution",
FieldName: "Execution",
FieldType: "*Execution",
},
&metadata.Attribute{
Name: "execution_schedule",
FieldName: "ExecutionSchedule",
FieldType: "bool",
},
&metadata.Attribute{
Name: "first_occurrence",
FieldName: "FirstOccurrence",
FieldType: "*time.Time",
},
&metadata.Attribute{
Name: "href",
FieldName: "Href",
FieldType: "string",
},
&metadata.Attribute{
Name: "id",
FieldName: "Id",
FieldType: "string",
},
&metadata.Attribute{
Name: "kind",
FieldName: "Kind",
FieldType: "string",
},
&metadata.Attribute{
Name: "links",
FieldName: "Links",
FieldType: "*ScheduledActionLinks",
},
&metadata.Attribute{
Name: "name",
FieldName: "Name",
FieldType: "string",
},
&metadata.Attribute{
Name: "next_occurrence",
FieldName: "NextOccurrence",
FieldType: "*time.Time",
},
&metadata.Attribute{
Name: "operation",
FieldName: "Operation",
FieldType: "*OperationStruct",
},
&metadata.Attribute{
Name: "recurrence",
FieldName: "Recurrence",
FieldType: "string",
},
&metadata.Attribute{
Name: "recurrence_description",
FieldName: "RecurrenceDescription",
FieldType: "string",
},
&metadata.Attribute{
Name: "timestamps",
FieldName: "Timestamps",
FieldType: "*TimestampsStruct",
},
&metadata.Attribute{
Name: "timezone",
FieldName: "Timezone",
FieldType: "string",
},
},
Actions: []*metadata.Action{
&metadata.Action{
Name: "index",
Description: `List ScheduledAction resources in the project. The list can be filtered to a given execution.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/manager/projects/%s/scheduled_actions",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/scheduled_actions`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by execution id or execution creator (user) id.`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "filter[]",
Description: `Filter by execution id or execution creator (user) id.`,
Type: "[]string",
Location: metadata.QueryParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "show",
Description: `Retrieve given ScheduledAction resource.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "GET",
Pattern: "/api/manager/projects/%s/scheduled_actions/%s",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/scheduled_actions/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
&metadata.Action{
Name: "create",
Description: `Create a new ScheduledAction resource.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/scheduled_actions",
Variables: []string{"project_id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/scheduled_actions`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "action",
Description: `The name of the action to be run. When the value is "run", the "operation" struct should contain the name of the operation to run as well as any options needed by the operation.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
ValidValues: []string{"launch", "start", "stop", "terminate", "run"},
},
&metadata.ActionParam{
Name: "execution_id",
Description: `Id of the Execuion.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "first_occurrence",
Description: `The time and day of the first occurrence when the action will be ran, similar to the "DTSTART" property specified by iCal. Used (in conjunction with timezone) to determine the time of day for the "next_occurrence". Can be set to the future or past. DateTimes should be passed as ISO-8601 formatted time strings. All DateTimes are converted to UTC when returned.`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The human-readable name for the ScheduledAction.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "operation[configuration_options][][name]",
Description: `Name of configuration option`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "operation[configuration_options][][type]",
Description: `Type of configuration option.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
ValidValues: []string{"string", "number", "list"},
},
&metadata.ActionParam{
Name: "operation[configuration_options][][value]",
Description: `Configuration option value, a string, integer or array of strings depending on type`,
Type: "interface{}",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "operation[configuration_options][][name]",
Description: `Name of configuration option`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "recurrence",
Description: `iCal recurrence rule (RRULE) as described by RFC 5545. Expresses the days on which the action will be run. Optionally a "last occurrence" date can be set by passing the iCal "UNTIL" parameter in the rule (date-time must be passed in ISO-8601 format). If omitted, the action will only be run once, on the "first_occurrence".`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "timezone",
Description: `The timezone in which the "first_occurrence" and "next_occurrence" times will be interpreted. Used to determine when Daylight Savings Time changes occur. Supports standardized "tzinfo" names [found here](http://www.iana.org/time-zones).`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "action",
Description: `The name of the action to be run. When the value is "run", the "operation" struct should contain the name of the operation to run as well as any options needed by the operation.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
ValidValues: []string{"launch", "start", "stop", "terminate", "run"},
},
&metadata.ActionParam{
Name: "execution_id",
Description: `Id of the Execuion.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "first_occurrence",
Description: `The time and day of the first occurrence when the action will be ran, similar to the "DTSTART" property specified by iCal. Used (in conjunction with timezone) to determine the time of day for the "next_occurrence". Can be set to the future or past. DateTimes should be passed as ISO-8601 formatted time strings. All DateTimes are converted to UTC when returned.`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: true,
NonBlank: false,
},
&metadata.ActionParam{
Name: "name",
Description: `The human-readable name for the ScheduledAction.`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "operation",
Description: `When scheduling a "run" action, contains details on the operation to run`,
Type: "*OperationStruct",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "recurrence",
Description: `iCal recurrence rule (RRULE) as described by RFC 5545. Expresses the days on which the action will be run. Optionally a "last occurrence" date can be set by passing the iCal "UNTIL" parameter in the rule (date-time must be passed in ISO-8601 format). If omitted, the action will only be run once, on the "first_occurrence".`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
&metadata.ActionParam{
Name: "timezone",
Description: `The timezone in which the "first_occurrence" and "next_occurrence" times will be interpreted. Used to determine when Daylight Savings Time changes occur. Supports standardized "tzinfo" names [found here](http://www.iana.org/time-zones).`,
Type: "string",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "patch",
Description: `Updates the 'next_occurrence' property of a ScheduledAction.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "PATCH",
Pattern: "/api/manager/projects/%s/scheduled_actions/%s",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/scheduled_actions/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "next_occurrence",
Description: `The Date/Time for the next occurrence, useful for delaying a single occurrence. DateTimes should be passed as ISO-8601 formatted time strings.`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "next_occurrence",
Description: `The Date/Time for the next occurrence, useful for delaying a single occurrence. DateTimes should be passed as ISO-8601 formatted time strings.`,
Type: "*time.Time",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
},
&metadata.Action{
Name: "delete",
Description: `Delete a ScheduledAction.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "DELETE",
Pattern: "/api/manager/projects/%s/scheduled_actions/%s",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/scheduled_actions/([^/]+)`),
},
},
CommandFlags: []*metadata.ActionParam{},
APIParams: []*metadata.ActionParam{},
},
&metadata.Action{
Name: "skip",
Description: `Skips the requested number of ScheduledAction occurrences. If no count is provided, one occurrence is skipped. On success, the next_occurrence view of the updated ScheduledAction is returned.`,
PathPatterns: []*metadata.PathPattern{
&metadata.PathPattern{
HTTPMethod: "POST",
Pattern: "/api/manager/projects/%s/scheduled_actions/%s/actions/skip",
Variables: []string{"project_id", "id"},
Regexp: regexp.MustCompile(`/api/manager/projects/([^/]+)/scheduled_actions/([^/]+)/actions/skip`),
},
},
CommandFlags: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "count",
Description: `The number of scheduled occurrences to skip. If not provided, the default count is 1.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
APIParams: []*metadata.ActionParam{
&metadata.ActionParam{
Name: "count",
Description: `The number of scheduled occurrences to skip. If not provided, the default count is 1.`,
Type: "int",
Location: metadata.PayloadParam,
Mandatory: false,
NonBlank: false,
},
},
},
},
Links: map[string]string{
"execution": "",
},
},
}
|
package raft
// Raft export interface.

// PushEntries pushes entries to the raft cluster.
// NOTE(review): declared without a body; Go only accepts bodyless function
// declarations when an implementation is supplied elsewhere (e.g. assembly).
// The original `func PushEntries(string entries)` had the parameter name and
// type reversed and so did not compile.
func PushEntries(entries string)

// SetCommitEntriesChan sets the send-only channel on which committed entries
// are delivered back to the caller.
func SetCommitEntriesChan(entriesRecv chan<- string)
|
package vugu
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestBuildEnvCachedComponent exercises the BuildEnv component cache:
// a component registered via UseComponent during one build pass should be
// visible through compCache on the next RunBuild pass.
func TestBuildEnvCachedComponent(t *testing.T) {
	assert := assert.New(t)
	be, err := NewBuildEnv()
	assert.NoError(err)
	assert.NotNil(be)
	{ // just double check sane behavior for these keys
		k1 := MakeCompKey(1, 1)
		k2 := MakeCompKey(1, 1)
		assert.Equal(k1, k2) // identical inputs -> equal keys
		k3 := MakeCompKey(1, 2)
		assert.NotEqual(k1, k3) // differing second input -> distinct key
		k4 := MakeCompKey(1, 1)
		assert.Equal(k1, k4)
	}
	rb1 := &rootb1{}
	// first run to initialize
	res := be.RunBuild(rb1)
	assert.NotNil(res)
	// nothing has been registered for this key yet, so the lookup misses
	c := be.CachedComponent(MakeCompKey(1, 1))
	assert.Nil(c)
	assert.Nil(be.compCache[MakeCompKey(1, 1)])
	b1 := &testb1{}
	// mark b1 as "used" during the current pass
	be.UseComponent(MakeCompKey(1, 1), b1)
	assert.NotNil(be.compUsed[MakeCompKey(1, 1)])
	// run another one
	res = be.RunBuild(rb1)
	assert.NotNil(res)
	// we should see b1 in the cache
	assert.NotNil(be.compCache[MakeCompKey(1, 1)])
	assert.Equal(b1, be.compCache[MakeCompKey(1, 1)])
	// TODO: but not in the used (not used for this pass)
	// TODO: now try to use it and make sure we can only get it once
}
// rootb1 is a trivial root component used as the RunBuild entry point in tests.
type rootb1 struct{}

// Build returns an empty build output tree.
func (b *rootb1) Build(in *BuildIn) (out *BuildOut) {
	result := &BuildOut{Out: []*VGNode{}}
	return result
}
// testb1 is a trivial child component registered into the cache in tests.
type testb1 struct{}

// Build returns an empty build output tree.
func (b *testb1) Build(in *BuildIn) (out *BuildOut) {
	result := &BuildOut{Out: []*VGNode{}}
	return result
}
|
// Copyright 2023 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package api
import (
"testing"
"time"
"github.com/stretchr/testify/require"
)
// TestTimerValidate drives TimerRecord.Validate through each failure mode in
// sequence (empty Namespace, empty Key, empty/unknown SchedPolicyType, bad
// schedule expression, bad time zones) and then through accepted time zone
// forms (numeric offset, IANA name, empty string).
func TestTimerValidate(t *testing.T) {
	// invalid insert
	record := &TimerRecord{}
	err := record.Validate()
	require.EqualError(t, err, "field 'Namespace' should not be empty")
	// fixing one field at a time surfaces the next validation error
	record.Namespace = "n1"
	err = record.Validate()
	require.EqualError(t, err, "field 'Key' should not be empty")
	record.Key = "k1"
	err = record.Validate()
	require.EqualError(t, err, "field 'SchedPolicyType' should not be empty")
	record.SchedPolicyType = "aa"
	err = record.Validate()
	require.EqualError(t, err, "schedule event configuration is not valid: invalid schedule event type: 'aa'")
	record.SchedPolicyType = SchedEventInterval
	record.SchedPolicyExpr = "1x"
	err = record.Validate()
	require.EqualError(t, err, "schedule event configuration is not valid: invalid schedule event expr '1x': unknown unit x")
	record.SchedPolicyExpr = "1h"
	require.Nil(t, record.Validate())
	// unknown time zone names are rejected
	record.TimeZone = "a123"
	err = record.Validate()
	require.ErrorContains(t, err, "Unknown or incorrect time zone: 'a123'")
	record.TimeZone = "tidb"
	err = record.Validate()
	require.ErrorContains(t, err, "Unknown or incorrect time zone: 'tidb'")
	// numeric offsets, IANA names and the empty string are all accepted
	record.TimeZone = "+0800"
	require.NoError(t, record.Validate())
	record.TimeZone = "Asia/Shanghai"
	require.NoError(t, record.Validate())
	record.TimeZone = ""
	require.NoError(t, record.Validate())
}
// TestTimerNextEventTime checks TimerRecord.NextEventTime across combinations
// of the Enable flag, Location, and valid/invalid schedule expressions,
// including a cron schedule that can never fire (Feb 30).
func TestTimerNextEventTime(t *testing.T) {
	now := time.Now().In(time.UTC)
	record := &TimerRecord{
		TimerSpec: TimerSpec{
			SchedPolicyType: SchedEventInterval,
			SchedPolicyExpr: "1h",
			Watermark: now,
			Enable: true,
		},
	}
	// enabled interval timer: next event is watermark + interval
	next, ok, err := record.NextEventTime()
	require.NoError(t, err)
	require.True(t, ok)
	require.Equal(t, now.Add(time.Hour), next)
	// a Location changes the returned time's zone, not the instant
	loc := time.FixedZone("UTC+1", 60*60)
	record.Location = loc
	next, ok, err = record.NextEventTime()
	require.NoError(t, err)
	require.True(t, ok)
	require.Equal(t, now.Add(time.Hour).In(loc), next)
	// a disabled timer reports no next event and no error
	record.Enable = false
	next, ok, err = record.NextEventTime()
	require.NoError(t, err)
	require.False(t, ok)
	require.True(t, next.IsZero())
	// ...even when the schedule expression is invalid
	record.SchedPolicyExpr = "abcde"
	next, ok, err = record.NextEventTime()
	require.NoError(t, err)
	require.False(t, ok)
	require.True(t, next.IsZero())
	// re-enabling surfaces the invalid expression as an error
	record.Enable = true
	next, ok, err = record.NextEventTime()
	require.ErrorContains(t, err, "invalid schedule event expr")
	require.False(t, ok)
	require.True(t, next.IsZero())
	// a cron schedule for Feb 30 never fires: no error, but no next event
	record.SchedPolicyType = SchedEventCron
	record.SchedPolicyExpr = "0 0 30 2 *"
	next, ok, err = record.NextEventTime()
	require.NoError(t, err)
	require.False(t, ok)
	require.True(t, next.IsZero())
}
|
package method
import (
"github.com/jinzhu/gorm"
"go-admin/models"
)
// PagingServer applies the paging parameters from pageParams to a KPI query
// and returns the paged query handle together with the total row count.
//
// The previous implementation discarded both results: gorm's chained calls
// (Limit/Offset/Order) return a NEW *gorm.DB rather than mutating db, and
// `total` was never read, so the function was effectively a no-op. Returning
// the values is backward compatible — existing statement-style calls still
// compile — while letting callers actually consume the result.
func PagingServer(pageParams models.KPIQueryParam, db *gorm.DB) (*gorm.DB, int) {
	var total int
	limit := pageParams.PageSize
	offset := pageParams.PageSize * (pageParams.Current - 1)
	// Best-effort count, matching the original's ignored error; total stays 0
	// on failure.
	if err := db.Model(&models.KPI{}).Count(&total).Error; err != nil {
		total = 0
	}
	return db.Limit(limit).Offset(offset).Order("id desc"), total
}
|
package main
import (
"fmt"
"math"
)
const (
	// InitialStep is the starting guess for the iteration.
	InitialStep = float64(1)
	// Delta is the convergence threshold between successive estimates.
	Delta = .000000001
)

// ErrNegativeSqrt reports an attempt to take the square root of a negative number.
type ErrNegativeSqrt float64

// Error implements the error interface, naming the offending value.
func (e ErrNegativeSqrt) Error() string {
	return fmt.Sprintf("Cannot Sqrt negative number: %g", float64(e))
}

// newtonsMethod approximates the square root of x via Newton-Raphson
// iteration, stopping once successive estimates differ by less than Delta.
// A negative x yields an ErrNegativeSqrt error.
func newtonsMethod(x float64) (float64, error) {
	if x < 0 {
		return 0, ErrNegativeSqrt(x)
	}
	prev := InitialStep
	cur := newtonsMethodNextStep(x, prev)
	for math.Abs(cur-prev) > Delta {
		prev, cur = cur, newtonsMethodNextStep(x, cur)
	}
	return cur, nil
}

// newtonsMethodNextStep produces the next Newton-Raphson estimate of sqrt(x)
// from the current estimate.
func newtonsMethodNextStep(x, step float64) float64 {
	return step - ((step*step)-x)/(2*step)
}
// main prints the square root approximation for 2 and the error produced by
// a negative input.
func main() {
	for _, v := range []float64{2, -2} {
		fmt.Println(newtonsMethod(v))
	}
}
|
package client
import (
"bufio"
"fmt"
"io/ioutil"
"net"
"net/http"
"net/url"
"os"
"path"
"strconv"
"strings"
printer "github.com/olekukonko/tablewriter"
konfig "github.com/zalando/chimp/conf/client"
. "github.com/zalando/chimp/types"
"golang.org/x/crypto/ssh/terminal"
)
//Client is the struct for accessing client functionalities.
//It carries the resolved configuration, the OAuth2 access token used to
//authenticate API requests, the URL scheme and the set of target clusters.
type Client struct {
	Config *konfig.ClientConfig // resolved client configuration (clusters, OAuth settings)
	AccessToken string // OAuth2 bearer token, filled by GetAccessToken/RenewAccessToken
	Scheme string // URL scheme used to reach the API ("http" or "https")
	Clusters []string // names of the clusters commands are executed against
}
var homeDirectories = []string{"HOME", "USERPROFILES"}
//RenewAccessToken is used to get a new Oauth2 access token.
//It prompts for the username (when not supplied) and password, exchanges
//them for a token at the configured OAuth endpoint and caches the token in
//.chimp-token in the user's home directory. Exits the process on fatal errors.
func (bc *Client) RenewAccessToken(username string) {
	if username == "" {
		reader := bufio.NewReader(os.Stdin)
		fmt.Print("Enter your username: ")
		username, _ = reader.ReadString('\n')
	}
	fmt.Print("Enter your password: ")
	bytePassword, err := terminal.ReadPassword(0)
	fmt.Println("")
	if err != nil {
		fmt.Printf("Cannot read password\n")
		os.Exit(1)
	}
	password := strings.TrimSpace(string(bytePassword))
	u, err := url.Parse(bc.Config.OauthURL)
	if err != nil {
		fmt.Printf("ERR: Could not parse given Auth URL: %s\n", bc.Config.OauthURL)
		os.Exit(1)
	}
	authURLStr := fmt.Sprintf("https://%s%s%s%s", u.Host, u.Path, u.RawQuery, u.Fragment)
	fmt.Printf("Getting token as %s\n", username)
	client := &http.Client{}
	req, err := http.NewRequest("GET", authURLStr, nil)
	// Previously ignored: on error req is nil and client.Do(nil) would panic.
	if err != nil {
		fmt.Printf("ERR: Could not build token request, caused by: %s\n", err)
		os.Exit(1)
	}
	req.SetBasicAuth(username, password)
	res, err := client.Do(req)
	if res != nil {
		defer res.Body.Close()
	}
	if err != nil {
		fmt.Printf("ERR: Could not get Access Token, caused by: %s\n", err)
		os.Exit(1)
	}
	respBody, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Printf("ERR: Can not read response body, caused by: %s\n", err)
		os.Exit(1)
	}
	if len(respBody) > 0 && res.StatusCode == 200 {
		bc.AccessToken = string(respBody)
		fmt.Printf("SUCCESS. Your access token is stored in .chimp-token in your home directory.\n")
		//store token to file; last set env var in homeDirectories wins
		var homeDir string
		for _, home := range homeDirectories {
			if dir := os.Getenv(home); dir != "" {
				homeDir = dir
			}
		}
		tokenFileName := fmt.Sprintf("%s/%s", homeDir, ".chimp-token")
		// Caching is best-effort (we'll retry next time), but the file handle
		// was previously leaked — close it once written.
		if f, err := os.Create(tokenFileName); err == nil {
			_, _ = f.WriteString(strings.TrimSpace(bc.AccessToken))
			f.Close()
		}
	} else {
		fmt.Printf("ERR: %d - %s\n", res.StatusCode, respBody)
	}
}
//GetAccessToken sets the access token inside the request.
//It loads a previously cached token from .chimp-token in the user's home
//directory; when no token can be read the process exits with an error asking
//the user to log in again. It is a no-op when OAuth2 is disabled.
//NOTE(review): the username parameter is currently unused here.
func (bc *Client) GetAccessToken(username string) {
	if bc.Config.Oauth2Enabled {
		//before trying to get the token I try to read the old one
		var homeDir string
		// last set env var in homeDirectories wins
		for _, home := range homeDirectories {
			if dir := os.Getenv(home); dir != "" {
				homeDir = dir
			}
		}
		tokenFileName := fmt.Sprintf("%s/%s", homeDir, ".chimp-token")
		data, err := ioutil.ReadFile(tokenFileName)
		var oldToken string
		if err != nil {
			fmt.Println("ERR: Could not get an AccessToken which is required. Please login again.")
			os.Exit(1)
		} else {
			oldToken = strings.TrimSpace(string(data))
		}
		bc.AccessToken = oldToken
	}
}
// buildDeploymentURL assembles the /deployments endpoint URL for the given
// cluster, optionally scoped to a deployment name and query parameters.
func (bc *Client) buildDeploymentURL(name string, params map[string]string, cluster string) string {
	clusterCfg := bc.Config.Clusters[cluster]
	u := &url.URL{Scheme: bc.Scheme}
	u.Host = net.JoinHostPort(clusterCfg.IP, strconv.Itoa(clusterCfg.Port))
	// On the default https port the explicit ":443" suffix is dropped.
	if bc.Scheme == "https" && clusterCfg.Port == 443 {
		u.Host = clusterCfg.IP
	}
	u.Path = path.Join("/deployments", url.QueryEscape(name))
	q := u.Query()
	for key, value := range params {
		q.Set(key, value)
	}
	u.RawQuery = q.Encode()
	return u.String()
}
// buildDeploymentReplicasURL assembles the URL of the replica-scaling
// endpoint for a deployment on the given cluster, carrying the force flag as
// a query parameter.
func (bc *Client) buildDeploymentReplicasURL(name string, replicas int, cluster string, force bool) string {
	clusterCfg := bc.Config.Clusters[cluster]
	u := &url.URL{Scheme: bc.Scheme}
	u.Host = net.JoinHostPort(clusterCfg.IP, strconv.Itoa(clusterCfg.Port))
	// On the default https port the explicit ":443" suffix is dropped.
	if bc.Scheme == "https" && clusterCfg.Port == 443 {
		u.Host = clusterCfg.IP
	}
	q := u.Query()
	q.Set("force", strconv.FormatBool(force))
	u.RawQuery = q.Encode()
	u.Path = path.Join("/deployments", url.QueryEscape(name), "replicas", strconv.Itoa(replicas))
	return u.String()
}
//DeleteDeploy is used to delete a deployment from the cluster/server.
//A delete is attempted on every configured cluster; a failure on one cluster
//does not stop the others.
func (bc *Client) DeleteDeploy(name string) {
	for _, clusterName := range bc.Clusters {
		// Run each cluster in its own closure so the deferred Body.Close
		// fires per iteration instead of accumulating until the method
		// returns (the original defer-in-loop held every body open).
		func() {
			url := bc.buildDeploymentURL(name, nil, clusterName)
			_, res, err := bc.makeRequest("DELETE", url, nil)
			if res != nil {
				defer res.Body.Close()
			}
			if err != nil {
				fmt.Println(errorMessageBuilder("Cannot delete deployment", err))
				return
			}
			if !checkStatusOK(res.StatusCode) {
				handleStatusNOK(res.StatusCode)
				return
			}
			if !checkAuthOK(res.StatusCode) {
				handleAuthNOK(res.StatusCode)
				return
			}
			if res.StatusCode >= 400 && res.StatusCode <= 499 {
				e := Error{}
				unmarshalResponse(res, &e)
				fmt.Printf("Cannot delete deployment: %s\n", e.Err)
				return
			}
			fmt.Println("Delete operation successful")
		}()
	}
}
//InfoDeploy is used to get the information for a currently running deployment.
//It queries every configured cluster and prints an info table per cluster.
func (bc *Client) InfoDeploy(name string, verbose bool) {
	for _, clusterName := range bc.Clusters {
		fmt.Println(clusterName)
		// Closure per cluster so the deferred Body.Close runs each iteration
		// rather than piling up until the method returns.
		func() {
			url := bc.buildDeploymentURL(name, nil, clusterName)
			_, res, err := bc.makeRequest("GET", url, nil)
			if res != nil {
				defer res.Body.Close()
			}
			if err != nil {
				fmt.Println(errorMessageBuilder("Cannot get info for deploy", err))
				return
			}
			if !checkStatusOK(res.StatusCode) {
				handleStatusNOK(res.StatusCode)
				return
			}
			if !checkAuthOK(res.StatusCode) {
				handleAuthNOK(res.StatusCode)
				return
			}
			if res.StatusCode >= 400 && res.StatusCode <= 499 {
				e := Error{}
				unmarshalResponse(res, &e)
				fmt.Printf("Cannot get info for deployment: %s\n", e.Err)
				return
			}
			artifact := Artifact{}
			unmarshalResponse(res, &artifact)
			printInfoTable(verbose, artifact)
		}()
	}
}
//ListDeploy is used to get a list of the running deployments in the cluster.
//When all is true the request carries the query parameter all=true.
func (bc *Client) ListDeploy(all bool) {
	for _, clusterName := range bc.Clusters {
		fmt.Println(clusterName)
		// Closure per cluster so the deferred Body.Close runs each iteration
		// rather than piling up until the method returns.
		func() {
			var query map[string]string
			if all {
				query = map[string]string{"all": "true"}
			}
			url := bc.buildDeploymentURL("", query, clusterName)
			_, res, err := bc.makeRequest("GET", url, nil)
			if res != nil {
				defer res.Body.Close()
			}
			if err != nil {
				fmt.Println(errorMessageBuilder("Cannot list deployments", err))
				return
			}
			if !checkStatusOK(res.StatusCode) {
				handleStatusNOK(res.StatusCode)
				return
			}
			if !checkAuthOK(res.StatusCode) {
				handleAuthNOK(res.StatusCode)
				return
			}
			if res.StatusCode >= 400 && res.StatusCode <= 499 {
				e := Error{}
				if err := unmarshalResponse(res, &e); err != nil {
					fmt.Printf("Cannot get list of deployments: %s\n", err.Error())
					return
				}
				fmt.Printf("Cannot get list of deployments: %s\n", e.Err)
				return
			}
			var ld ListDeployments
			unmarshalResponse(res, &ld)
			fmt.Printf("List of deployed applications: \n")
			for _, name := range ld.Deployments {
				fmt.Printf("\t%s\n", name)
			}
		}()
	}
}
//CreateDeploy is used to deploy a new app. If an app with the same name is already deployed,
//an error will be returned.
func (bc *Client) CreateDeploy(cmdReq *CmdClientRequest) {
	//for each datacenter, create the app
	for _, clusterName := range bc.Clusters {
		fmt.Println(clusterName)
		// Closure per cluster so the deferred Body.Close runs each iteration
		// rather than piling up until the method returns.
		func() {
			deploy := map[string]interface{}{"Name": cmdReq.Name, "Ports": cmdReq.Ports, "Labels": cmdReq.Labels,
				"ImageURL": cmdReq.ImageURL, "Env": cmdReq.Env, "Replicas": cmdReq.Replicas, "CPULimit": cmdReq.CPULimit,
				"MemoryLimit": cmdReq.MemoryLimit, "Force": cmdReq.Force, "Volumes": cmdReq.Volumes}
			url := bc.buildDeploymentURL("", nil, clusterName)
			_, res, err := bc.makeRequest("POST", url, deploy)
			if res != nil {
				defer res.Body.Close()
			}
			if err != nil {
				fmt.Println(errorMessageBuilder("Deploy unsuccessful", err))
				return
			}
			if !checkStatusOK(res.StatusCode) {
				handleStatusNOK(res.StatusCode)
				return
			}
			if !checkAuthOK(res.StatusCode) {
				handleAuthNOK(res.StatusCode)
				return
			}
			if res.StatusCode >= 400 && res.StatusCode <= 499 {
				e := Error{}
				unmarshalResponse(res, &e)
				fmt.Printf("Deploy unsuccessful: %s\n", e.Err)
				return
			}
			fmt.Println("Application successfully deployed.")
		}()
	}
}
//UpdateDeploy is used to update an already deployed app.
func (bc *Client) UpdateDeploy(cmdReq *CmdClientRequest) {
	for _, clusterName := range bc.Clusters {
		fmt.Println(clusterName)
		// Closure per cluster so the deferred Body.Close runs each iteration
		// rather than piling up until the method returns.
		func() {
			deploy := map[string]interface{}{"Name": cmdReq.Name, "Ports": cmdReq.Ports, "Labels": cmdReq.Labels,
				"ImageURL": cmdReq.ImageURL, "Env": cmdReq.Env, "Replicas": cmdReq.Replicas, "CPULimit": cmdReq.CPULimit,
				"MemoryLimit": cmdReq.MemoryLimit, "Force": cmdReq.Force}
			url := bc.buildDeploymentURL(cmdReq.Name, nil, clusterName)
			_, res, err := bc.makeRequest("PUT", url, deploy)
			if res != nil {
				defer res.Body.Close()
			}
			if err != nil {
				fmt.Println(errorMessageBuilder("Deploy unsuccessful", err))
				return
			}
			if !checkStatusOK(res.StatusCode) {
				handleStatusNOK(res.StatusCode)
				return
			}
			if !checkAuthOK(res.StatusCode) {
				handleAuthNOK(res.StatusCode)
				return
			}
			if res.StatusCode >= 400 && res.StatusCode <= 499 {
				e := Error{}
				unmarshalResponse(res, &e)
				fmt.Printf("Update unsuccessful: %s\n", e.Err)
				return
			}
			fmt.Println("Application successfully updated.")
		}()
	}
}
//Scale is used to scale an existing application to the number of replicas specified.
func (bc *Client) Scale(name string, replicas int, force bool) {
	for _, clusterName := range bc.Clusters {
		fmt.Println(clusterName)
		// Closure per cluster so the deferred Body.Close runs each iteration
		// rather than piling up until the method returns.
		func() {
			deploy := map[string]interface{}{"Name": name, "Replicas": replicas}
			url := bc.buildDeploymentReplicasURL(name, replicas, clusterName, force)
			_, res, err := bc.makeRequest("PATCH", url, deploy)
			if res != nil {
				defer res.Body.Close()
			}
			if err != nil {
				fmt.Println(errorMessageBuilder("Cannot scale", err))
				return
			}
			if !checkStatusOK(res.StatusCode) {
				handleStatusNOK(res.StatusCode)
				return
			}
			if !checkAuthOK(res.StatusCode) {
				handleAuthNOK(res.StatusCode)
				return
			}
			if res.StatusCode >= 400 && res.StatusCode <= 499 {
				e := Error{}
				unmarshalResponse(res, &e)
				fmt.Printf("Scale unsuccessful: %s\n", e.Err)
				return
			}
			fmt.Println("Application scaled.")
		}()
	}
}
// errorMessageBuilder prefixes err's text with message, special-casing the
// TLS "oversized record" error that shows up when an https client hits a
// plain-HTTP endpoint.
func errorMessageBuilder(message string, err error) string {
	detail := err.Error()
	if strings.Contains(detail, "tls: oversized") {
		return fmt.Sprintf("%s, caused by: cannot estabilish an https connection.", message)
	}
	return fmt.Sprintf("%s, caused by: %s", message, detail)
}
// printInfoTable renders deployment information as tables on stdout: a
// summary row always, plus per-container, environment and label tables when
// verbose is set.
//
// Fixes over the original: the no-op `cRow = append(cRow)` is removed,
// replicas without containers/endpoints no longer panic on `[0]` indexing,
// and nil Env/Labels pointers no longer panic on dereference.
func printInfoTable(verbose bool, artifact Artifact) {
	table := printer.NewWriter(os.Stdout)
	table.SetHeader([]string{"Name", "Status", "Endpoints", "Num Replicas", "CPUs", "Memory", "Last Message"})
	// endpoints/ports are accumulated over all replicas (ports currently
	// collected but not displayed, matching the original behavior)
	var endpoints string
	var ports string
	for _, replica := range artifact.RunningReplicas {
		endpoints = endpoints + fmt.Sprintf("%s\n", replica.Endpoints)
		for _, port := range replica.Ports {
			ports = ports + fmt.Sprintf("%d, ", port.Port)
		}
	}
	row := []string{
		artifact.Name,
		artifact.Status,
		artifact.Endpoint,
		fmt.Sprintf("%d/%d", len(artifact.RunningReplicas), artifact.RequestedReplicas),
		strconv.FormatFloat(artifact.CPUS, 'f', 1, 64),
		strconv.FormatFloat(artifact.Memory, 'f', 1, 64),
		artifact.Message,
	}
	table.Append(row)
	table.Render()
	if !verbose {
		return
	}
	//second table in case of verbose flag set
	containerTable := printer.NewWriter(os.Stdout)
	containerTable.SetRowLine(true)
	containerTable.SetHeader([]string{"Container Status", "Image", "Endpoint", "Logfile"})
	for _, replica := range artifact.RunningReplicas {
		// guard against replicas without containers; unconditional [0]
		// indexing previously panicked
		if len(replica.Containers) == 0 {
			continue
		}
		endpoint := ""
		if len(replica.Endpoints) > 0 {
			endpoint = replica.Endpoints[0]
		}
		cRow := []string{
			replica.Containers[0].Status,
			replica.Containers[0].ImageURL,
			endpoint,
			replica.Containers[0].LogInfo["containerName"],
		}
		containerTable.Append(cRow)
	}
	containerTable.Render()
	if artifact.Env != nil {
		settingsTable := printer.NewWriter(os.Stdout)
		settingsTable.SetRowLine(true)
		settingsTable.SetHeader([]string{"Env name", "value"})
		for k, v := range *artifact.Env {
			settingsTable.Append([]string{k, v})
		}
		settingsTable.Render()
	}
	if artifact.Labels != nil {
		labelsTable := printer.NewWriter(os.Stdout)
		labelsTable.SetRowLine(true)
		labelsTable.SetHeader([]string{"Label", "value"})
		for k, v := range *artifact.Labels {
			labelsTable.Append([]string{k, v})
		}
		labelsTable.Render()
	}
}
|
package crypt_test
import (
"testing"
"github.com/GehirnInc/crypt"
_ "github.com/GehirnInc/crypt/apr1_crypt"
"github.com/stretchr/testify/assert"
)
func TestIsHashSupported(t *testing.T) {
apr1 := crypt.IsHashSupported("$apr1$salt$hash")
assert.True(t, apr1)
other := crypt.IsHashSupported("$unknown$salt$hash")
assert.False(t, other)
}
|
package main
import "fmt"
// plusTwo returns a closure that adds two to its argument.
func plusTwo() func(int) int {
	addTwo := func(n int) int {
		return n + 2
	}
	return addTwo
}
// plusX returns a closure that adds its argument to itself (doubles it).
func plusX() func(int) int {
	double := func(n int) int {
		return n + n
	}
	return double
}
// main demonstrates the two closure factories by printing one sample result
// from each.
func main() {
	addTwo := plusTwo()
	double := plusX()
	fmt.Println(addTwo(2))
	fmt.Println(double(3))
}
|
package email
import (
"bytes"
"html/template"
"log"
"github.com/VolticFroogo/Animal-Pictures/models"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/ses"
)
// Register sends the account registry email.
//
// code is the verification code embedded in the confirmation link, username
// personalises the greeting, and email is the recipient address. The HTML
// body is rendered from templates/email/register.html; a plain-text
// alternative for non-HTML clients is assembled inline. Returns any error
// from session setup, template handling, or the SES send call.
func Register(code, username, email string) (err error) {
	// NOTE(review): a fresh AWS session is created on every call — confirm
	// this is acceptable at the expected send volume.
	sess, err := session.NewSession(&aws.Config{
		Region: aws.String("eu-west-1")},
	)
	if err != nil {
		return
	}
	// Create an SES session.
	svc := ses.New(sess)
	t, err := template.ParseFiles("templates/email/register.html") // Parse the HTML page.
	if err != nil {
		log.Printf("Template parsing error: %v", err)
		return
	}
	// Fill the template with the per-recipient values.
	variables := models.EmailTemplateVariables{
		Code:     code,
		Username: username,
	}
	var tBytes bytes.Buffer
	err = t.Execute(&tBytes, variables)
	if err != nil {
		log.Printf("Template execution error: %v", err)
		return
	}
	// Assemble the email.
	input := &ses.SendEmailInput{
		Source: aws.String("\"Animal Pictures\" <noreply@froogo.co.uk>"),
		Destination: &ses.Destination{
			ToAddresses: []*string{
				aws.String(email),
			},
		},
		Message: &ses.Message{
			Subject: &ses.Content{
				Charset: aws.String("UTF-8"),
				Data:    aws.String("Register Account"),
			},
			Body: &ses.Body{
				// Rendered HTML body plus a plain-text fallback.
				Html: &ses.Content{
					Charset: aws.String("UTF-8"),
					Data:    aws.String(tBytes.String()),
				},
				Text: &ses.Content{
					Charset: aws.String("UTF-8"),
					Data:    aws.String("Welcome " + username + ",\nTo finish the registration process of your account please visit: https://ap.froogo.co.uk/verify?code=" + code + "\nIf you haven't registered an account please just ignore this email, sorry for any inconvenience."),
				},
			},
		},
	}
	// Attempt to send the email.
	_, err = svc.SendEmail(input)
	return
}
// Recovery sends the recovery email.
//
// code is the password-reset code embedded in the recovery link, username
// personalises the greeting, and email is the recipient address. The HTML
// body is rendered from templates/email/recovery.html; a plain-text
// alternative for non-HTML clients is assembled inline. Returns any error
// from session setup, template handling, or the SES send call.
func Recovery(code, username, email string) (err error) {
	// NOTE(review): a fresh AWS session is created on every call — confirm
	// this is acceptable at the expected send volume.
	sess, err := session.NewSession(&aws.Config{
		Region: aws.String("eu-west-1")},
	)
	if err != nil {
		return
	}
	// Create an SES session.
	svc := ses.New(sess)
	t, err := template.ParseFiles("templates/email/recovery.html") // Parse the HTML page.
	if err != nil {
		log.Printf("Template parsing error: %v", err)
		return
	}
	// Fill the template with the per-recipient values.
	variables := models.EmailTemplateVariables{
		Code:     code,
		Username: username,
	}
	var tBytes bytes.Buffer
	err = t.Execute(&tBytes, variables)
	if err != nil {
		log.Printf("Template execution error: %v", err)
		return
	}
	// Assemble the email.
	input := &ses.SendEmailInput{
		Source: aws.String("\"Animal Pictures\" <noreply@froogo.co.uk>"),
		Destination: &ses.Destination{
			ToAddresses: []*string{
				aws.String(email),
			},
		},
		Message: &ses.Message{
			Subject: &ses.Content{
				Charset: aws.String("UTF-8"),
				Data:    aws.String("Reset Your Password"),
			},
			Body: &ses.Body{
				// Rendered HTML body plus a plain-text fallback.
				Html: &ses.Content{
					Charset: aws.String("UTF-8"),
					Data:    aws.String(tBytes.String()),
				},
				Text: &ses.Content{
					Charset: aws.String("UTF-8"),
					Data:    aws.String("Hello " + username + ",\nTo reset your password please click this link: https://ap.froogo.co.uk/password-recovery/?code=" + code + "\nIf it wasn't you trying to reset your password please just ignore this email, sorry for any inconvenience.\nHowever, if you are receiving lots of these emails please contact support for assistance."),
				},
			},
		},
	}
	// Attempt to send the email.
	_, err = svc.SendEmail(input)
	return
}
|
package models
import (
"context"
"database/sql"
"fmt"
"strings"
"github.com/jmoiron/sqlx"
)
// LoadPersonConfig returns the config for person model entries.
//
// The CREATE TABLE statement uses the $TEXT placeholder, which the dialect
// substitutes with its native text column type. No extra constraints are
// declared beyond the primary key.
func LoadPersonConfig(dialect *SQLDialect) *DatabaseModel {
	conf := ModelConfig{
		Create: `CREATE TABLE IF NOT EXISTS person(
	id $TEXT PRIMARY KEY,
	group_name $TEXT NOT NULL,
	first_name $TEXT NOT NULL,
	last_name $TEXT NOT NULL,
	clarifier $TEXT NOT NULL,
	note $TEXT NOT NULL
	);`,
		Constraints: "",
	}
	return NewDatabaseModel(dialect, conf)
}
// Person is a definition for a database model and JSON output model.
// Column names (db tags) use snake_case; JSON keys use kebab-case.
type Person struct {
	// ID is the derived unique key; see PersonJSON.ID for its construction.
	ID        string `db:"id" json:"id"`
	GroupName string `db:"group_name" json:"group-name"`
	FirstName string `db:"first_name" json:"first-name"`
	LastName  string `db:"last_name" json:"last-name"`
	Clarifier string `db:"clarifier" json:"clarifier"`
	Note      string `db:"note" json:"note"`
}
// Write a person model to the database.
//
// Inserts the person as a single row via the dialect-specific insert
// statement. The caller supplies and owns the transaction; no commit or
// rollback happens here.
func (p *Person) Write(tx *sql.Tx, dialect SQLDialect) (sql.Result, error) {
	statement := dialect.InsertStatement(`person (id, group_name, first_name, last_name, clarifier, note)
	VALUES ($1, $2, $3, $4, $5, $6)`)
	// NOTE(review): debug print of the full person record on every insert —
	// writes row data to stdout; consider removing before production use.
	fmt.Println(p)
	return tx.Exec(
		statement,
		p.ID,
		p.GroupName,
		p.FirstName,
		p.LastName,
		p.Clarifier,
		p.Note,
	)
}
// WritePeople writes the whole given list of people to the database.
// All inserts run in one transaction; the first failure rolls the
// transaction back and is returned, otherwise the transaction is committed.
func WritePeople(db *sqlx.DB, people []*Person, dialect SQLDialect) error {
	tx, beginErr := db.BeginTx(context.TODO(), &sql.TxOptions{ReadOnly: false})
	if beginErr != nil {
		return beginErr
	}
	for _, person := range people {
		_, writeErr := person.Write(tx, dialect)
		if writeErr != nil {
			tx.Rollback()
			return writeErr
		}
	}
	return tx.Commit()
}
// PersonJSON is the model for incoming JSON definitions.
// It mirrors Person minus the ID, which is derived (see ID()).
type PersonJSON struct {
	GroupName string `json:"group-name"`
	FirstName string `json:"first-name"`
	LastName  string `json:"last-name"`
	Clarifier string `json:"clarifier"`
	Note      string `json:"note"`
}
// ToDBPerson converts this JSON model into a database writeable model,
// deriving the primary-key ID from the model's own fields.
func (p *PersonJSON) ToDBPerson() *Person {
	dbPerson := &Person{
		GroupName: p.GroupName,
		FirstName: p.FirstName,
		LastName:  p.LastName,
		Clarifier: p.Clarifier,
		Note:      p.Note,
	}
	dbPerson.ID = p.ID()
	return dbPerson
}
// ID returns the calculated ID for this JSON model.
//
// When a group name is present the ID is derived from it alone; otherwise it
// is "<last>.<first>". A non-empty clarifier is appended as a trailing
// ".<clarifier>" segment. The integer arguments to convertKeyString are the
// per-segment length limits (unchanged from the original).
//
// Change: removed three debug fmt.Printf calls that wrote intermediate
// values to stdout on every ID computation.
func (p *PersonJSON) ID() string {
	b := strings.Builder{}
	if len(p.GroupName) != 0 {
		b.WriteString(convertKeyString([]string{p.GroupName}, 12))
	} else {
		b.WriteString(convertKeyString([]string{p.LastName}, 7))
		b.WriteString(".")
		b.WriteString(convertKeyString([]string{p.FirstName}, 7))
	}
	if len(p.Clarifier) > 0 {
		b.WriteString(".")
		b.WriteString(convertKeyString([]string{p.Clarifier}, 5))
	}
	return b.String()
}
|
package tplmgr
import (
"context"
"net/http"
"strings"
"github.com/justinas/nosurf"
"github.com/pkg/errors"
"github.com/volatiletech/authboss"
)
// HTMLData aliases authboss.HTMLData so callers in this package can use the
// short name.
type HTMLData = authboss.HTMLData

// AuthbossHTMLRenderer renders pre-parsed templates for authboss pages.
type AuthbossHTMLRenderer struct {
	// extension is appended to template names that lack it (see Render).
	extension string
}
// NewAuthbossHTMLRenderer returns a renderer with no filename extension
// configured.
func NewAuthbossHTMLRenderer() *AuthbossHTMLRenderer {
	return NewAuthbossHTMLRendererWithExt("")
}

// NewAuthbossHTMLRendererWithExt returns a renderer that appends the given
// extension to template names when rendering.
func NewAuthbossHTMLRendererWithExt(extension string) *AuthbossHTMLRenderer {
	renderer := &AuthbossHTMLRenderer{}
	renderer.extension = extension
	return renderer
}
// SetExtension changes the extension appended to template names in Render.
func (abhr *AuthbossHTMLRenderer) SetExtension(extension string) {
	abhr.extension = extension
}

// Load is a no-op: templates are pre-parsed and held in the package-level
// templates map, so there is nothing to load per name.
func (abhr *AuthbossHTMLRenderer) Load(names ...string) error {
	return nil
}
// Render renders the named pre-parsed template with the given data and
// returns the rendered bytes with a "text/html" content type. The configured
// extension is appended to the name when missing.
//
// Bug fix: the original returned buf.Bytes() after `defer bufpool.Put(buf)`,
// handing the caller a slice aliasing pooled memory that the next render
// reuses — a data race / content-corruption hazard. The bytes are now copied
// before the buffer returns to the pool.
func (abhr *AuthbossHTMLRenderer) Render(ctx context.Context, name string, data HTMLData) (output []byte, contentType string, err error) {
	if !strings.HasSuffix(name, abhr.extension) {
		name += abhr.extension
	}
	template, ok := templates[name]
	if !ok {
		return nil, "", errors.Errorf("Template for page %s not found", name)
	}
	buf := bufpool.Get()
	defer bufpool.Put(buf)
	err = template.Execute(buf, data)
	if err != nil {
		return nil, "", errors.Wrapf(err, "failed to render template for page %s", name)
	}
	// Detach the rendered bytes from the pooled buffer before it is reused.
	rendered := append([]byte(nil), buf.Bytes()...)
	return rendered, "text/html", nil
}
// AuthbossSAHTMLRenderer renders an authboss page directly to the HTTP
// response: it merges any HTMLData stored on the request context with the
// caller-supplied data, injects a CSRF token under "csrf_token", and hands
// the combined data to the package-level Render.
func AuthbossSAHTMLRenderer(w http.ResponseWriter, r *http.Request, name string, extension string, data HTMLData) {
	var htmlData authboss.HTMLData
	contextData := r.Context().Value(authboss.CTXKeyData)
	if contextData == nil {
		htmlData = authboss.HTMLData{}
	} else {
		// NOTE(review): this assertion panics if the context value is not an
		// authboss.HTMLData — presumably guaranteed by authboss middleware.
		htmlData = contextData.(authboss.HTMLData)
	}
	htmlData.MergeKV("csrf_token", nosurf.Token(r))
	htmlData.Merge(data)
	Render(w, name+extension, htmlData)
}
|
package rpc
import (
"encoding/json"
"errors"
)
const (
	// JSON_RPC_VER is the protocol version stamped on every envelope.
	JSON_RPC_VER = "2.0"
	// MaxMultiRequest caps the initial capacity used when parsing batches.
	MaxMultiRequest = 10
	// Standard JSON-RPC 2.0 error codes.
	ParseErr = -32700 // -32700 parse error: the server received invalid JSON while parsing the request text
	InvalidRequest = -32600 // -32600 invalid request: the JSON sent is not a valid request object
	MethodNotFound = -32601 // -32601 method not found: the method does not exist or is invalid
	InvalidParamErr = -32602 // -32602 invalid params: invalid method parameters
	InvalidParamErr = -32602
	InternalErr = -32603 // -32603 internal error: internal JSON-RPC error
	ServerErr = -32000 // -32000 to -32099 server error: reserved for implementation-defined server errors
)

// _messages maps each error code to its default human-readable message,
// used by NewJsonrpcErr when no message is supplied.
var _messages = map[int]string{
	ParseErr: "ParseErr",
	InvalidRequest: "InvalidRequest",
	MethodNotFound: "MethodNotFound",
	InvalidParamErr: "InvalidParamErr",
	InternalErr: "InternalErr",
	ServerErr: "ServerErr",
}
// Response is the JSON-RPC response envelope: the server sends one of these
// to the client, and the client parses received payloads into it.
type Response struct {
	ID string `json:"id"`
	Error *JsonrpcErr `json:"error"`
	Result interface{} `json:"result"`
	Jsonrpc string `json:"jsonrpc"`
}

// NewResponse builds a Response carrying either a result or an error.
// NOTE(review): when err is non-nil the id is cleared to "" — JSON-RPC 2.0
// expects error responses to echo the request id (or null); confirm this
// clearing is intentional.
func NewResponse(id string, result interface{}, err *JsonrpcErr) *Response {
	if err != nil {
		id = ""
	}
	return &Response{
		ID: id,
		Error: err,
		Result: result,
		Jsonrpc: JSON_RPC_VER,
	}
}
// JsonrpcErr is the error object a handler must return when processing an
// RPC request fails.
type JsonrpcErr struct {
	Code int `json:"code"`
	Message string `json:"message"`
	Data interface{} `json:"data"`
}

// Error implements the error interface by rendering the full error object as
// its JSON encoding; a marshal failure is a programmer bug and panics.
func (je *JsonrpcErr) Error() string {
	encoded, marshalErr := json.Marshal(je)
	if marshalErr != nil {
		panic(marshalErr)
	}
	return string(encoded)
}

// NewJsonrpcErr builds a JsonrpcErr. An empty message falls back to the
// default message registered for the code in _messages.
func NewJsonrpcErr(code int, message string, data interface{}) *JsonrpcErr {
	msg := message
	if msg == "" {
		msg = _messages[code]
	}
	return &JsonrpcErr{Code: code, Message: msg, Data: data}
}
// Request is the JSON-RPC request envelope: the client sends one of these to
// the server, and the server parses incoming payloads into it.
type Request struct {
	ID string `json:"id"`
	Method string `json:"method"`
	Params interface{} `json:"params"`
	Jsonrpc string `json:"jsonrpc"`
}

// NewRequest builds a Request for the given method, params and id, stamped
// with the protocol version.
func NewRequest(id string, params interface{}, method string) *Request {
	return &Request{ID: id, Params: params, Method: method, Jsonrpc: JSON_RPC_VER}
}
// encodeRequest marshals a single request to JSON, panicking on marshal
// failure (the envelope types are always marshalable).
func encodeRequest(req *Request) []byte {
	encoded, marshalErr := json.Marshal(req)
	if marshalErr != nil {
		panic(marshalErr)
	}
	return encoded
}

// encodeMultiRequest marshals a batch of requests to a JSON array,
// panicking on marshal failure.
func encodeMultiRequest(reqs *[]*Request) []byte {
	encoded, marshalErr := json.Marshal(reqs)
	if marshalErr != nil {
		panic(marshalErr)
	}
	return encoded
}
// parseRequest parses raw request bytes, accepting either a JSON array
// (batch) or a single request object. A single object is wrapped in a
// one-element slice; input that is neither form yields an error alongside
// the (empty) slice.
func parseRequest(bs []byte) ([]*Request, error) {
	batch := make([]*Request, 0, MaxMultiRequest)
	// Try the batch form first; fall through to a single object on failure.
	if err := json.Unmarshal(bs, &batch); err == nil {
		return batch, nil
	}
	single := new(Request)
	if err := json.Unmarshal(bs, single); err != nil {
		errmsg := "ParseSingleReq err: " + err.Error()
		println(errmsg)
		return batch, errors.New(errmsg)
	}
	return append(batch, single), nil
}
// encodeResponse marshals a single response to JSON, panicking on marshal
// failure.
func encodeResponse(resp *Response) []byte {
	encoded, marshalErr := json.Marshal(resp)
	if marshalErr != nil {
		panic(marshalErr)
	}
	return encoded
}

// encodeMultiResponse marshals a batch of responses to a JSON array,
// panicking on marshal failure.
func encodeMultiResponse(resps []*Response) []byte {
	encoded, marshalErr := json.Marshal(resps)
	if marshalErr != nil {
		panic(marshalErr)
	}
	return encoded
}

// parseResponse decodes a response from its JSON text, panicking on invalid
// input.
func parseResponse(s string) *Response {
	parsed := &Response{}
	if unmarshalErr := json.Unmarshal([]byte(s), parsed); unmarshalErr != nil {
		panic(unmarshalErr)
	}
	return parsed
}
|
package process
import (
"github.com/yacc2007/pop-network/types"
"github.com/yacc2007/pop-network/vm"
"sync"
"github.com/pkg/errors"
log "github.com/Sirupsen/logrus"
"github.com/yacc2007/pop-network/util"
)
var (
	// ResultHashNotMatch is returned by CheckTransaction when the locally
	// recomputed state hash differs from the one the peer reported.
	// NOTE(review): Go convention would name this ErrResultHashNotMatch —
	// renaming would break existing callers, so it is only flagged here.
	ResultHashNotMatch error = errors.New("data hash not match")
)
// ExecutorMgr caches one Executor per contract address, creating them
// lazily from the backing VM store.
type ExecutorMgr struct {
	vmStore vm.VMStore
	// executorMap caches executors keyed by contract address; guarded by mu.
	executorMap map[string]*Executor
	mu sync.Mutex
}

// NewExecutorMgr returns a manager backed by the given VM store with an
// empty executor cache.
func NewExecutorMgr(vmStore vm.VMStore) *ExecutorMgr {
	return &ExecutorMgr{
		vmStore: vmStore,
		executorMap: make(map[string]*Executor),
	}
}
// GetExecutor returns the cached executor for the given contract address,
// lazily loading the VM and creating the executor on first use.
//
// Bug fix: the original re-checked the map after loading the VM but, when a
// concurrent caller had already registered an executor, it still returned
// its own freshly built (and unregistered) instance — leaving two live
// executors for one address. The double-check now returns the registered
// executor, and Init work is skipped entirely in that case.
func (m *ExecutorMgr) GetExecutor(address string) (*Executor, error) {
	// Fast path: already cached.
	m.mu.Lock()
	if exec, ok := m.executorMap[address]; ok {
		m.mu.Unlock()
		return exec, nil
	}
	m.mu.Unlock()
	// Load outside the lock; vmStore.Load may be slow.
	vm, err := m.vmStore.Load(address)
	if err != nil {
		return nil, err
	}
	// Double check: another goroutine may have registered while we loaded.
	m.mu.Lock()
	defer m.mu.Unlock()
	if exec, ok := m.executorMap[address]; ok {
		return exec, nil
	}
	exec := NewExecutor()
	if err = exec.Init(vm); err != nil {
		return nil, err
	}
	m.executorMap[address] = exec
	return exec, nil
}
// CheckTransaction verifies a peer-supplied transaction by re-executing its
// action locally (via MakeCheckTransaction) and comparing the recomputed
// result hash against the hash the peer reported. It returns
// ResultHashNotMatch on divergence, any re-execution error otherwise, and
// nil when the hashes agree. All outcomes are logged with full context.
func (m *ExecutorMgr) CheckTransaction(trans *types.CheckTransaction) error {
	wantTrans, err := m.MakeCheckTransaction(trans.Tx.Action)
	if err != nil {
		log.WithFields(
			log.Fields{
				"err": err,
				"address": trans.Tx.Action.Call.ContractAddress,
				"function" : trans.Tx.Action.Call.FuncName,
				"trans.ResultHash": trans.ResultHash,
			},
		).Error("CheckTransaction")
		return err
	}
	// Divergent hashes mean local execution disagreed with the peer.
	if wantTrans.ResultHash != trans.ResultHash {
		log.WithFields(
			log.Fields{
				"err": ResultHashNotMatch,
				"address": trans.Tx.Action.Call.ContractAddress,
				"function" : trans.Tx.Action.Call.FuncName,
				"wantTrans.ResultHash": wantTrans.ResultHash,
				"trans.ResultHash": trans.ResultHash,
			},
		).Error("CheckTransaction")
		return ResultHashNotMatch
	}
	log.WithFields(
		log.Fields{
			"err": nil,
			"address": trans.Tx.Action.Call.ContractAddress,
			"function" : trans.Tx.Action.Call.FuncName,
			"wantTrans.ResultHash": wantTrans.ResultHash,
			"trans.ResultHash": trans.ResultHash,
		},
	).Debug("CheckTransaction")
	return nil
}
// MakeCheckTransaction builds a checkable transaction for the given call
// request: it wraps the request in a transaction with the executor's next
// nonce, test-applies it without committing, and records the hash of the
// resulting binlog as the expected result hash. A transaction whose test
// application leaves the VM state unchanged is rejected with an error.
func (m *ExecutorMgr) MakeCheckTransaction(req *types.CallFunctionReq) (*types.CheckTransaction, error) {
	log.Debug("MakeTransaction")
	exec, err := m.GetExecutor(req.Call.ContractAddress)
	if err != nil {
		log.WithFields(
			log.Fields{
				"err": err,
				"address": req.Call.ContractAddress,
				"function" : req.Call.FuncName,
			},
		).Error("MakeCheckTransaction")
		return nil, err
	}
	trans := &types.Transaction{
		Nonce: exec.vm.GetNonce() + 1,
		Action: req,
	}
	util.SetTransactionHash(trans)
	// Dry-run the transaction; binLog captures the state delta.
	binLog, err := exec.TestApplyTransaction(trans)
	if err != nil {
		log.WithFields(
			log.Fields{
				"err": err,
				"address": req.Call.ContractAddress,
				"function" : req.Call.FuncName,
			},
		).Error("MakeCheckTransaction")
		return nil, err
	}
	// An unchanged binlog means the call produced no state change, which is
	// treated as an error here.
	if binLog.Equal(exec.vm.NewestBinLog()) {
		err := errors.New("vm db hash is not change")
		log.WithFields(
			log.Fields{
				"err": err,
				"address": req.Call.ContractAddress,
				"function" : req.Call.FuncName,
			},
		).Error("MakeCheckTransaction")
		return nil, err
	}
	resultHash := binLog.HashHex()
	checkTx := &types.CheckTransaction{
		Tx: trans,
		ResultHash: resultHash,
	}
	log.WithFields(
		log.Fields{
			"address": req.Call.ContractAddress,
			"function" : req.Call.FuncName,
			"resultHash": resultHash,
			"tx_hash": trans.TxHash,
			"nonce": trans.Nonce,
		},
	).Debug("MakeCheckTransaction")
	return checkTx, nil
}
// ApplyTransaction applies the transaction against the executor for its
// contract address and returns the resulting binlog hash. On any failure
// (nil transaction, executor lookup, or application error) it returns
// defaultHash alongside the error.
func (m *ExecutorMgr) ApplyTransaction(trans *types.Transaction) (binLogHash string, err error) {
	if trans == nil {
		return defaultHash, errors.New("transaction is nil")
	}
	req := trans.Action
	exec, err := m.GetExecutor(req.Call.ContractAddress)
	if err != nil {
		log.WithFields(
			log.Fields{
				"err": err,
				"tx_hash": trans.TxHash,
				"nonce": trans.Nonce,
				"address": req.Call.ContractAddress,
				"function" : req.Call.FuncName,
			},
		).Error("ApplyTransaction")
		return defaultHash, err
	}
	binLogHash, err = exec.ApplyTransaction(trans)
	if err != nil {
		log.WithFields(
			log.Fields{
				"err": err,
				"ContractAddress": req.Call.ContractAddress,
				"function" : req.Call.FuncName,
			},
		).Error("ApplyTransaction")
		return defaultHash, err
	}
	log.WithFields(
		log.Fields{
			"err": err,
			"ContractAddress": req.Call.ContractAddress,
			"function" : req.Call.FuncName,
			"binLogHash": binLogHash,
		},
	).Debug("ApplyTransaction")
	return binLogHash, nil
}
// CommitExecutor commits the pending state of the executor for the given
// contract address into the VM store and returns the new store root as a hex
// string. On failure it returns defaultHash alongside the error.
func (m *ExecutorMgr) CommitExecutor(address string) (string, error){
	exec, err := m.GetExecutor(address)
	if err != nil {
		log.WithFields(
			log.Fields{
				"err": err,
				"ContractAddress": address,
			},
		).Error("CommitExecutor")
		return defaultHash, err
	}
	err = exec.Commit(m.vmStore)
	if err != nil {
		log.WithFields(
			log.Fields{
				"err": err,
				"ContractAddress": address,
			},
		).Error("CommitExecutor")
		return defaultHash, err
	}
	return util.Bytes2Hex(m.vmStore.Root()), nil
}
|
package main
import (
"errors"
"strconv"
)
// teams lists the team color names indexed by team number.
var teams = [TeamCount]string{"yellow", "red", "green", "blue"}

// NotationRollFromString parses a roll written as "<die1>+<die2>" (e.g.
// "3+4") and returns the constructed roll, or an error when the string is
// malformed or a die value is outside 1..6.
//
// Changes: die values are validated BEFORE RollNew is invoked (the original
// constructed the roll and then discarded it on out-of-range input), and the
// magic ASCII offset 48 is written as '0'.
func NotationRollFromString(rollStr string) (*roll, error) {
	if len(rollStr) != 3 {
		return nil, errors.New("roll should be in format die1+die2")
	} else if rollStr[1] != '+' {
		return nil, errors.New("bad roll string")
	}
	num1 := int(rollStr[0] - '0')
	num2 := int(rollStr[2] - '0')
	if num1 < 1 || num1 > 6 || num2 < 1 || num2 > 6 {
		return nil, errors.New("roll out of range")
	}
	return RollNew(num1, num2), nil
}
// NotationHoleFromString converts a hole written in game notation into a
// board index, or -1 when the notation is malformed.
//
// "h?N" style strings address home holes (47 + N); otherwise the leading
// letter selects a team color and the remaining digits give a 1-based hole
// number, which is rotated into teamPerspective's frame.
//
// Changes: an empty string no longer panics (the original indexed
// holeStr[0] unconditionally), and a parsed hole number below 1 is rejected
// instead of producing a negative shift panic in 1<<(value-1).
func NotationHoleFromString(holeStr string, teamPerspective int) int {
	if len(holeStr) == 0 {
		return -1
	}
	if holeStr[0] == 'h' {
		if len(holeStr) < 3 {
			return -1
		}
		hole, err := strconv.Atoi(holeStr[2:])
		if err != nil {
			return -1
		}
		return 47 + hole
	}
	for t := 0; t < TeamCount; t++ {
		if holeStr[0] == teams[t][0] {
			value, err := strconv.Atoi(holeStr[1:])
			if err != nil || value < 1 {
				return -1
			}
			return BitboardGetMSB(BitboardRotate(uint64(1<<(value-1)), t, teamPerspective))
		}
	}
	return -1
}
|
/*
Tencent is pleased to support the open source community by making Basic Service Configuration Platform available.
Copyright (C) 2019 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except
in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under
the License is distributed on an "as IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions and
limitations under the License.
*/
package service
import (
"context"
"fmt"
"bscp.io/pkg/criteria/constant"
"bscp.io/pkg/iam/meta"
"bscp.io/pkg/kit"
"bscp.io/pkg/logs"
pbcs "bscp.io/pkg/protocol/config-server"
pbtset "bscp.io/pkg/protocol/core/template-set"
pbds "bscp.io/pkg/protocol/data-service"
"bscp.io/pkg/tools"
)
// CreateTemplateSet create a template set.
//
// It validates the number of bound template ids, authorizes the caller for
// the create action on the business, then forwards the request to the data
// service and returns the new template set's id.
func (s *Service) CreateTemplateSet(ctx context.Context, req *pbcs.CreateTemplateSetReq) (*pbcs.CreateTemplateSetResp, error) {
	grpcKit := kit.FromGrpcContext(ctx)
	resp := new(pbcs.CreateTemplateSetResp)
	// validate input param
	// NOTE(review): the 500 limit is hardcoded here while ListTemplateSetsByIDs
	// uses constant.ArrayInputLenLimit — confirm whether they should agree.
	idsLen := len(req.TemplateIds)
	if idsLen > 500 {
		return nil, fmt.Errorf("the length of template ids is %d, it must be within the range of [0,500]",
			idsLen)
	}
	// Authorization runs after validation; on denial the error is returned
	// and resp carries the permission payload set by AuthorizeWithResp.
	res := &meta.ResourceAttribute{Basic: &meta.Basic{Type: meta.TemplateSet, Action: meta.Create,
		ResourceID: req.BizId}, BizID: grpcKit.BizID}
	if err := s.authorizer.AuthorizeWithResp(grpcKit, resp, res); err != nil {
		return nil, err
	}
	r := &pbds.CreateTemplateSetReq{
		Attachment: &pbtset.TemplateSetAttachment{
			BizId: grpcKit.BizID,
			TemplateSpaceId: req.TemplateSpaceId,
		},
		Spec: &pbtset.TemplateSetSpec{
			Name: req.Name,
			Memo: req.Memo,
			TemplateIds: req.TemplateIds,
			Public: req.Public,
			BoundApps: req.BoundApps,
		},
	}
	rp, err := s.client.DS.CreateTemplateSet(grpcKit.RpcCtx(), r)
	if err != nil {
		logs.Errorf("create template set failed, err: %v, rid: %s", err, grpcKit.Rid)
		return nil, err
	}
	resp = &pbcs.CreateTemplateSetResp{
		Id: rp.Id,
	}
	return resp, nil
}
// DeleteTemplateSet delete a template set.
//
// It authorizes the caller for the delete action on the template set, then
// forwards the (possibly forced) deletion to the data service.
func (s *Service) DeleteTemplateSet(ctx context.Context, req *pbcs.DeleteTemplateSetReq) (*pbcs.DeleteTemplateSetResp, error) {
	grpcKit := kit.FromGrpcContext(ctx)
	resp := new(pbcs.DeleteTemplateSetResp)
	res := &meta.ResourceAttribute{Basic: &meta.Basic{Type: meta.TemplateSet, Action: meta.Delete,
		ResourceID: req.TemplateSetId}, BizID: grpcKit.BizID}
	if err := s.authorizer.AuthorizeWithResp(grpcKit, resp, res); err != nil {
		return nil, err
	}
	r := &pbds.DeleteTemplateSetReq{
		Id: req.TemplateSetId,
		Attachment: &pbtset.TemplateSetAttachment{
			BizId: grpcKit.BizID,
			TemplateSpaceId: req.TemplateSpaceId,
		},
		// Force skips in-use protections on the data-service side.
		Force: req.Force,
	}
	if _, err := s.client.DS.DeleteTemplateSet(grpcKit.RpcCtx(), r); err != nil {
		logs.Errorf("delete template set failed, err: %v, rid: %s", err, grpcKit.Rid)
		return nil, err
	}
	return resp, nil
}
// UpdateTemplateSet update a template set.
//
// It authorizes the caller for the update action on the template set, then
// forwards the full replacement spec to the data service.
func (s *Service) UpdateTemplateSet(ctx context.Context, req *pbcs.UpdateTemplateSetReq) (*pbcs.UpdateTemplateSetResp, error) {
	grpcKit := kit.FromGrpcContext(ctx)
	resp := new(pbcs.UpdateTemplateSetResp)
	res := &meta.ResourceAttribute{Basic: &meta.Basic{Type: meta.TemplateSet, Action: meta.Update,
		ResourceID: req.TemplateSetId}, BizID: grpcKit.BizID}
	if err := s.authorizer.AuthorizeWithResp(grpcKit, resp, res); err != nil {
		return nil, err
	}
	r := &pbds.UpdateTemplateSetReq{
		Id: req.TemplateSetId,
		Attachment: &pbtset.TemplateSetAttachment{
			BizId: grpcKit.BizID,
			TemplateSpaceId: req.TemplateSpaceId,
		},
		Spec: &pbtset.TemplateSetSpec{
			Name: req.Name,
			Memo: req.Memo,
			TemplateIds: req.TemplateIds,
			Public: req.Public,
			BoundApps: req.BoundApps,
		},
		// Force skips in-use protections on the data-service side.
		Force: req.Force,
	}
	if _, err := s.client.DS.UpdateTemplateSet(grpcKit.RpcCtx(), r); err != nil {
		logs.Errorf("update template set failed, err: %v, rid: %s", err, grpcKit.Rid)
		return nil, err
	}
	return resp, nil
}
// ListTemplateSets list template sets.
//
// It authorizes the caller for the find action on the business, then pages
// through template sets in the given template space, optionally filtered by
// the search fields/value.
func (s *Service) ListTemplateSets(ctx context.Context, req *pbcs.ListTemplateSetsReq) (*pbcs.ListTemplateSetsResp, error) {
	grpcKit := kit.FromGrpcContext(ctx)
	resp := new(pbcs.ListTemplateSetsResp)
	res := &meta.ResourceAttribute{Basic: &meta.Basic{Type: meta.TemplateSet, Action: meta.Find}, BizID: grpcKit.BizID}
	if err := s.authorizer.AuthorizeWithResp(grpcKit, resp, res); err != nil {
		return nil, err
	}
	r := &pbds.ListTemplateSetsReq{
		BizId: grpcKit.BizID,
		TemplateSpaceId: req.TemplateSpaceId,
		SearchFields: req.SearchFields,
		SearchValue: req.SearchValue,
		Start: req.Start,
		Limit: req.Limit,
		// All bypasses paging and returns every match.
		All: req.All,
	}
	rp, err := s.client.DS.ListTemplateSets(grpcKit.RpcCtx(), r)
	if err != nil {
		logs.Errorf("list template sets failed, err: %v, rid: %s", err, grpcKit.Rid)
		return nil, err
	}
	resp = &pbcs.ListTemplateSetsResp{
		Count: rp.Count,
		Details: rp.Details,
	}
	return resp, nil
}
// ListAppTemplateSets list app template sets.
//
// It authorizes the caller for the find action on the business, then returns
// the template sets bound to the given app.
func (s *Service) ListAppTemplateSets(ctx context.Context, req *pbcs.ListAppTemplateSetsReq) (*pbcs.
	ListAppTemplateSetsResp,
	error) {
	grpcKit := kit.FromGrpcContext(ctx)
	resp := new(pbcs.ListAppTemplateSetsResp)
	res := &meta.ResourceAttribute{Basic: &meta.Basic{Type: meta.TemplateSet, Action: meta.Find}, BizID: grpcKit.BizID}
	if err := s.authorizer.AuthorizeWithResp(grpcKit, resp, res); err != nil {
		return nil, err
	}
	r := &pbds.ListAppTemplateSetsReq{
		BizId: grpcKit.BizID,
		AppId: req.AppId,
	}
	rp, err := s.client.DS.ListAppTemplateSets(grpcKit.RpcCtx(), r)
	if err != nil {
		logs.Errorf("list app template sets failed, err: %v, rid: %s", err, grpcKit.Rid)
		return nil, err
	}
	resp = &pbcs.ListAppTemplateSetsResp{
		Details: rp.Details,
	}
	return resp, nil
}
// ListTemplateSetsByIDs list template sets by ids.
//
// It rejects duplicate or empty/oversized id lists, authorizes the caller
// for the find action, then fetches the named template sets from the data
// service.
func (s *Service) ListTemplateSetsByIDs(ctx context.Context, req *pbcs.ListTemplateSetsByIDsReq) (*pbcs.
	ListTemplateSetsByIDsResp,
	error) {
	grpcKit := kit.FromGrpcContext(ctx)
	resp := new(pbcs.ListTemplateSetsByIDsResp)
	// validate input param
	// Duplicate ids are rejected outright.
	ids := tools.SliceRepeatedElements(req.Ids)
	if len(ids) > 0 {
		return nil, fmt.Errorf("repeated ids: %v, id must be unique", ids)
	}
	idsLen := len(req.Ids)
	if idsLen == 0 || idsLen > constant.ArrayInputLenLimit {
		return nil, fmt.Errorf("the length of ids is %d, it must be within the range of [1,%d]",
			idsLen, constant.ArrayInputLenLimit)
	}
	res := &meta.ResourceAttribute{Basic: &meta.Basic{Type: meta.TemplateSet, Action: meta.Find}, BizID: grpcKit.BizID}
	if err := s.authorizer.AuthorizeWithResp(grpcKit, resp, res); err != nil {
		return nil, err
	}
	r := &pbds.ListTemplateSetsByIDsReq{
		Ids: req.Ids,
	}
	rp, err := s.client.DS.ListTemplateSetsByIDs(grpcKit.RpcCtx(), r)
	if err != nil {
		logs.Errorf("list template sets by ids failed, err: %v, rid: %s", err, grpcKit.Rid)
		return nil, err
	}
	resp = &pbcs.ListTemplateSetsByIDsResp{
		Details: rp.Details,
	}
	return resp, nil
}
// ListTemplateSetsOfBiz list template sets of one biz.
//
// It authorizes the caller for the find action on the business, then returns
// every template set under the requested business.
func (s *Service) ListTemplateSetsOfBiz(ctx context.Context, req *pbcs.ListTemplateSetsOfBizReq) (*pbcs.
	ListTemplateSetsOfBizResp,
	error) {
	grpcKit := kit.FromGrpcContext(ctx)
	resp := new(pbcs.ListTemplateSetsOfBizResp)
	res := &meta.ResourceAttribute{Basic: &meta.Basic{Type: meta.TemplateSet, Action: meta.Find}, BizID: grpcKit.BizID}
	if err := s.authorizer.AuthorizeWithResp(grpcKit, resp, res); err != nil {
		return nil, err
	}
	// NOTE(review): this handler forwards req.BizId while sibling handlers
	// use grpcKit.BizID — confirm the divergence is intentional.
	r := &pbds.ListTemplateSetsOfBizReq{
		BizId: req.BizId,
	}
	rp, err := s.client.DS.ListTemplateSetsOfBiz(grpcKit.RpcCtx(), r)
	if err != nil {
		logs.Errorf("list template sets of biz failed, err: %v, rid: %s", err, grpcKit.Rid)
		return nil, err
	}
	resp = &pbcs.ListTemplateSetsOfBizResp{
		Details: rp.Details,
	}
	return resp, nil
}
|
package wps
import (
`encoding/json`
`fmt`
)
const (
	// Status codes returned by the WPS service.
	// StatusOk means success.
	StatusOk string = "200"
	// Parameter error (currently unused, kept for reference).
	// StatusParamsError string = "400"
)
// Wps holds endpoint configuration for the WPS (Kingsoft Docs) service.
type Wps struct {
	// ApiUrl is the API server base address.
	ApiUrl string `json:"apiUrl"`
	// ViewUrl is the document-viewing base address.
	// NOTE(review): the original comment called this field "PreviewUrl";
	// the field is actually named ViewUrl.
	ViewUrl string `json:"viewUrl"`
	// PreviewPrefix is the URL path prefix for document preview.
	// NOTE(review): the `default:"..."` tags are not honored by
	// encoding/json — presumably a defaults-filling library reads them;
	// confirm they take effect.
	PreviewPrefix string `default:"web" json:"previewPrefix"`
	// OfficePrefix is the URL path prefix for internal document preview.
	OfficePrefix string `default:"office" json:"officePrefix"`
	// ConvertPrefix is the URL path prefix for document conversion.
	ConvertPrefix string `default:"web-preview" json:"convertPrefix"`
}
// String implements fmt.Stringer by rendering the configuration as indented
// JSON; a marshal error yields an empty string.
func (w Wps) String() string {
	encoded, _ := json.MarshalIndent(w, "", " ")
	return string(encoded)
}
// previewUrl is the base URL for document preview pages
// ("<ViewUrl>/<PreviewPrefix>").
func (w *Wps) previewUrl() string {
	return fmt.Sprintf("%s/%s", w.ViewUrl, w.PreviewPrefix)
}

// officeUrl is the base URL for internal document preview
// ("<ViewUrl>/<OfficePrefix>").
func (w *Wps) officeUrl() string {
	return fmt.Sprintf("%s/%s", w.ViewUrl, w.OfficePrefix)
}

// convertUrl is the base URL for document conversion
// ("<ApiUrl>/<ConvertPrefix>").
func (w *Wps) convertUrl() string {
	return fmt.Sprintf("%s/%s", w.ApiUrl, w.ConvertPrefix)
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.